读书人

关于android视频采集的有关问题求解

发布时间: 2012-04-22 18:34:46 作者: rapoo

关于android视频采集的问题,急求解
最近在有个项目是关于android的视频采集,要求是捕捉某个事件发生前后30秒的视频片段。以前从没有做过android或多媒体开发,很多概念和技术不是很清楚,在网上各论坛转了很久,大致有个思路(MediaRecorder类不知道能不能满足上面的要求),就是利用Camera的PreviewCallback接口,通过实现onPreviewFrame(byte[] data, Camera camera)来捕捉每一帧数据,然后将data保存到文件里,问题是直接保存的数据无法播放,网上有人提供过decodeYUV420SP方法来将默认的YUV格式转码为RGB格式,问题是无论转码前还是转码后我都不知道应该保存为什么文件,是不是直接保存为后缀名为".3gp"或".mp4"就可以了?
我一直是在模拟器上调试的,不知道是不是一定需要在真机上运行才行。
跪求各达人显显灵,谢啦!

以下是我的代码,请各位达人斧正

Java code
import java.io.File;import java.io.RandomAccessFile;import android.app.Activity;import android.graphics.PixelFormat;import android.hardware.Camera;import android.os.Bundle;import android.util.Log;import android.view.SurfaceHolder;import android.view.SurfaceView;import android.view.Window;import android.view.WindowManager;public class AndroidCamera extends Activity implements SurfaceHolder.Callback {        private SurfaceView mSurfaceView = null;    private SurfaceHolder mSurfaceHolder = null;    private Camera mCamera = null;        private boolean mPreviewRunning = false;        /** Called when the activity is first created. */    @Override    public void onCreate(Bundle savedInstanceState) {        super.onCreate(savedInstanceState);                getWindow().setFormat(PixelFormat.TRANSLUCENT);        requestWindowFeature(Window.FEATURE_NO_TITLE);        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,                WindowManager.LayoutParams.FLAG_FULLSCREEN);                        setContentView(R.layout.main);                mSurfaceView = (SurfaceView)findViewById(R.id.surface_camera);        mSurfaceHolder = mSurfaceView.getHolder();        mSurfaceHolder.addCallback(this);        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);    }    @Override    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {        if(mPreviewRunning) {            mCamera.stopPreview();        }                Camera.Parameters p = mCamera.getParameters();                p.setPreviewSize(width, height);        mCamera.setPreviewCallback(new VideoData(width, height));        mCamera.setParameters(p);                try {            mCamera.setPreviewDisplay(holder);        } catch(Exception e) {            e.printStackTrace();        }                mCamera.startPreview();        mPreviewRunning = true;            }    @Override    public void surfaceCreated(SurfaceHolder holder) {        mCamera = Camera.open();   
 }    @Override    public void surfaceDestroyed(SurfaceHolder holder) {        Log.v("AndroidCamera", "surfaceDestroyed");        if(mCamera != null) {            mCamera.setPreviewCallback(null);            mCamera.stopPreview();            mPreviewRunning =false;            mCamera.release();            mCamera = null;        }            }}class VideoData implements Camera.PreviewCallback {    RandomAccessFile raf=null;    byte[] h264Buff =null;        public VideoData(int width, int height) {        Log.v("androidCamera", "new VideoData");        h264Buff = new byte[width * height *8];                try {            Log.v("androidCamera", "Create File: /sdcard/camera.dat start");            File file = new File("/sdcard/camera.3gp");            Log.v("androidCamera", "Create File: /sdcard/camera.dat end");            raf = new RandomAccessFile(file, "rw");        } catch (Exception ex) {            ex.printStackTrace();        }    }        @Override    public void onPreviewFrame(byte[] data, Camera camera) {        if(data == null) {            return;        }        int previewWidth = camera.getParameters().getPreviewSize().width;        int previewHeight = camera.getParameters().getPreviewSize().height;                byte[] rgbBuffer = new byte[previewWidth * previewHeight * 3];                decodeYUV420SP(rgbBuffer, data, previewWidth, previewHeight);                try {            raf.write(rgbBuffer, 0, data.length);        } catch(Exception ex) {            ex.printStackTrace();        }    }        protected void finalize() {        if (null != raf) {            try {                raf.close();            } catch (Exception ex) {                ex.printStackTrace();            }        }                try {            super.finalize();        } catch (Throwable e) {            // TODO Auto-generated catch block            e.printStackTrace();        }    }        private void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp, int width, int height) 
{            final int frameSize = width * height;            if (rgbBuf == null)                throw new NullPointerException("buffer 'rgbBuf' is null");            if (rgbBuf.length < frameSize * 3)                throw new IllegalArgumentException("buffer 'rgbBuf' size "                        + rgbBuf.length + " < minimum " + frameSize * 3);                       if (yuv420sp == null)                throw new NullPointerException("buffer 'yuv420sp' is null");                       if (yuv420sp.length < frameSize * 3 / 2)                throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length                         + " < minimum " + frameSize * 3 / 2);                            int i = 0, y = 0;            int uvp = 0, u = 0, v = 0;            int y1192 = 0, r = 0, g = 0, b = 0;                        for (int j = 0, yp = 0; j < height; j++) {                 uvp = frameSize + (j >> 1) * width;                 u = 0;                 v = 0;                for (i = 0; i < width; i++, yp++) {                     y = (0xff & ((int) yuv420sp[yp])) - 16;                    if (y < 0) y = 0;                    if ((i & 1) == 0) {                         v = (0xff & yuv420sp[uvp++]) - 128;                         u = (0xff & yuv420sp[uvp++]) - 128;                     }                                         y1192 = 1192 * y;                     r = (y1192 + 1634 * v);                     g = (y1192 - 833 * v - 400 * u);                     b = (y1192 + 2066 * u);                                        if (r < 0) r = 0; else if (r > 262143) r = 262143;                    if (g < 0) g = 0; else if (g > 262143) g = 262143;                    if (b < 0) b = 0; else if (b > 262143) b = 262143;                                         rgbBuf[yp * 3] = (byte)(r >> 10);                     rgbBuf[yp * 3 + 1] = (byte)(g >> 10);                     rgbBuf[yp * 3 + 2] = (byte)(b >> 10);                }            }    }} 



[解决办法]
保存的是原始数据YUV和RGB,不要搞什么MP4、3GP,那些是编码后的数据
[解决办法]
Java code
/**     * 捕获视频     *      * @param filePath     *            视频保存的路径     */    public void captureVideo(String filePath) {        mCamera.unlock();        mRecorder = new MediaRecorder();        mRecorder.setCamera(mCamera);        mRecorder.setPreviewDisplay(mHolder.getSurface());        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);        mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);        // mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);        mRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);        mRecorder.setVideoSize(320, 240);        // mRecorder.setVideoFrameRate(15);        mRecorder.setOutputFile(filePath);        try {            mRecorder.prepare();        } catch (Exception e) {        }        mRecorder.start(); // Recording is now started    }    /**     * 停止捕获视频     */    public void stopVideo() {        if (mRecorder == null)            return;        mRecorder.stop();        mRecorder.reset(); // You can reuse the object by going back to                            // setAudioSource() step        mRecorder.release(); // Now the object cannot be reused        mRecorder = null;        try {            mCamera.reconnect();        } catch (Exception e) {        }    }
[解决办法]
MediaRecorder类没用过
yellshine用MediaRecorder实现,认为是设置了MPEG_4_SP属性,这样输出的数据是mpeg4编码后的数据
看你的程序,你的程序获取的数据是RGB格式的,可能和你使用的MediaRecorder中播放数据格式不一样

只要采集到得数据和MediaRecorder能播放的数据一致,应该就可以播放出来
你可以把摄像头输出的数据设为RGB格式,然后用Bitmap的方式进行显示
[解决办法]
camera相关绝对需要在真机上测试的。

camera采集的格式有:JPEG,YV12,NV16,NV21,RGB_565

Java code
// Save raw YUV preview frames under a plain ".yuv" name on external storage —
// not ".3gp"/".mp4", since those containers require encoded data.
// NOTE(review): snippet assumes an `outStream` field declared elsewhere — verify in the enclosing class.
File file = new File(Environment.getExternalStorageDirectory(),"myvideo.yuv");outStream = new FileOutputStream(file);
[解决办法]
这两个我都尝试过,感觉是PreviewCallback接口更适合用在实时传输上,因为它获取数据是一帧一帧的,但是在编码的时候就会有点问题,网上采用的那些软编码算法效率实在太低,而直接传YUV则带宽吃不消。
MediaRecorder比较适合用于录制和采集,虽然不知道是软还是硬编码,但是设置起来还是相对轻松的。只是如果用于实时的话则有点问题,因为你无法即时地获取每帧的数据,你能获取的只是一个长串的流。

读书人网 >Android

热点推荐