在Android上录制实时OpenCV处理

我的目标是做一些事情:

  • 使用OpenCV和JavaCameraView来处理来自手机摄像头的帧
  • 在处理视频发生时启用录制
  • 这两点我都已经实现了,但我实现第2点的方式非常荒谬:

  • 对于每个帧,将处理后的Mat写入图像文件。
  • 当录制停止时,使用JCodec的Android库将它们拼接成一个视频文件。
  • 这种做法确实可行,但缺点很多:录制过程中帧率降到难以忍受的程度;拼接步骤每帧大约需要半秒;而且视频超过几秒钟就会把资源耗尽——这还是在我降低了相机分辨率、尽量让图像最小的前提下。即便如此,成片的帧率也不真实,视频看起来像被疯狂加速了一样。

    这看起来很荒谬,因为很多原因,所以我的问题是:有没有更好的方法来做到这一点?

    这里有个例子,如果有人想运行它。 这需要OpenCV Android项目以及JCodec Android项目。

    Manifest.xml:

    <!-- Pre-Marshmallow permission model: with targetSdkVersion 22 (< 23) the
         CAMERA and WRITE_EXTERNAL_STORAGE permissions below are granted at
         install time; no runtime permission requests are needed. -->
    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="22"
    />
    
    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
    
        <!-- Single full-screen landscape activity; configChanges keeps the
             activity alive across rotation/keyboard changes so the camera
             preview is not torn down. -->
        <activity
            android:name=".MainActivity"
            android:screenOrientation="landscape"
            android:configChanges="orientation|keyboardHidden|screenSize"
            android:label="@string/app_name" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    
    </application>
    
    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    

    主要活动:

    package com.example.videotest;
    
    import java.io.File;
    import java.util.List;
    
    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
    import org.opencv.core.Mat;
    import org.opencv.core.Scalar;
    import org.opencv.imgproc.Imgproc;
    
    import android.app.Activity;
    import android.media.MediaScannerConnection;
    import android.os.Bundle;
    import android.os.Environment;
    import android.util.Log;
    import android.view.SurfaceView;
    import android.view.View;
    import android.view.WindowManager;
    import android.widget.Toast;
    
    /**
     * Demo activity: shows the camera preview through OpenCV's JavaCameraView,
     * draws a green Canny-edge overlay onto the centre of every frame, and
     * forwards each processed frame to {@link MatVideoWriter} while recording
     * is enabled (the slow PNG-dump-then-stitch approach the question describes).
     */
    public class MainActivity extends Activity implements CvCameraViewListener2{
    
        private CameraView cameraView;
        // Reused output buffer for Canny; allocated in onCameraViewStarted().
        private Mat edgesMat;
        // Overlay colour for detected edges; the frame is RGBA, so (0,255,0) is green.
        private final Scalar greenScalar = new Scalar(0,255,0);
        // Index into the camera's supported-preview-size list, cycled by changeResolution().
        private int resolutionIndex = 0;
        private MatVideoWriter matVideoWriter = new MatVideoWriter();
    
    
        // Enables the camera view once the async OpenCV manager has loaded the native libs.
        private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    Log.i("VideoTest", "OpenCV loaded successfully");
    
                    cameraView.enableView();
    
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
                }
            }
        };
    
    
        // Standard activity setup: keep the screen on and wire the camera view to this listener.
        public void onCreate(Bundle savedInstanceState) {
    
            super.onCreate(savedInstanceState);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    
            setContentView(R.layout.activity_main);
    
            cameraView = (CameraView) findViewById(R.id.cameraView);
            cameraView.setVisibility(SurfaceView.VISIBLE);
            cameraView.setCvCameraViewListener(this);
        }
    
        @Override
        public void onPause()
        {
            super.onPause();
            // Release the camera while backgrounded.
            if (cameraView != null){
                cameraView.disableView();
            }
        }
    
        @Override
        public void onResume()
        {
            super.onResume();
            // Async-load OpenCV; mLoaderCallback re-enables the view on success.
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
        }
    
        public void onDestroy() {
            super.onDestroy();
            if (cameraView != null)
                cameraView.disableView();
        }
    
        public void onCameraViewStarted(int width, int height) {
            // Allocate the reusable edge buffer once the preview size is known.
            edgesMat = new Mat();
        }
    
        public void onCameraViewStopped() {
            // Free native memory held by the edge buffer.
            if (edgesMat != null)
                edgesMat.release();
    
            edgesMat = null;
        }
    
        /**
         * Per-frame callback: runs Canny on the central 3/4 sub-window of the frame,
         * paints the detected edges green back into the frame in place, and hands
         * the whole frame to the recorder when recording is active.
         */
        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    
            Mat rgba = inputFrame.rgba();
            org.opencv.core.Size sizeRgba = rgba.size();
    
            int rows = (int) sizeRgba.height;
            int cols = (int) sizeRgba.width;
    
            // Centre window covering 3/4 of each dimension (1/8 margin on every side).
            int left = cols / 8;
            int top = rows / 8;
            int width = cols * 3 / 4;
            int height = rows * 3 / 4;
    
            //get sub-image
            Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
    
            //create edgesMat from sub-image
            Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);
    
            Mat colorEdges = new Mat();
            // setTo() below returns a Mat wrapper; the original reference is kept in
            // killMe so that both Java wrappers can be released afterwards.
            Mat killMe = colorEdges;
            edgesMat.copyTo(colorEdges);
            Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
    
    
            // Colour every edge pixel green, then copy only those pixels (masked by
            // edgesMat) back into the live frame's sub-window.
            colorEdges = colorEdges.setTo(greenScalar, edgesMat);
            colorEdges.copyTo(rgbaInnerWindow, edgesMat);
    
            killMe.release();
            colorEdges.release();
    
            rgbaInnerWindow.release();
    
            // While recording, dump the processed frame to disk (the slow path the
            // question complains about).
            if(matVideoWriter.isRecording()){
                matVideoWriter.write(rgba);
            }
    
            return rgba;
        }
    
    
        /**
         * Button handler: cycles to the next supported preview resolution and
         * shows the selected size in a toast.
         */
        public void changeResolution(View v){
            List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
            resolutionIndex++;
            if(resolutionIndex >= cameraResolutionList.size()){
                resolutionIndex = 0;
            }
    
            android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
            cameraView.setResolution(resolution.width, resolution.height);
            resolution = cameraView.getResolution();
            String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
            Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
        }
    
        /**
         * Toggle button handler. If recording: stop, stitch, and media-scan every
         * written image plus the resulting video.mp4 so they show up in the gallery.
         * Otherwise: create the output directory and start a new recording.
         */
        public void startVideo(View v){
    
            if(matVideoWriter.isRecording()){
                matVideoWriter.stop();
                File file = new File(getExternalFilesDir(null), "VideoTest/images/");
                // NOTE(review): file.list() returns null if the directory is missing,
                // which would NPE here - verify the directory always exists at this point.
                for(String img : file.list()){
                    String scanMe = new File(file, img).getAbsolutePath();
                    MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                    Log.i("VideoTest", "Scanning: " +scanMe);
                }
    
                file = new File(file, "video.mp4");
                MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);
    
            }
            else{ 
    
                String state = Environment.getExternalStorageState();
                Log.i("VideoTest", "state: " + state);
    
                File ext = getExternalFilesDir(null);
                Log.i("VideoTest", "ext: " + ext.getAbsolutePath());
    
    
                File file = new File(getExternalFilesDir(null), "VideoTest/images/");
                if(!file.exists()){
                    boolean success = file.mkdirs();
    
                    Log.i("VideoTest", "mkdirs: " + success);
                }
                else{
                    Log.i("VideoTest", "file exists.");
                }
    
                Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());
    
                matVideoWriter.start(file);
            }
    
        }
    
    }
    

    CameraView:

    package com.example.videotest;
    
    import java.io.FileOutputStream;
    import java.util.List;
    
    import org.opencv.android.JavaCameraView;
    
    import android.content.Context;
    import android.hardware.Camera;
    import android.hardware.Camera.PictureCallback;
    import android.util.AttributeSet;
    import android.util.Log;
    
    /**
     * JavaCameraView subclass that exposes a few extra camera controls:
     * colour effects, and querying/changing the preview resolution.
     */
    public class CameraView extends JavaCameraView{
    
        private String mPictureFileName;
    
        public CameraView(Context context, AttributeSet attrs) {
            super(context, attrs);
        }
    
        /** Colour effects supported by the camera driver. */
        public List<String> getEffectList() {
            Camera.Parameters params = mCamera.getParameters();
            return params.getSupportedColorEffects();
        }
    
        /** True when a colour effect is currently set on the camera. */
        public boolean isEffectSupported() {
            return mCamera.getParameters().getColorEffect() != null;
        }
    
        /** The colour effect currently set on the camera, or null. */
        public String getEffect() {
            Camera.Parameters params = mCamera.getParameters();
            return params.getColorEffect();
        }
    
        /** Applies the given colour effect to the camera. */
        public void setEffect(String effect) {
            Camera.Parameters params = mCamera.getParameters();
            params.setColorEffect(effect);
            mCamera.setParameters(params);
        }
    
        /** Preview sizes the camera supports. */
        public List<android.hardware.Camera.Size> getResolutionList() {
            Camera.Parameters params = mCamera.getParameters();
            return params.getSupportedPreviewSizes();
        }
    
        /**
         * Switches the preview to the requested size by tearing the camera down
         * and reconnecting it with new maximum dimensions.
         */
        public void setResolution(int width, int height) {
            disconnectCamera();
            mMaxWidth = width;
            mMaxHeight = height;
            connectCamera(getWidth(), getHeight());
        }
    
        /** The preview size currently in effect. */
        public android.hardware.Camera.Size getResolution() {
            Camera.Parameters params = mCamera.getParameters();
            return params.getPreviewSize();
        }
    
    }
    

    MatVideoWriter:

    package com.example.videotest;
    
    import java.io.File;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;
    
    import org.jcodec.api.android.SequenceEncoder;
    import org.opencv.core.Mat;
    import org.opencv.highgui.Highgui;
    import org.opencv.imgproc.Imgproc;
    
    import android.graphics.Bitmap;
    import android.graphics.BitmapFactory;
    import android.util.Log;
    
    /**
     * Records OpenCV output by dumping every frame to a PNG file and, when
     * recording stops, stitching the PNGs into video.mp4 with JCodec's
     * SequenceEncoder.
     *
     * Fixes over the original:
     *  - frame files are zero-padded (img00001.png) so the lexicographic sort
     *    used before stitching matches chronological order (previously
     *    "img10.png" sorted before "img2.png", jumbling the video);
     *  - only .png files are stitched, so a leftover video.mp4 in the same
     *    directory is no longer fed to the encoder;
     *  - dir.listFiles() == null (missing directory) no longer NPEs;
     *  - the frame index resets on start(), so a second session restarts at 0.
     */
    public class MatVideoWriter {
    
        boolean recording;      // true between start() and stop()
        File dir;               // directory receiving the per-frame PNGs
        int imageIndex = 0;     // next frame number
    
        /** Begins a new recording session writing frames into {@code dir}. */
        public void start(File dir){
            this.dir = dir;
            imageIndex = 0;
            recording = true;
        }
    
        /**
         * Stops recording and stitches every PNG frame in {@link #dir} into
         * {@code video.mp4}, in frame order. Errors are logged, not rethrown.
         */
        public void stop(){
            recording = false;
    
            try{
                File file = new File(dir, "video.mp4");
                SequenceEncoder encoder = new SequenceEncoder(file);
    
                File[] listed = dir.listFiles();
                if (listed == null) {
                    Log.e("VideoTest", "Cannot list frame directory: " + dir);
                    return;
                }
    
                // Keep only the frame images; skip video.mp4 or anything else.
                List<File> files = new java.util.ArrayList<File>();
                for (File f : listed) {
                    if (f.getName().endsWith(".png")) {
                        files.add(f);
                    }
                }
    
                // Zero-padded names sort lexicographically in frame order.
                Collections.sort(files, new Comparator<File>(){
                    @Override
                    public int compare(File lhs, File rhs) {
                        return lhs.getName().compareTo(rhs.getName());
                    }
                });
    
                for(File f : files){
                    Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                    try{
                        Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                        encoder.encodeImage(frame);
                    }
                    catch(Exception e){
                        e.printStackTrace();
                    }
    
                }
                encoder.finish();
            }
            catch(Exception e){
                e.printStackTrace();
            }
        }
    
        /**
         * Writes one frame to disk as a zero-padded PNG (img00000.png, img00001.png, ...).
         */
        public void write(Mat mat){
    
            // NOTE(review): the camera frame is RGBA; COLOR_BGR2RGB only swaps the
            // first and third channels. If colours look swapped in the saved PNGs,
            // Imgproc.COLOR_RGBA2BGRA is likely the intended conversion - confirm.
            Mat rgbMat = new Mat();
            Imgproc.cvtColor(mat, rgbMat, Imgproc.COLOR_BGR2RGB);
    
            File file = new File(dir, String.format(java.util.Locale.US, "img%05d.png", imageIndex));
    
            String filename = file.toString();
            boolean success = Highgui.imwrite(filename, rgbMat);
    
            Log.i("VideoTest", "Success writing " + file.getName() + ": " + success);
    
            imageIndex++;
        }
    
        /** True while a recording session is active. */
        public boolean isRecording() {
            return recording;
        }
    }
    

    编辑:我还没有收到任何意见或答案,所以我在这里交叉发布了OpenCV论坛。


    我已经通过创建MediaRecorder并将其传递给OpenCV CameraBridgeViewBase解决了类似的问题,我修改如下。

    protected MediaRecorder mRecorder;
    protected Surface mSurface = null;
    
    public void setRecorder(MediaRecorder rec) {
        mRecorder = rec;
        if (mRecorder != null) {
            mSurface = mRecorder.getSurface();
        }
    

    /**
     * Modified CameraBridgeViewBase.deliverAndDrawFrame: lets the registered
     * listener process the frame, converts the result to the cached bitmap, and,
     * when a MediaRecorder is attached, draws that bitmap onto the recorder's
     * input surface so the processed frames end up in the recorded video.
     * The original preview-drawing code is kept unchanged (elided below).
     */
    protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
        Mat modified;
    
        // Run the activity's onCameraFrame() processing, if a listener is set.
        if (mListener != null) {
            modified = mListener.onCameraFrame(frame);
        } else {
            modified = frame.rgba();
        }
    
        // Convert the processed Mat into the cached bitmap; on failure just skip
        // this frame instead of crashing.
        boolean bmpValid = true;
        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
                bmpValid = false;
            }
        }
    
        if (bmpValid && mCacheBitmap != null) {
            Canvas canvas;
    
            // Mirror the processed frame onto the recorder's surface.
            if (mRecorder != null) {
                canvas = mSurface.lockCanvas(null);
    
                canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
                Log.d(TAG, "mStretch value: " + mScale);
    
                // mScale != 0: draw scaled and centred; otherwise draw 1:1 centred.
                if (mScale != 0) {
                    canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                         new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
                         (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
                         (int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
                         (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
                } else {
                     canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                         new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                         (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                         (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                         (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
                }
    
                if (mFpsMeter != null) {
                    mFpsMeter.measure();
                    mFpsMeter.draw(canvas, 20, 30);
                }
                mSurface.unlockCanvasAndPost(canvas);
            } 
    
        }
    
        // Original (unmodified) preview-drawing code continues here.
        ...
    
    }
    

    我原样保留了deliverAndDrawFrame的其余部分,使它仍然把输出绘制到原来的Surface上。这样我既可以通过在MainActivity中实现onCameraFrame来处理相机图像,又能把处理后的画面保存成视频,而无需使用ffmpeg。

    编辑我已经设置了MediaRecorder如下

    // MediaRecorder configuration: sources must be set before the profile.
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE); // frames come from mRecorder.getSurface()
    
    // QUALITY_HIGH preset supplies encoder/container defaults; the explicit
    // video size below overrides the preset's dimensions.
    CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
    recorder.setProfile(cpHigh);
    // NOTE(review): a relative path - MediaRecorder generally needs an absolute
    // path (e.g. under getExternalFilesDir()); confirm this works on target devices.
    recorder.setOutputFile("out.mp4");
    recorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
    
    recorder.setOnInfoListener(this);
    recorder.setOnErrorListener(this);
    recorder.prepare();
    

    OpenCvCameraView注册它

    mOpenCvCameraView.setRecorder(recorder);
    

    并开始录制

    recorder.start();
    

    @哈当向我指出了这些链接:

    http://www.walking-productions.com/notslop/2013/01/16/android-live-streaming-courtesy-of-javacv-and-ffmpeg/

    https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java

    该示例使用FFMPEG的Java包装来进行视频录制。 对于任何想要这样做的人来说,这个项目是一个非常有用的起点:https://github.com/vanevery/JavaCV_0.3_stream_test

    我拿来了上述项目,并以它为范例进行修改。代码相当混乱,但确实可以工作:

    package com.example.videotest;
    
    
    import java.io.File;
    import java.io.IOException;
    import java.nio.ShortBuffer;
    import java.util.List;
    
    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
    import org.opencv.core.Mat;
    import org.opencv.core.Scalar;
    import org.opencv.imgproc.Imgproc;
    
    import com.googlecode.javacv.FFmpegFrameRecorder;
    import com.googlecode.javacv.FrameRecorder.Exception;
    import com.googlecode.javacv.cpp.opencv_core.IplImage;
    
    import android.app.Activity;
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;
    import android.media.MediaScannerConnection;
    import android.os.Bundle;
    import android.os.Environment;
    import android.util.Log;
    import android.view.SurfaceView;
    import android.view.View;
    import android.view.WindowManager;
    import android.widget.Toast;
    
    public class MainActivity extends Activity implements CvCameraViewListener2{
    
        private CameraView cameraView;
        private Mat edgesMat;
        private final Scalar greenScalar = new Scalar(0,255,0);
        private int resolutionIndex = 0;
    
        private IplImage videoImage = null;
    
        boolean recording = false;
        private volatile FFmpegFrameRecorder recorder;
    
        private int sampleAudioRateInHz = 44100;
        private int imageWidth = 320;
        private int imageHeight = 240;
        private int frameRate = 30;
    
        private Thread audioThread;
        volatile boolean runAudioThread = true;
        private AudioRecord audioRecord;
        private AudioRecordRunnable audioRecordRunnable;
    
        private String ffmpeg_link;
    
        long startTime = 0;
    
        private String LOG_TAG = "VideoTest";
    
        private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
                }
            }
        };
    
    
        public void onCreate(Bundle savedInstanceState) {
    
            super.onCreate(savedInstanceState);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    
            setContentView(R.layout.activity_main);
    
            cameraView = (CameraView) findViewById(R.id.cameraView);
            cameraView.setVisibility(SurfaceView.VISIBLE);
            cameraView.setCvCameraViewListener(this);
        }
    
        private void initRecorder() {
            Log.w(LOG_TAG,"initRecorder");
    
            int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
            int channels = 4;
    
            // if (yuvIplimage == null) {
            // Recreated after frame size is set in surface change method
            videoImage = IplImage.create(imageWidth, imageHeight, depth, channels);
            //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
    
            Log.v(LOG_TAG, "IplImage.create");
            // }
    
            File videoFile = new File(getExternalFilesDir(null), "VideoTest/images/video.mp4");
            boolean mk = videoFile.getParentFile().mkdirs();
            Log.v(LOG_TAG, "Mkdir: " + mk);
    
            boolean del = videoFile.delete();
            Log.v(LOG_TAG, "del: " + del);
    
            try {
                boolean created = videoFile.createNewFile();
                Log.v(LOG_TAG, "Created: " + created);
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
    
            ffmpeg_link = videoFile.getAbsolutePath();
            recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
            Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);
    
            recorder.setFormat("mp4");
            Log.v(LOG_TAG, "recorder.setFormat("mp4")");
    
            recorder.setSampleRate(sampleAudioRateInHz);
            Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
    
            // re-set in the surface changed method as well
            recorder.setFrameRate(frameRate);
            Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
    
            // Create audio recording thread
            audioRecordRunnable = new AudioRecordRunnable();
            audioThread = new Thread(audioRecordRunnable);
        }
    
        @Override
        public void onPause()
        {
            super.onPause();
            if (cameraView != null){
                cameraView.disableView();
            }
        }
    
        @Override
        public void onResume()
        {
            super.onResume();
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
        }
    
        public void onDestroy() {
            super.onDestroy();
            if (cameraView != null)
                cameraView.disableView();
        }
    
        public void onCameraViewStarted(int width, int height) {
            edgesMat = new Mat();
        }
    
        public void onCameraViewStopped() {
            if (edgesMat != null)
                edgesMat.release();
    
            edgesMat = null;
        }
    
        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    
            Mat rgba = inputFrame.rgba();
            org.opencv.core.Size sizeRgba = rgba.size();
    
            int rows = (int) sizeRgba.height;
            int cols = (int) sizeRgba.width;
    
            int left = cols / 8;
            int top = rows / 8;
            int width = cols * 3 / 4;
            int height = rows * 3 / 4;
    
            //get sub-image
            Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
    
            //create edgesMat from sub-image
            Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);
    
            Mat colorEdges = new Mat();
            Mat killMe = colorEdges;
            edgesMat.copyTo(colorEdges);
            Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
    
    
            colorEdges = colorEdges.setTo(greenScalar, edgesMat);
            colorEdges.copyTo(rgbaInnerWindow, edgesMat);
    
            killMe.release();
            colorEdges.release();
    
            rgbaInnerWindow.release();
    
            if(recording){
                byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
                rgba.get(0, 0, byteFrame);
                onFrame(byteFrame);
            }
    
            return rgba;
        }
    
        public void stopRecording() {
            // This should stop the audio thread from running
            runAudioThread = false;
    
            if (recorder != null) {
                Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
                try {
                    recorder.stop();
                    recorder.release();
                } catch (FFmpegFrameRecorder.Exception e) {
                    e.printStackTrace();
                }
                recorder = null;
            }
    
            MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
        }
    
    
        public void changeResolution(View v){
            List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
            resolutionIndex++;
            if(resolutionIndex >= cameraResolutionList.size()){
                resolutionIndex = 0;
            }
    
            android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
            cameraView.setResolution(resolution.width, resolution.height);
            resolution = cameraView.getResolution();
            String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
            Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    
            imageWidth = resolution.width;
            imageHeight = resolution.height;
    
            frameRate = cameraView.getFrameRate();
    
            initRecorder();
        }
    
        int frames = 0;
    
        private void onFrame(byte[] data){
    
            if (videoImage != null && recording) {
                long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
    
                // Put the camera preview frame right into the yuvIplimage object
                videoImage.getByteBuffer().put(data);
    
                try {
    
                    // Get the correct time
                    recorder.setTimestamp(videoTimestamp);
    
                    // Record the image into FFmpegFrameRecorder
                    recorder.record(videoImage);
    
                    frames++;
    
                    Log.i(LOG_TAG, "Wrote Frame: " + frames);
    
                } 
                catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG,e.getMessage());
                    e.printStackTrace();
                }
            }
    
        }
    
        public void startVideo(View v){
    
            recording = !recording;
    
            Log.i(LOG_TAG, "Recording: " + recording);
    
            if(recording){
                startTime = System.currentTimeMillis();
                try {
                    recorder.start();
    
                    Log.i(LOG_TAG, "STARTED RECORDING.");
    
                } catch (Exception e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            else{
                stopRecording();
            }
        }
    
    
    
        class AudioRecordRunnable implements Runnable {
    
            @Override
            public void run() {
                // Set the thread priority
                android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    
                // Audio
                int bufferSize;
                short[] audioData;
                int bufferReadResult;
    
                bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, 
                        AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
                audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz, 
                        AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    
                audioData = new short[bufferSize];
    
                Log.d(LOG_TAG, "audioRecord.startRecording()");
                audioRecord.startRecording();
    
                // Audio Capture/Encoding Loop
                while (runAudioThread) {
                    // Read from audioRecord
                    bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                    if (bufferReadResult > 0) {
                        //Log.v(LOG_TAG,"audioRecord bufferReadResult: " + bufferReadResult);
    
                        // Changes in this variable may not be picked up despite it being "volatile"
                        if (recording) {
                            try {
                                // Write to FFmpegFrameRecorder
                                recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                            } catch (FFmpegFrameRecorder.Exception e) {
                                Log.v(LOG_TAG,e.getMessage());
                                e.printStackTrace();
                            }
                        }
                    }
                }
                Log.v(LOG_TAG,"AudioThread Finished");
    
                /* Capture/Encoding finished, release recorder */
                if (audioRecord != null) {
                    audioRecord.stop();
                    audioRecord.release();
                    audioRecord = null;
    
                    MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
    
                    Log.v(LOG_TAG,"audioRecord released");
                }
            }
        }
    
    }
    

    可以尝试为Android编译FFMPEG来进行实时视频处理。关于与Android和OpenCV的集成,这个链接会有帮助:

    http://www.jayrambhia.com/blog/ffmpeg-opencv-android

    链接地址: http://www.djcxy.com/p/5253.html

    上一篇: Recording Live OpenCV Processing on Android

    下一篇: android mediarecorder error (MediaRecorder start failed :