Recording Live OpenCV Processing on Android

2022-09-02 21:49:54

My goal is to do a few things:

  1. Process frames from the phone's camera feed with OpenCV, using a JavaCameraView
  2. Enable recording of that processed video as it happens

I have both of those working, but the way I had to implement number 2 is ridiculous:

  1. For each frame, write the processed Mat out to an image file.
  2. Once recording stops, use JCodec's Android library to stitch the images into a video file.

This works, but it comes with a pile of drawbacks: the frame rate drops unbearably while recording, the stitching step takes roughly half a second per frame, and it runs out of memory for videos more than a few seconds long. That's after lowering the camera resolution to make sure the images are as small as possible, too. Even then, the video's frame rate bears no relation to reality, presumably because the stitching step has no real capture timestamps to work from, and the video plays back at an alarming speed.

This seems absurd for a lot of reasons, so my question is: is there a better way to do this?

Here's a small example in case anyone wants to run it. It requires the OpenCV Android project, available here, and the JCodec Android project, available here.

AndroidManifest.xml:

<uses-sdk
    android:minSdkVersion="8"
    android:targetSdkVersion="22"
/>

<application
    android:allowBackup="true"
    android:icon="@drawable/ic_launcher"
    android:label="@string/app_name"
    android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >

    <activity
        android:name=".MainActivity"
        android:screenOrientation="landscape"
        android:configChanges="orientation|keyboardHidden|screenSize"
        android:label="@string/app_name" >
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>

</application>

<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

MainActivity:

package com.example.videotest;

import java.io.File;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import android.app.Activity;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2{

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0,255,0);
    private int resolutionIndex = 0;
    private MatVideoWriter matVideoWriter = new MatVideoWriter();


    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i("VideoTest", "OpenCV loaded successfully");

                cameraView.enableView();

            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
            }
        }
    };


    public void onCreate(Bundle savedInstanceState) {

        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (cameraView != null){
            cameraView.disableView();
        }
    }

    @Override
    public void onResume()
    {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();

        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
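        //(the submat shares pixel data with rgba, so drawing into rgbaInnerWindow edits the full frame in place)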

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);


        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();

        rgbaInnerWindow.release();

        if(matVideoWriter.isRecording()){
            matVideoWriter.write(rgba);
        }

        return rgba;
    }


    public void changeResolution(View v){
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if(resolutionIndex >= cameraResolutionList.size()){
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    }

    public void startVideo(View v){

        if(matVideoWriter.isRecording()){
            matVideoWriter.stop();
            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            for(String img : file.list()){
                String scanMe = new File(file, img).getAbsolutePath();
                MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                Log.i("VideoTest", "Scanning: " +scanMe);
            }

            file = new File(file, "video.mp4");
            MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);

        }
        else{ 

            String state = Environment.getExternalStorageState();
            Log.i("VideoTest", "state: " + state);

            File ext = getExternalFilesDir(null);
            Log.i("VideoTest", "ext: " + ext.getAbsolutePath());


            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            if(!file.exists()){
                boolean success = file.mkdirs();

                Log.i("VideoTest", "mkdirs: " + success);
            }
            else{
                Log.i("VideoTest", "file exists.");
            }

            Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());

            matVideoWriter.start(file);
        }

    }

}

CameraView:

package com.example.videotest;

import java.io.FileOutputStream;
import java.util.List;

import org.opencv.android.JavaCameraView;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.util.AttributeSet;
import android.util.Log;

public class CameraView extends JavaCameraView{

    private String mPictureFileName;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<android.hardware.Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(int width, int height) {
        disconnectCamera();
        mMaxHeight = height;
        mMaxWidth = width;
        connectCamera(getWidth(), getHeight());
    }

    public android.hardware.Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }

}

MatVideoWriter:

package com.example.videotest;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MatVideoWriter {

    boolean recording;
    File dir;
    int imageIndex = 0;

    public void start(File dir){
        this.dir = dir;
        recording = true;
    }

    public void stop(){
        recording = false;

        try{
            File file = new File(dir, "video.mp4");
            SequenceEncoder encoder = new SequenceEncoder(file);

            List<File> files = Arrays.asList(dir.listFiles());
            Collections.sort(files, new Comparator<File>(){
                @Override
                public int compare(File lhs, File rhs) {
                    //compare the frame numbers; a plain name comparison would sort img10.png before img2.png
                    return Integer.valueOf(lhs.getName().replaceAll("\\D", ""))
                            .compareTo(Integer.valueOf(rhs.getName().replaceAll("\\D", "")));
                }
            });

            for(File f : files){
                Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                try{
                    Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                    encoder.encodeImage(frame);
                }
                catch(Exception e){
                    e.printStackTrace();
                }

            }
            encoder.finish();
        }
        catch(Exception e){
            e.printStackTrace();
        }
    }

    public void write(Mat mat){

        //swap the R and B channels (dropping alpha) so Highgui.imwrite, which assumes BGR order, saves correct colors
        Mat rgbMat = new Mat();
        Imgproc.cvtColor(mat, rgbMat, Imgproc.COLOR_BGR2RGB);

        File file = new File(dir, "img" + imageIndex + ".png");

        String filename = file.toString();
        boolean success = Highgui.imwrite(filename, rgbMat);

        Log.i("VideoTest", "Success writing img" + imageIndex +".png: " + success);

        imageIndex++;
    }

    public boolean isRecording() {
        return recording;
    }
}

Edit: I haven't gotten any comments or answers, so I've cross-posted to the OpenCV forums here.


Answer 1

I solved a similar problem by creating a MediaRecorder and passing it to a CameraBridgeViewBase, which I modified as follows.

protected MediaRecorder mRecorder;
protected Surface mSurface = null;

public void setRecorder(MediaRecorder rec) {
    mRecorder = rec;
    if (mRecorder != null) {
        mSurface = mRecorder.getSurface();
    }
}

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch(Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }

    if (bmpValid && mCacheBitmap != null) {
        Canvas canvas;

        if (mRecorder != null) {
            canvas = mSurface.lockCanvas(null);

            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);

            if (mScale != 0) {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                     new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
                     (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
                     (int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
                     (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
            } else {
                 canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                     new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                     (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                     (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                     (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
            }

            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }
            mSurface.unlockCanvasAndPost(canvas);
        } 

    }

    ...

}

I kept the original deliverAndDrawFrame code in place so it continues displaying the output to the original surface. This way I can process the images from the camera by implementing onCameraFrame in my MainActivity, and save the resulting images to a video, without needing ffmpeg.

Edit: I've set up the MediaRecorder as follows:

recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);

CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
recorder.setOutputFile("out.mp4");
recorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);

recorder.setOnInfoListener(this);
recorder.setOnErrorListener(this);
recorder.prepare();

registered it with the OpenCvCameraView:

mOpenCvCameraView.setRecorder(recorder);

and started recording:

recorder.start();
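
The answer doesn't show teardown, so here's a minimal sketch of how stopping might look, assuming the same recorder and mOpenCvCameraView fields as above (the null check in setRecorder suggests passing null is how the view is meant to be detached):

// Sketch only: stop and release the MediaRecorder once recording is done.
// Assumes the recorder/mOpenCvCameraView fields from the snippets above.
mOpenCvCameraView.setRecorder(null); // detach so frames stop going to the recorder's surface
recorder.stop();
recorder.reset();    // return the recorder to its idle state
recorder.release();  // free the underlying native resources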

Answer 2

@HaDang pointed me to these links:

http://www.walking-productions.com/notslop/2013/01/16/android-live-streaming-courtesy-of-javacv-and-ffmpeg/

https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java

That example uses a Java wrapper around FFmpeg to do the video recording. For anyone wanting to do the same, this project is a very useful starting point: https://github.com/vanevery/JavaCV_0.3_stream_test

I hammered that project into my example above. It's pretty messy, but it works:

package com.example.videotest;


import java.io.File;
import java.io.IOException;
import java.nio.ShortBuffer;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.FrameRecorder.Exception;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2{

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0,255,0);
    private int resolutionIndex = 0;

    private IplImage videoImage = null;

    boolean recording = false;
    private volatile FFmpegFrameRecorder recorder;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private String ffmpeg_link;

    long startTime = 0;

    private String LOG_TAG = "VideoTest";

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS:
                Log.i("VideoTest", "OpenCV loaded successfully");
                cameraView.enableView();
                break;
            default:
                super.onManagerConnected(status);
                break;
            }
        }
    };


    public void onCreate(Bundle savedInstanceState) {

        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    private void initRecorder() {
        Log.w(LOG_TAG,"initRecorder");

        int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
        int channels = 4;

        // if (yuvIplimage == null) {
        // Recreated after frame size is set in surface change method
        videoImage = IplImage.create(imageWidth, imageHeight, depth, channels);
        //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);

        Log.v(LOG_TAG, "IplImage.create");
        // }

        File videoFile = new File(getExternalFilesDir(null), "VideoTest/images/video.mp4");
        boolean mk = videoFile.getParentFile().mkdirs();
        Log.v(LOG_TAG, "Mkdir: " + mk);

        boolean del = videoFile.delete();
        Log.v(LOG_TAG, "del: " + del);

        try {
            boolean created = videoFile.createNewFile();
            Log.v(LOG_TAG, "Created: " + created);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        ffmpeg_link = videoFile.getAbsolutePath();
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("mp4");
        Log.v(LOG_TAG, "recorder.setFormat(\"mp4\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (cameraView != null){
            cameraView.disableView();
        }
    }

    @Override
    public void onResume()
    {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();

        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);


        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();

        rgbaInnerWindow.release();

        if(recording){
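            //copy the frame's RGBA bytes out of the Mat; onFrame() hands them to the FFmpeg recorder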
            byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
            rgba.get(0, 0, byteFrame);
            onFrame(byteFrame);
        }

        return rgba;
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null) {
            Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }

        MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
    }


    public void changeResolution(View v){
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if(resolutionIndex >= cameraResolutionList.size()){
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();

        imageWidth = resolution.width;
        imageHeight = resolution.height;

        frameRate = cameraView.getFrameRate();

        initRecorder();
    }

    int frames = 0;

    private void onFrame(byte[] data){

        if (videoImage != null && recording) {
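            // milliseconds * 1000 = microseconds; javacv's FrameRecorder timestamps are microsecond-based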
            long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

            // Put the processed camera frame into the videoImage buffer
            videoImage.getByteBuffer().put(data);

            try {

                // Get the correct time
                recorder.setTimestamp(videoTimestamp);

                // Record the image into FFmpegFrameRecorder
                recorder.record(videoImage);

                frames++;

                Log.i(LOG_TAG, "Wrote Frame: " + frames);

            } 
            catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG,e.getMessage());
                e.printStackTrace();
            }
        }

    }

    public void startVideo(View v){

        recording = !recording;

        Log.i(LOG_TAG, "Recording: " + recording);

        if(recording){
            startTime = System.currentTimeMillis();
            try {
                recorder.start();

                Log.i(LOG_TAG, "STARTED RECORDING.");

            } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        else{
            stopRecording();
        }
    }



    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, 
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz, 
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    //Log.v(LOG_TAG,"audioRecord bufferReadResult: " + bufferReadResult);

                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG,e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG,"AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;

                MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);

                Log.v(LOG_TAG,"audioRecord released");
            }
        }
    }

}
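
One gap worth flagging: changeResolution() above calls cameraView.getFrameRate(), but the CameraView class posted in the question doesn't define that method. A minimal sketch of such a helper, assuming the same deprecated android.hardware.Camera API used throughout this post (getPreviewFpsRange() reports values scaled by 1000):

//Hypothetical addition to CameraView; not part of the original post.
public int getFrameRate() {
    int[] range = new int[2];
    mCamera.getParameters().getPreviewFpsRange(range);
    //values are frames-per-second * 1000, so scale down to whole fps
    return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000;
}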
