Recording Live OpenCV Processing on Android

My goal is to do a couple of things:

  1. Use OpenCV and the JavaCameraView to process frames from the phone's camera feed
  2. Enable recording of that processed video as it happens

I have both of them working, but the way I had to implement number 2 is ridiculous:

  1. For each frame, write the processed Mat as an image file.
  2. When the recording stops, use JCodec's Android library to stitch them together into a video file.

That works, but it comes with a ton of drawbacks: the framerate drops unbearably low during a recording, the stitching step takes about half a second per frame, and it runs out of memory for videos more than a couple of seconds long, and that's after I lower my camera's resolution to make sure the images are as small as possible. Even then, the video framerate is way out of whack with reality, and the video looks insanely sped up.

This seems ridiculous for a lot of reasons, so my question is: is there a better way to do this?

Here's a little example if anybody wants to run it. This requires the OpenCV Android project available here, and the JCodec Android project available here.

AndroidManifest.xml:

<uses-sdk
    android:minSdkVersion="8"
    android:targetSdkVersion="22"
/>

<application
    android:allowBackup="true"
    android:icon="@drawable/ic_launcher"
    android:label="@string/app_name"
    android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >

    <activity
        android:name=".MainActivity"
        android:screenOrientation="landscape"
        android:configChanges="orientation|keyboardHidden|screenSize"
        android:label="@string/app_name" >
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>

</application>

<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

MainActivity:

package com.example.videotest;

import java.io.File;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import android.app.Activity;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2{

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0,255,0);
    private int resolutionIndex = 0;
    private MatVideoWriter matVideoWriter = new MatVideoWriter();


    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i("VideoTest", "OpenCV loaded successfully");

                cameraView.enableView();

            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
            }
        }
    };


    public void onCreate(Bundle savedInstanceState) {

        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (cameraView != null){
            cameraView.disableView();
        }
    }

    @Override
    public void onResume()
    {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();

        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);


        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();

        rgbaInnerWindow.release();

        if(matVideoWriter.isRecording()){
            matVideoWriter.write(rgba);
        }

        return rgba;
    }


    public void changeResolution(View v){
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if(resolutionIndex >= cameraResolutionList.size()){
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    }

    public void startVideo(View v){

        if(matVideoWriter.isRecording()){
            matVideoWriter.stop();
            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            for(String img : file.list()){
                String scanMe = new File(file, img).getAbsolutePath();
                MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                Log.i("VideoTest", "Scanning: " +scanMe);
            }

            file = new File(file, "video.mp4");
            MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);

        }
        else{ 

            String state = Environment.getExternalStorageState();
            Log.i("VideoTest", "state: " + state);

            File ext = getExternalFilesDir(null);
            Log.i("VideoTest", "ext: " + ext.getAbsolutePath());


            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            if(!file.exists()){
                boolean success = file.mkdirs();

                Log.i("VideoTest", "mkdirs: " + success);
            }
            else{
                Log.i("VideoTest", "file exists.");
            }

            Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());

            matVideoWriter.start(file);
        }

    }

}

CameraView:

package com.example.videotest;

import java.io.FileOutputStream;
import java.util.List;

import org.opencv.android.JavaCameraView;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.util.AttributeSet;
import android.util.Log;

public class CameraView extends JavaCameraView{

    private String mPictureFileName;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<android.hardware.Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(int width, int height) {
        disconnectCamera();
        mMaxHeight = height;
        mMaxWidth = width;
        connectCamera(getWidth(), getHeight());
    }

    public android.hardware.Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }

}

MatVideoWriter:

package com.example.videotest;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MatVideoWriter {

    boolean recording;
    File dir;
    int imageIndex = 0;

    public void start(File dir){
        this.dir = dir;
        recording = true;
    }

    public void stop(){
        recording = false;

        try{
            File file = new File(dir, "video.mp4");
            SequenceEncoder encoder = new SequenceEncoder(file);

            List<File> files = Arrays.asList(dir.listFiles());
            Collections.sort(files, new Comparator<File>(){
                @Override
                public int compare(File lhs, File rhs) {
                    return lhs.getName().compareTo(rhs.getName());
                }
            });

            for(File f : files){
                Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                try{
                    Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                    encoder.encodeImage(frame);
                }
                catch(Exception e){
                    e.printStackTrace();
                }

            }
            encoder.finish();
        }
        catch(Exception e){
            e.printStackTrace();
        }
    }

    public void write(Mat mat){

        //swap the red and blue channels so imwrite (which expects BGR order) saves correct colors
        Mat rgbMat = new Mat();
        Imgproc.cvtColor(mat, rgbMat, Imgproc.COLOR_BGR2RGB);

        File file = new File(dir, "img" + imageIndex + ".png");

        String filename = file.toString();
        boolean success = Highgui.imwrite(filename, rgbMat);

        Log.i("VideoTest", "Success writing img" + imageIndex +".png: " + success);

        imageIndex++;
    }

    public boolean isRecording() {
        return recording;
    }
}

Edit: I haven't received any comments or answers, so I've crossposted to the OpenCV forum here.

Asked Feb 22 '15 by Kevin Workman


2 Answers

I've solved a similar problem by creating a MediaRecorder and passing it to an OpenCV CameraBridgeViewBase, which I've modified as follows.

protected MediaRecorder mRecorder;
protected Surface mSurface = null;

public void setRecorder(MediaRecorder rec) {
    mRecorder = rec;
    if (mRecorder != null) {
        mSurface = mRecorder.getSurface();
    }
}
and

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch(Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }

    if (bmpValid && mCacheBitmap != null) {
        Canvas canvas;

        if (mRecorder != null) {
            canvas = mSurface.lockCanvas(null);

            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);

            if (mScale != 0) {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                     new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
                     (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
                     (int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
                     (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
            } else {
                 canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                     new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                     (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                     (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                     (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
            }

            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }
            mSurface.unlockCanvasAndPost(canvas);
        } 

    }

    // ... rest of the original method (drawing to the view's own surface) unchanged ...

}

I've left the original part of deliverAndDrawFrame as is, so that it keeps displaying the output to the original surface. This way I can process images from a camera by implementing onCameraFrame in MainActivity and save the resulting images to a video, without the need for ffmpeg.
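
For reference, the Activity side stays the stock OpenCV listener pattern; here's a minimal sketch (the body is a hypothetical placeholder, not part of the original answer):

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    // do any OpenCV processing on rgba here; whatever Mat this callback
    // returns is what the modified deliverAndDrawFrame converts to a Bitmap
    // and draws to both the preview surface and the recorder surface
    return rgba;
}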

EDIT: I've set up the MediaRecorder as follows:

recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);

CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
recorder.setOutputFile("out.mp4");
recorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);

recorder.setOnInfoListener(this);
recorder.setOnErrorListener(this);
recorder.prepare();

registered it with the OpenCvCameraView:

mOpenCvCameraView.setRecorder(recorder);

and started recording:

recorder.start();
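
The answer doesn't show teardown; here is a minimal sketch of stopping, assuming the same recorder and mOpenCvCameraView objects as above:

// detach first so deliverAndDrawFrame stops locking the recorder's surface
mOpenCvCameraView.setRecorder(null);

recorder.stop();
recorder.reset();
recorder.release();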
Answered by SimonFojtu


@HaDang pointed me to these links:

http://www.walking-productions.com/notslop/2013/01/16/android-live-streaming-courtesy-of-javacv-and-ffmpeg/

https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java

That example uses a Java wrapper of FFMPEG to do the video recording. This project is a pretty useful starting point for anybody wanting to do the same: https://github.com/vanevery/JavaCV_0.3_stream_test

I took the above project and hammered it into my example. It's very messy, but it works:

package com.example.videotest;


import java.io.File;
import java.io.IOException;
import java.nio.ShortBuffer;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.FrameRecorder.Exception;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2{

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0,255,0);
    private int resolutionIndex = 0;

    private IplImage videoImage = null;

    boolean recording = false;
    private volatile FFmpegFrameRecorder recorder;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private String ffmpeg_link;

    long startTime = 0;

    private String LOG_TAG = "VideoTest";

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS:
                Log.i("VideoTest", "OpenCV loaded successfully");
                cameraView.enableView();
                break;
            default:
                super.onManagerConnected(status);
                break;
            }
        }
    };


    public void onCreate(Bundle savedInstanceState) {

        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    private void initRecorder() {
        Log.w(LOG_TAG,"initRecorder");

        int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
        int channels = 4;

        // buffer that receives each processed RGBA frame before encoding
        videoImage = IplImage.create(imageWidth, imageHeight, depth, channels);
        Log.v(LOG_TAG, "IplImage.create");

        File videoFile = new File(getExternalFilesDir(null), "VideoTest/images/video.mp4");
        boolean mk = videoFile.getParentFile().mkdirs();
        Log.v(LOG_TAG, "Mkdir: " + mk);

        boolean del = videoFile.delete();
        Log.v(LOG_TAG, "del: " + del);

        try {
            boolean created = videoFile.createNewFile();
            Log.v(LOG_TAG, "Created: " + created);
        } catch (IOException e) {
            e.printStackTrace();
        }

        ffmpeg_link = videoFile.getAbsolutePath();
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("mp4");
        Log.v(LOG_TAG, "recorder.setFormat(\"mp4\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (cameraView != null){
            cameraView.disableView();
        }
    }

    @Override
    public void onResume()
    {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();

        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);


        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();

        rgbaInnerWindow.release();

        if(recording){
            byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
            rgba.get(0, 0, byteFrame);
            onFrame(byteFrame);
        }

        return rgba;
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null) {
            Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }

        MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
    }


    public void changeResolution(View v){
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if(resolutionIndex >= cameraResolutionList.size()){
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();

        imageWidth = resolution.width;
        imageHeight = resolution.height;

        frameRate = cameraView.getFrameRate();

        initRecorder();
    }

    int frames = 0;

    private void onFrame(byte[] data){

        if (videoImage != null && recording) {
            // FFmpegFrameRecorder timestamps are in microseconds
            long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

            // copy the processed frame's pixels straight into the IplImage buffer
            videoImage.getByteBuffer().put(data);

            try {

                // stamp the frame with wall-clock time so playback speed matches reality
                recorder.setTimestamp(videoTimestamp);

                // Record the image into FFmpegFrameRecorder
                recorder.record(videoImage);

                frames++;

                Log.i(LOG_TAG, "Wrote Frame: " + frames);

            } 
            catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG,e.getMessage());
                e.printStackTrace();
            }
        }

    }

    public void startVideo(View v){

        recording = !recording;

        Log.i(LOG_TAG, "Recording: " + recording);

        if(recording){
            if(recorder == null){
                // initRecorder is otherwise only called from changeResolution
                initRecorder();
            }
            startTime = System.currentTimeMillis();
            try {
                recorder.start();

                // actually start pulling audio samples from the microphone
                runAudioThread = true;
                audioThread.start();

                Log.i(LOG_TAG, "STARTED RECORDING.");

            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        else{
            stopRecording();
        }
    }



    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, 
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz, 
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    //Log.v(LOG_TAG,"audioRecord bufferReadResult: " + bufferReadResult);

                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG,e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG,"AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;

                MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);

                Log.v(LOG_TAG,"audioRecord released");
            }
        }
    }

}
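
A note on why this avoids the sped-up-video problem from the question: each frame is stamped with wall-clock time via recorder.setTimestamp() (FFmpegFrameRecorder timestamps are in microseconds), so playback speed matches reality instead of assuming a fixed frame rate. Also, since both this answer and the MediaRecorder answer capture audio from AudioSource.MIC, the manifest needs android.permission.RECORD_AUDIO in addition to the permissions shown in the question.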
Answered by Kevin Workman