MediaCodec get all frames from video

I'm trying to use MediaCodec to retrieve all the frames from a video for some image-processing work. I render the video and try to capture each frame from the output buffers, but I can't create a Bitmap instance from the received bytes.

I've tried rendering to a Surface and to nothing (null), because I've noticed that when you render to null, the output buffers do receive the bytes of the decoded frames.

This is the code:

private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
private PlayerThread mPlayer = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    SurfaceView sv = new SurfaceView(this);
    sv.getHolder().addCallback(this);
    setContentView(sv);
}

@Override
protected void onDestroy() {
    super.onDestroy();
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    if (mPlayer == null) {
        mPlayer = new PlayerThread(holder.getSurface());
        mPlayer.start();
    }
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    if (mPlayer != null) {
        mPlayer.interrupt();
    }
}

private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
    try {
        Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);

        File file = new File(Environment.getExternalStorageDirectory() + "/test_videos/sample" + i + ".png");
        if (file.exists())
            file.delete();

        file.createNewFile();

        FileOutputStream out = new FileOutputStream(file.getAbsoluteFile());

        bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
        out.close();

    } catch (Exception e) {
        e.printStackTrace();
    }
}

private class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        try {
            extractor.setDataSource(SAMPLE);
        } catch (IOException e) {
            // setDataSource throws IOException; bail out if the file can't be opened
            e.printStackTrace();
            return;
        }

        int trackCount = extractor.getTrackCount();
        Log.d("MediaCodecTag", "Track count: " + trackCount);

        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                decoder = MediaCodec.createDecoderByType(mime);
                decoder.configure(format, surface, null, 0);
                break;
            }
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();

        int i = 0;
        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];

                    int sampleSize = extractor.readSampleData(buffer, 0);

                    if (sampleSize < 0) {
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            /* saves frame to sdcard */
            int outIndex = decoder.dequeueOutputBuffer(info, 10000); // outIndex is negative most of the time (INFO_TRY_AGAIN_LATER)

            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);

                // We use a very simple clock to keep the video FPS, or the video
                // playback will be too fast
                while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                    try {
                        sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                        break;
                    }
                }
                decoder.releaseOutputBuffer(outIndex, true);
                try {
                    byte[] dst = new byte[outputBuffers[outIndex].capacity()];
                    outputBuffers[outIndex].get(dst);
                    writeFrameToSDCard(dst, i, dst.length);
                    i++;
                } catch (Exception e) {
                    Log.d("iDecodeActivity", "Error while creating bitmap with: " + e.getMessage());
                }

                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}

Any help would be much appreciated.

asked Nov 03 '13 by Nativ

1 Answer

You can decode to a Surface or to a ByteBuffer, but not both. Because you are configuring a Surface, there will always be zero bytes of data in the output buffer.
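
Something along these lines illustrates the ByteBuffer path (an untested sketch using the same API 16-era buffer-array calls as the question; "format" and "mime" are the question's variables). With null passed in place of a Surface, info.size becomes non-zero, and the copy must happen before releaseOutputBuffer():

// Sketch: configure with no Surface so decoded bytes land in the output ByteBuffers.
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, null, null, 0);   // null Surface = ByteBuffer output
decoder.start();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

int outIndex = decoder.dequeueOutputBuffer(info, 10000);
if (outIndex >= 0) {
    ByteBuffer buffer = outputBuffers[outIndex];
    byte[] frameData = new byte[info.size];       // non-zero without a Surface
    buffer.position(info.offset);
    buffer.get(frameData, 0, info.size);
    buffer.clear();                               // rewind so the codec can reuse it
    decoder.releaseOutputBuffer(outIndex, false); // copy first, release after
    // frameData now holds one frame in the codec's own YUV layout (see below)
}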

If you configure for ByteBuffer decoding, the data format will vary, but to my knowledge will never be an ARGB format that Bitmap understands. You can see examples of two YUV formats being examined in the buffer-to-buffer tests in the CTS EncodeDecodeTest in method checkFrame(). Note, however, that the first thing it does is check the format and return immediately if it's not recognized.
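
A rough sketch of that format check (untested; the two constants below are the formats checkFrame() knows how to interpret, and real devices may report other, vendor-specific values):

// Mirror the first step of EncodeDecodeTest.checkFrame(): inspect the
// decoder's output color format and bail out if it isn't one we understand.
MediaFormat outFormat = decoder.getOutputFormat();
int colorFormat = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
if (colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar &&
        colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
    Log.d("DecodeActivity", "unable to check frame contents for colorFormat=" + colorFormat);
    return; // vendor-specific layout; the bytes can't be interpreted portably
}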

At present (Android 4.4), the only reliable way to do this is to decode to a SurfaceTexture, render that with GLES, and extract RGB data with glReadPixels(). Sample code is available on bigflake -- see ExtractMpegFramesTest (requires API 16+).
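
The readback at the end of that pipeline looks roughly like this (a sketch only; it assumes the EGL context, SurfaceTexture, and GLES drawing are already set up as in ExtractMpegFramesTest, and that width/height are the video dimensions):

// After drawing the frame with GLES, read the pixels back as RGBA and wrap
// them in a Bitmap. GL's origin is bottom-left, so the result is vertically
// flipped unless the render pass already flipped it (ExtractMpegFramesTest does).
ByteBuffer pixelBuf = ByteBuffer.allocateDirect(width * height * 4);
pixelBuf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);

Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
pixelBuf.rewind();
bmp.copyPixelsFromBuffer(pixelBuf);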

answered by fadden