
How to merge audio and video files in Android

Thanks for the great mp4parser lib. I have a few queries related to audio/video muxing.

We used the code below on Android, but we are not getting the expected output. We placed a working .mp4 file in the given directory and tried, but no luck.

We do get a merged file, but the audio simply gets appended after the video: the appended audio does not play, it only increases the duration of the file.

Any help would be appreciated.

Here is the code:

    File sdCard = Environment.getDataDirectory();

    String videofilepath = Environment.getExternalStorageDirectory().toString()+"/video.mp4";
    String audiofilepath = Environment.getExternalStorageDirectory().toString()+"/audio.aac";
    File file=new File(videofilepath);

    H264TrackImpl h264Track = new H264TrackImpl(new FileDataSourceImpl(videofilepath));
    AACTrackImpl aacTrack = new AACTrackImpl(new FileDataSourceImpl(audiofilepath));

    Movie movie = new Movie();
    movie.addTrack(h264Track);
    movie.addTrack(aacTrack);


    Container mp4file = new DefaultMp4Builder().build(movie);

    FileChannel fc = new FileOutputStream(new File(Environment.getExternalStorageDirectory().toString() + "/video.mp4")).getChannel();
    mp4file.writeContainer(fc);
    fc.close();
asked Oct 31 '22 by Naruto

1 Answer

1. You can merge .m4a, .aac, and .3gp audio files with an .mp4 video file on Android.
2. .mp3 audio files cannot be merged with video files.
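
For the asker's original case (.mp4 video plus .aac audio), here is a minimal sketch along those lines. It assumes the mp4parser 1.x API already used in the question (MovieCreator, AACTrackImpl, FileDataSourceImpl, DefaultMp4Builder under the com.googlecode.mp4parser packages); the class name and paths are placeholders. The key points are that the existing video is parsed with MovieCreator (not H264TrackImpl, which is meant for raw .h264 elementary streams rather than .mp4 files) and that the output path differs from the input so the source video is not overwritten:

    import com.coremedia.iso.boxes.Container;
    import com.googlecode.mp4parser.FileDataSourceImpl;
    import com.googlecode.mp4parser.authoring.Movie;
    import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
    import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
    import com.googlecode.mp4parser.authoring.tracks.AACTrackImpl;

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.channels.FileChannel;

    public class AacMp4MuxSketch {

        // Parses an existing .mp4 video, adds the .aac file as an audio track,
        // and writes the combined movie to a separate output file.
        public static void mux(String videoPath, String audioPath, String outputPath) throws IOException {
            Movie movie = MovieCreator.build(videoPath);                         // existing .mp4 (keeps its video track)
            movie.addTrack(new AACTrackImpl(new FileDataSourceImpl(audioPath))); // add the AAC audio as a second track

            Container out = new DefaultMp4Builder().build(movie);
            FileChannel fc = new FileOutputStream(new File(outputPath)).getChannel();
            out.writeContainer(fc);                                              // write the muxed .mp4
            fc.close();
        }
    }

For example, with the question's paths you could call AacMp4MuxSketch.mux(videofilepath, audiofilepath, Environment.getExternalStorageDirectory() + "/merged.mp4");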

The complete code below works for merging .mp4 audio with a blank video (one that has no sound):

public class Search extends FragmentActivity implements LoaderCallbacks<Boolean> {

    private final Search self = this;
    private ProgressDialog mProgressDialog;
    VideoView mVideoView;
    ImageView back, next;
    String path;
    String root = Environment.getExternalStorageDirectory().toString();
    String songpath, songname;
    String TAG = "Logcat";
    String[] fileList;

    String audiopath = "/sdcard/dubmash/audio.m4a";
    String videopath = "/sdcard/dubmash/video.mp4";
    String output;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.search);

        Intent i = getIntent();
        if (getIntent().getExtras() != null) {
            songpath = i.getStringExtra("songpath");
            songname = i.getStringExtra("songname");
            Log.e("Search Class :song name", songname);
            Log.e("Search Class :song path", songpath);
        }


        mVideoView = (VideoView) findViewById(R.id.videoview);
        root = Environment.getExternalStorageDirectory().toString();

        back = (ImageView) findViewById(R.id.back);
        next = (ImageView) findViewById(R.id.next);
        String audio = audiopath;
        String video = videopath;
        output = "/sdcard/dubmash/" + songname + ".mp4";

        Log.e("FILE", "audio:" + audio + " video:" + video + " out:" + output);

        try {
            mux(video, audio, output);
        } catch (NullPointerException e) {
            Toast.makeText(Search.this, "Please Record Again ", Toast.LENGTH_SHORT).show();
        }

        try {
            //Delete video file
            File file = new File(videopath);
            boolean deleted = file.delete();
            //Delete Audio file
            File files = new File(audiopath);
            boolean deleted1 = files.delete();


            mVideoView.setVideoPath(output);
            mVideoView.requestFocus();
            mVideoView.start();
        } catch (NullPointerException e) {
            Toast.makeText(Search.this, "File not found", Toast.LENGTH_SHORT).show();
        }


        Button play = (Button) findViewById(R.id.playvideoplayer);
        play.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {

                mVideoView.setVideoPath(output);
                mVideoView.requestFocus();
                mVideoView.start();

            }
        });

        next.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent i = new Intent(Search.this, HomePage.class);
                i.putExtra("showdubs", "dubs");
                startActivity(i);
                finish();
            }
        });

        back.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent i = new Intent(Search.this, AlbumDetailVideo.class);
                i.putExtra("songpath", songpath);
                i.putExtra("songname", songname);
                startActivity(i);
                finish();
            }
        });

    }

    @Override
    public Loader<Boolean> onCreateLoader(int id, Bundle args) {
        return new EditMovieTask(self, args.getInt("type"));
    }

    @Override
    public void onLoadFinished(Loader<Boolean> loader, Boolean succeed) {
        getSupportLoaderManager().destroyLoader(loader.getId());
        mProgressDialog.dismiss();
    }

    @Override
    public void onLoaderReset(Loader<Boolean> loader) {
    }

    public static class EditMovieTask extends AsyncTaskLoader<Boolean> {

        private int mType;

        public EditMovieTask(Context context, int type) {
            super(context);
            mType = type;
            forceLoad();
        }

        @Override
        public Boolean loadInBackground() {

            switch (mType) {
                case 0:
                    //         return append();
                case 1:
                    //       return crop();
                case 2:
                    //     return subTitle();
            }

            return false;
        }


    }

    /**
     * Merges the first audio track from audioFile into the movie parsed from
     * videoFile (using mp4parser) and writes the result to outputFile.
     * Returns true on success, false if anything goes wrong.
     */
    public boolean mux(String videoFile, String audioFile, String outputFile) {
        Movie video;
        try {
            video = new MovieCreator().build(videoFile);
            Log.e("Audio Video", "1");
        } catch (RuntimeException e) {
            e.printStackTrace();
            Log.e("Audio Video", "2");
            return false;
        } catch (IOException e) {
            e.printStackTrace();
            Log.e("Audio Video", "3");
            return false;
        }

        Movie audio;
        try {

            audio = new MovieCreator().build(audioFile);
            Log.e("Audio Video", "4");
        } catch (IOException e) {
            e.printStackTrace();
            Log.e("Audio Video", "5");
            return false;
        } catch (NullPointerException e) {
            e.printStackTrace();
            Log.e("Audio Video", "6");
            return false;
        }

        Track audioTrack = audio.getTracks().get(0);
        video.addTrack(audioTrack);

        Container out = new DefaultMp4Builder().build(video);
        Log.e("Audio Video", "8");
        FileOutputStream fos;
        try {
            Log.e("Audio Video", "9");
            fos = new FileOutputStream(outputFile);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            return false;
        }
        BufferedWritableFileByteChannel byteBufferByteChannel = new BufferedWritableFileByteChannel(fos);
        try {
            Log.e("Audio Video", "10");
            out.writeContainer(byteBufferByteChannel);
            byteBufferByteChannel.close();
            Log.e("Audio Video", "11");
            fos.close();
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * WritableByteChannel that buffers writes in memory and flushes them to the
     * underlying stream in large chunks, avoiding many small writes.
     */
    private static class BufferedWritableFileByteChannel implements WritableByteChannel {
        //    private static final int BUFFER_CAPACITY = 1000000;
        private static final int BUFFER_CAPACITY = 10000000;

        private boolean isOpen = true;
        private final OutputStream outputStream;
        private final ByteBuffer byteBuffer;
        private final byte[] rawBuffer = new byte[BUFFER_CAPACITY];

        private BufferedWritableFileByteChannel(OutputStream outputStream) {
            this.outputStream = outputStream;
            this.byteBuffer = ByteBuffer.wrap(rawBuffer);
            Log.e("Audio Video", "13");
        }

        @Override
        public int write(ByteBuffer inputBuffer) throws IOException {
            int inputBytes = inputBuffer.remaining();

            if (inputBytes > byteBuffer.remaining()) {
                Log.e("Size ok ", "song size is ok");
                dumpToFile();
                byteBuffer.clear();

                if (inputBytes > byteBuffer.remaining()) {
                    Log.e("Size ok ", "song size is not okssss ok");
                    throw new BufferOverflowException();
                }
            }

            byteBuffer.put(inputBuffer);

            return inputBytes;
        }

        @Override
        public boolean isOpen() {
            return isOpen;
        }

        @Override
        public void close() throws IOException {
            dumpToFile();
            isOpen = false;
        }

        private void dumpToFile() {
            try {
                outputStream.write(rawBuffer, 0, byteBuffer.position());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }


    /**
     * Alternative approach using the platform MediaExtractor/MediaMuxer APIs
     * (MediaMuxer requires API level 18+): copies video and audio samples into
     * a new MPEG-4 file without re-encoding.
     */
    private void muxing() {

        String outputFile = "";

        try {

            root = Environment.getExternalStorageDirectory().toString();
            String audio = root + "/" + "audio.mp3";
            String video = root + "/" + "rohit.mp4";


            File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final2.mp4");
            file.createNewFile();
            outputFile = file.getAbsolutePath();


            MediaExtractor videoExtractor = new MediaExtractor();
            videoExtractor.setDataSource(video);

            MediaExtractor audioExtractor = new MediaExtractor();
            audioExtractor.setDataSource(audio);

            Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount());
            Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount());

            MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

            videoExtractor.selectTrack(0);
            MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
            int videoTrack = muxer.addTrack(videoFormat);

            audioExtractor.selectTrack(0);
            MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
            int audioTrack = muxer.addTrack(audioFormat);

            Log.d(TAG, "Video Format " + videoFormat.toString());
            Log.d(TAG, "Audio Format " + audioFormat.toString());

            boolean sawEOS = false;
            int frameCount = 0;
            int offset = 100;
            int sampleSize = 256 * 1024;
            ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
            ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
            MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
            MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();


            videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
            audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

            muxer.start();

            while (!sawEOS) {
                videoBufferInfo.offset = offset;
                videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);


                if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawEOS = true;
                    videoBufferInfo.size = 0;

                } else {
                    videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                    videoBufferInfo.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
                    muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                    videoExtractor.advance();


                    frameCount++;
                    Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                    Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

                }
            }

            Toast.makeText(getApplicationContext(), "frame:" + frameCount, Toast.LENGTH_SHORT).show();


            boolean sawEOS2 = false;
            int frameCount2 = 0;
            while (!sawEOS2) {
                frameCount2++;

                audioBufferInfo.offset = offset;
                audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

                if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawEOS2 = true;
                    audioBufferInfo.size = 0;
                } else {
                    audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                    audioBufferInfo.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
                    muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                    audioExtractor.advance();


                    Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                    Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

                }
            }

            Toast.makeText(getApplicationContext(), "frame:" + frameCount2, Toast.LENGTH_SHORT).show();

            muxer.stop();
            muxer.release();


        } catch (IOException e) {
            Log.d(TAG, "Mixer Error 1 " + e.getMessage());
        } catch (Exception e) {
            Log.d(TAG, "Mixer Error 2 " + e.getMessage());
        }
    }
}
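
To use the first approach, call mux() with the video path, the audio path, and a distinct output path, as onCreate() does above. For example (the paths here are placeholders):

    boolean ok = mux("/sdcard/dubmash/video.mp4", "/sdcard/dubmash/audio.m4a", "/sdcard/dubmash/output.mp4");

The second method, muxing(), shows an alternative based on the platform MediaExtractor/MediaMuxer APIs. As written it reads audio.mp3, but per point 2 above .mp3 audio cannot be merged into an .mp4, so point it at an .m4a/.aac file instead.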
answered Nov 15 '22 by Yogesh Borhade