I have integrated the FFmpeg library into my project and I can already read the information of media files. Now I need to play MP3 files with Android's AudioTrack class using FFmpeg.
For this I have to pass a byte buffer to AudioTrack, but I don't know how to get the decoded byte buffer out of FFmpeg and use it with AudioTrack. I also want playback to start instantly, without delay.
Here is my AudioTrack code in Java:
AudioTrack track;
int bufferSize = AudioTrack.getMinBufferSize(44100,
        AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
        AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
        bufferSize, AudioTrack.MODE_STREAM);

// Play the audio clip
track.play();
while (!stream_is_over) {
    // Copy the decoded raw buffer from native code into "buffer" ...
    track.write(buffer, 0, readBytes);
}
Can anyone please give me working code to play MP3 files with AudioTrack? I have searched a lot but haven't found a correct answer.
I managed this by buffering the decoded audio in native code and then playing it on the fly with the AudioTrack class. Now I'm trying to pause/stop the audio, because simply stopping or pausing the AudioTrack is not working.
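What seems to be needed is to pause/stop the AudioTrack and, at the same time, stop the loop that feeds it. Below is a minimal sketch of that idea, assuming track is kept as a field (the same AudioTrack as in the snippet above) and that the decoded data is written from a background thread; the flag and method names here are my own, not from the project:

private volatile boolean isPaused = false;      // hypothetical flags, checked by the feeding loop
private volatile boolean stopRequested = false;

public void pausePlayback() {
    isPaused = true;
    track.pause();          // pauses playback of the data already written
}

public void resumePlayback() {
    isPaused = false;
    track.play();
}

public void stopPlayback() {
    stopRequested = true;   // makes the feeding loop exit
    track.pause();
    track.flush();          // discard PCM data that was written but not yet played
    track.stop();
    track.release();
}

// The feeding loop then becomes:
// while (!stream_is_over && !stopRequested) {
//     if (isPaused) continue;   // or sleep briefly to avoid busy-waiting
//     track.write(buffer, 0, readBytes);
// }

In streaming mode, stop() on its own lets the already-written data finish playing, which is why the pause() + flush() combination is usually needed for an immediate stop.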
Here is my code that passes the byte buffer to my Java class:
#include <assert.h>
#include <jni.h>
#include <string.h>
#include <android/log.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#define LOG_TAG "mylib"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define AUDIO_INBUF_SIZE 20480
#define AUDIO_REFILL_THRESH 4096
void Java_ru_dzakhov_ffmpeg_test_MainActivity_createEngine(JNIEnv* env,
jclass clazz) {
avcodec_init();
av_register_all();
}
jstring Java_ru_dzakhov_ffmpeg_test_MainActivity_loadFile(JNIEnv* env,
jobject obj, jstring file, jbyteArray array) {
jboolean isfilenameCopy;
const char * filename = (*env)->GetStringUTFChars(env, file,
&isfilenameCopy);
int audioStreamIndex;
AVCodec *codec;
AVFormatContext * pFormatCtx = NULL;
AVCodecContext * aCodecCtx;
int out_size, len, audioStream = -1, i, err;
FILE *f, *outfile;
uint8_t *outbuf;
uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
AVPacket avpkt;
jclass cls = (*env)->GetObjectClass(env, obj);
jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V"); // look up the Java callback once, at the beginning of the function
LOGE("source file name is %s", filename);
avcodec_init();
av_register_all();
LOGE("Stage 1");
/* open the source file and read its format information into the AVFormatContext */
int lError;
if ((lError = av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL))
!= 0) {
LOGE("Error open source file: %d", lError);
exit(1);
}
if ((lError = av_find_stream_info(pFormatCtx)) < 0) {
LOGE("Error find stream information: %d", lError);
exit(1);
}
LOGE("Stage 1.5");
LOGE("audio format: %s", pFormatCtx->iformat->name);
LOGE("audio bitrate: %d", pFormatCtx->bit_rate);
audioStreamIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_AUDIO,
-1, -1, &codec, 0);
if (audioStreamIndex < 0 || codec == NULL) {
LOGE("no audio stream / decoder found: %d", audioStreamIndex);
exit(1);
}
LOGE("audio codec: %s", codec->name);
/* get the codec context of the audio stream */
aCodecCtx = pFormatCtx->streams[audioStreamIndex]->codec;
if (avcodec_open(aCodecCtx, codec) < 0) {
LOGE("cannot open the audio codec!");
exit(1);
}
printf("Audio decoding\n");
LOGE("Stage 1.7");
LOGE("S");
codec = avcodec_find_decoder(aCodecCtx->codec_id);
LOGE("Stage 1.8");
if (!codec) {
LOGE("codec not found\n");
exit(1);
}
LOGE("Stage 2");
// c= avcodec_alloc_context();
LOGE("Stage 3");
/* open it */
if (avcodec_open(aCodecCtx, codec) < 0) {
LOGE("could upper");
fprintf(stderr, "could not open codec\n");
LOGE("could not open codec");
}
LOGE("Stage 4");
outbuf = malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE);
f = fopen(filename, "rb");
if (!f) {
fprintf(stderr, "could not open %s\n", filename);
LOGE("could not open");
exit(1);
}
/* decode until eof */
avpkt.data = inbuf;
avpkt.size = fread(inbuf, 1, AUDIO_INBUF_SIZE, f);
LOGE("Stage 5");
while (avpkt.size > 0) {
// LOGE("Stage 6");
out_size = (AVCODEC_MAX_AUDIO_FRAME_SIZE / 3) * 2;
len = avcodec_decode_audio3(aCodecCtx, (int16_t *) outbuf,
&out_size, &avpkt);
LOGE("data_size %d len %d", out_size, len);
if (len < 0) {
fprintf(stderr, "Error while decoding\n");
LOGE("DECODING ERROR");
LOGE("DECODING ERROR %d", len);
exit(1);
}
// LOGE("Stage 7");
if (out_size > 0) {
/* a frame was decoded: copy it into the Java byte[] and hand it to playSound() */
jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
jsize capacity = (*env)->GetArrayLength(env, array);
if (out_size > capacity)
out_size = capacity; /* never write past the end of the Java buffer */
memcpy(bytes, outbuf, out_size);
(*env)->ReleaseByteArrayElements(env, array, bytes, 0);
(*env)->CallVoidMethod(env, obj, play, array, out_size);
}
LOGE("Stage 9");
avpkt.size -= len;
avpkt.data += len;
if (avpkt.size < AUDIO_REFILL_THRESH) {
/* Refill the input buffer, to avoid trying to decode
* incomplete frames. Instead of this, one could also use
* a parser, or use a proper container format through
* libavformat. */
memmove(inbuf, avpkt.data, avpkt.size);
avpkt.data = inbuf;
len = fread(avpkt.data + avpkt.size, 1,
AUDIO_INBUF_SIZE - avpkt.size, f);
if (len > 0)
avpkt.size += len;
}
}
LOGE("Stage 12");
fclose(f);
free(outbuf);
avcodec_close(c);
av_free(c);
}
}
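For completeness, here is roughly what the Java side looks like. This is a sketch based on the JNI signatures above: only the playSound(byte[], int) method and the two native declarations are fixed by the native code; the library name, the buffer size, and the file path are assumptions of mine.

package ru.dzakhov.ffmpeg.test;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;

public class MainActivity extends Activity {

    static {
        System.loadLibrary("mylib");   // assumption: the NDK module name
    }

    private static native void createEngine();                  // matches Java_..._createEngine
    private native String loadFile(String file, byte[] array);  // matches Java_..._loadFile

    private AudioTrack track;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        int bufferSize = AudioTrack.getMinBufferSize(44100,
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
        track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
                bufferSize, AudioTrack.MODE_STREAM);
        track.play();

        createEngine();

        // The native code memcpy()s up to AVCODEC_MAX_AUDIO_FRAME_SIZE bytes into this
        // array, so it must be at least that large (192000 in the old FFmpeg headers).
        final byte[] shared = new byte[192000];
        new Thread(new Runnable() {
            public void run() {
                loadFile("/sdcard/test.mp3", shared);   // hypothetical path
            }
        }).start();
    }

    // Called from native code via CallVoidMethod() for every decoded chunk of PCM data.
    public void playSound(byte[] buffer, int size) {
        track.write(buffer, 0, size);
    }
}

Decoding runs on a worker thread because loadFile() blocks until the whole file has been decoded; track.write() in MODE_STREAM also blocks when the track's buffer is full, which is what paces the loop.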