I have IP camera streaming working with FFmpeg, using this library: https://github.com/kolyvan/kxmovie. I am also recording the incoming IP camera stream, again with FFmpeg.
Now I am facing an issue: as soon as I add R5ProStreaming.framework to the project and run the application on a real device, it crashes at the line
if (avformat_find_stream_info(formatCtx, NULL) < 0)
marked below. When I remove the framework (move it to the trash) and run again, everything works fine.
- (kxMovieError) openInput: (NSString *) path
{
AVFormatContext *formatCtx = NULL;
if (_interruptCallback) {
formatCtx = avformat_alloc_context();
if (!formatCtx)
return kxMovieErrorOpenFile;
AVIOInterruptCB cb = {interrupt_callback, (__bridge void *)(self)};
formatCtx->interrupt_callback = cb;
}
AVDictionary *opts = NULL;
// Force RTSP over TCP so the camera stream is not affected by UDP packet loss.
av_dict_set(&opts, "rtsp_transport", "tcp", 0);
if (avformat_open_input(&formatCtx, [path cStringUsingEncoding: NSUTF8StringEncoding], NULL, &opts) < 0) {
av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
av_dict_free(&opts);
if (formatCtx)
avformat_free_context(formatCtx);
return kxMovieErrorOpenFile;
}
av_dict_free(&opts);
//-----APP IS GETTING CRASHED HERE AND GIVING EXC_BAD_ACCESS---//
if (avformat_find_stream_info(formatCtx, NULL) < 0)
{
avformat_close_input(&formatCtx);
return kxMovieErrorStreamInfoNotFound;
}
//
av_dump_format(formatCtx, 0, [path.lastPathComponent cStringUsingEncoding: NSUTF8StringEncoding], false);
_formatCtx = formatCtx;
inputFormatCtx = _formatCtx;
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"newdemo" ofType:@"mov"]; // extension without the leading dot
if (filePath)
{
NSLog(@"%s - %d # File found", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # File NOT found", __PRETTY_FUNCTION__, __LINE__);
}
/*
* av_find_input_format(const char *short_name)
*
* Find AVInputFormat based on the short name of the input format.
*/
AVInputFormat *inputFormat = av_find_input_format([@"mpeg" UTF8String]);
if (inputFormat)
{
NSLog(@"%s - %d # inputFormat identifed", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # inputFormat NOT identifed", __PRETTY_FUNCTION__, __LINE__) ;
}
const char *utf8FilePath = [filePath UTF8String];
NSLog(@"%s - %d # utf8FilePath = %s", __PRETTY_FUNCTION__, __LINE__, utf8FilePath);
/*
* avformat_open_input(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options)
*
* Open an input stream and read the header. The codecs are not opened.
*/
int openInputValue = 0; // the input was already opened into formatCtx above, so there is no second avformat_open_input call here
NSLog(@"%s - %d # openInputValue = %d", __PRETTY_FUNCTION__, __LINE__, openInputValue);
if (openInputValue == 0)
{
NSLog(@"%s - %d # Can open the file", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Cannot open the file", __PRETTY_FUNCTION__, __LINE__);
avformat_close_input(&inputFormatCtx);
}
/*
* Read packets of a media file to get stream information.
*
* avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)
*/
// int streamInfoValue = avformat_find_stream_info(inputFormatCtx, NULL);
// NSLog(@"%s - %d # streamInfoValue = %d", __PRETTY_FUNCTION__, __LINE__, streamInfoValue);
//
// if (streamInfoValue < 0)
// {
// NSLog(@"%s - %d # streamInfoValue Error", __PRETTY_FUNCTION__, __LINE__);
// avformat_close_input(&inputFormatCtx);
// }
/*
* nb_streams : Number of Audio and Video streams of the input file
*/
NSUInteger inputStreamCount = inputFormatCtx->nb_streams;
NSLog(@"%s - %d # inputStreamCount = %lu", __PRETTY_FUNCTION__, __LINE__, (unsigned long)inputStreamCount);
for(unsigned int i = 0; i<inputStreamCount; i++)
{
if(inputFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
NSLog(@"%s - %d # Found Video Stream", __PRETTY_FUNCTION__, __LINE__);
inputVideoStreamIndex = i;
inputVideoStream = inputFormatCtx->streams[i];
}
if(inputFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
{
NSLog(@"%s - %d # Found Audio Stream", __PRETTY_FUNCTION__, __LINE__);
inputAudioStreamIndex = i;
inputAudioStream = inputFormatCtx->streams[i];
}
}
if(inputVideoStreamIndex == -1 && inputAudioStreamIndex == -1)
{
NSLog(@"%s - %d # Have not found any Video or Audio stream", __PRETTY_FUNCTION__, __LINE__);
}
/*
* Finding duration of the stream
*/
if(inputFormatCtx->duration == AV_NOPTS_VALUE)
{
NSLog(@"%s - %d # Undefined timestamp value", __PRETTY_FUNCTION__, __LINE__);
if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
{
// if(inputFormatCtx->streams[_videoStream]->duration != AV_NOPTS_VALUE)
// {
inputEndtimeInt64 = (inputFormatCtx->streams[_videoStream]->duration)/(inputFormatCtx->streams[_videoStream]->time_base.den/inputFormatCtx->streams[_videoStream]->time_base.num);
// }
// else
// {
// inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
//
// }
}
else if(_audioStream != -1 && inputFormatCtx->streams[_audioStream])
{
// if(inputFormatCtx->streams[_audioStream]->duration != AV_NOPTS_VALUE)
// {
inputEndtimeInt64 = (inputFormatCtx->streams[_audioStream]->duration)/(inputFormatCtx->streams[_audioStream]->time_base.den/inputFormatCtx->streams[_audioStream]->time_base.num);
// }
// else
// {
// inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
//
// }
}
}
else
{
NSLog(@"%s - %d # Defined timestamp value", __PRETTY_FUNCTION__, __LINE__);
inputEndtimeInt64 = (inputFormatCtx->duration)/(AV_TIME_BASE);
}
NSLog(@"%s - %d # inputEndtimeInt64 = %lld", __PRETTY_FUNCTION__, __LINE__, inputEndtimeInt64);
/*
* Finding out the frame rate
*/
if(_videoStream != -1 && inputFormatCtx->streams[_videoStream])
{
framesPerSec = (inputFormatCtx->streams[_videoStream]->r_frame_rate.num)/ (inputFormatCtx->streams[_videoStream]->r_frame_rate.den);
}
else
{
framesPerSec = 24;
}
numberOfFrames = framesPerSec * (int) inputEndtimeInt64;
NSLog(@"%s - %d # numberOfFrames = %d", __PRETTY_FUNCTION__, __LINE__, numberOfFrames);
/*
* Seek to timestamp ts.
*
* avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags)
*/
if(avformat_seek_file(inputFormatCtx, inputAudioStreamIndex, INT64_MIN, outputStartTimeInt64, INT64_MAX, AVSEEK_FLAG_FRAME) >= 0)
{
NSLog(@"%s - %d # Seek OK", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Seek ERROR", __PRETTY_FUNCTION__, __LINE__);
}
/*
* Creating output file path1
*/
// NSString * timestamp = [NSString stringWithFormat:@"%f",[[NSDate date] timeIntervalSince1970] * 1000];
//
// NSArray *directoryPathsArray = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *documentsDirectory = [directoryPathsArray objectAtIndex:0];
// NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@.mov",documentsDirectory,timestamp]; // Not working if we replace .avi with .mp4
NSString* filename = [NSString stringWithFormat:@"IPCamera%d.mov", _currentFile];
NSString* outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
_url = [NSURL fileURLWithPath:outputFilePath];
/*
* Return the output format in the list of registered output formats
* which best matches the provided parameters, or return NULL if
* there is no match.
*
* av_guess_format(const char *short_name, const char *filename, const char *mime_type)
*/
outputFormat = av_guess_format(NULL, [outputFilePath UTF8String], NULL);
if(outputFormat == NULL)
{
NSLog(@"%s - %d # outputFormat == NULL", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # outputFormat->name = %s", __PRETTY_FUNCTION__, __LINE__, outputFormat->name);
/*
* Allocate an AVFormatContext.
*/
outputContext = avformat_alloc_context();
if(outputContext)
{
outputContext->oformat = outputFormat; // The output container format.
snprintf(outputContext->filename, sizeof(outputContext->filename), "%s", [outputFilePath UTF8String]);
}
else
{
NSLog(@"%s - %d # outputContext == NULL", __PRETTY_FUNCTION__, __LINE__);
}
}
outputVideoCodec = outputAudioCodec = NULL;
/*
* video_codec = default video codec
*/
if(outputFormat->video_codec != AV_CODEC_ID_NONE && inputVideoStream != NULL)
{
/*
* Find a registered encoder with a matching codec ID.
*
* avcodec_find_encoder(enum AVCodecID id)
*/
outputVideoCodec = avcodec_find_encoder(outputFormat->video_codec);
if(NULL == outputVideoCodec)
{
NSLog(@"%s - %d # Could Not Find Vid Encoder", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Found Out Vid Encoder", __PRETTY_FUNCTION__, __LINE__);
/*
* Add a new stream to a media file.
*
* avformat_new_stream(AVFormatContext *s, const AVCodec *c)
*/
outputVideoStream = avformat_new_stream(outputContext, outputVideoCodec);
if(NULL == outputVideoStream)
{
NSLog(@"%s - %d # Failed to Allocate Output Vid Strm", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Allocated Video Stream", __PRETTY_FUNCTION__, __LINE__);
/*
* Copy the settings of the source AVCodecContext into the destination AVCodecContext.
*
* avcodec_copy_context(AVCodecContext *dest, const AVCodecContext *src)
*/
if(avcodec_copy_context(outputVideoStream->codec, inputFormatCtx->streams[inputVideoStreamIndex]->codec) != 0)
{
NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
}
else
{
AVStream *st = _formatCtx->streams[_videoStream];
outputVideoStream->sample_aspect_ratio.den = outputVideoStream->codec->sample_aspect_ratio.den; // denominator
outputVideoStream->sample_aspect_ratio.num = st->codec->sample_aspect_ratio.num; // numerator
NSLog(@"%s - %d # Copied Context 1", __PRETTY_FUNCTION__, __LINE__);
outputVideoStream->codec->codec_id = st->codec->codec_id;
outputVideoStream->codec->time_base.num = st->codec->time_base.num;
outputVideoStream->codec->time_base.den = STREAM_FRAME_RATE;
outputVideoStream->time_base.num = st->time_base.num;
outputVideoStream->time_base.den = st->time_base.den;
outputVideoStream->r_frame_rate.num =st->r_frame_rate.num;
outputVideoStream->nb_frames = STREAM_NB_FRAMES;
outputVideoStream->r_frame_rate.den = st->r_frame_rate.den;
outputVideoStream->avg_frame_rate.den = st->avg_frame_rate.den;
outputVideoStream->avg_frame_rate.num = st->avg_frame_rate.num;
// outputVideoStream->duration = st->duration;
}
}
}
}
if(outputFormat->audio_codec != AV_CODEC_ID_NONE && inputAudioStream != NULL)
{
outputAudioCodec = avcodec_find_encoder(outputFormat->audio_codec);
if(NULL == outputAudioCodec)
{
NSLog(@"%s - %d # Could Not Find Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Found Out Aud Encoder", __PRETTY_FUNCTION__, __LINE__);
outputAudioStream = avformat_new_stream(outputContext, outputAudioCodec);
if(NULL == outputAudioStream)
{
NSLog(@"%s - %d # Failed to Allocate Out Vid Strm", __PRETTY_FUNCTION__, __LINE__);
}
else
{
if(avcodec_copy_context(outputAudioStream->codec, inputFormatCtx->streams[inputAudioStreamIndex]->codec) != 0)
{
NSLog(@"%s - %d # Failed to Copy Context", __PRETTY_FUNCTION__, __LINE__);
}
else
{
// AVStream *st = _formatCtx->streams[_audioStream];
NSLog(@"%s - %d # Copied Context 2", __PRETTY_FUNCTION__, __LINE__);
outputAudioStream->codec->codec_id = inputAudioStream->codec->codec_id;
outputAudioStream->codec->codec_tag = 0;
// outputAudioStream->pts = inputAudioStream->pts;
// outputAudioStream->duration = inputAudioStream->duration;
outputAudioStream->time_base.num = inputAudioStream->time_base.num;
outputAudioStream->time_base.den = inputAudioStream->time_base.den;
}
}
}
}
if (!(outputFormat->flags & AVFMT_NOFILE))
{
/*
* Create and initialize a AVIOContext for accessing the resource indicated by url.
*
* avio_open2(AVIOContext **s, const char *url, int flags, const AVIOInterruptCB *int_cb, AVDictionary **options)
*/
if (avio_open2(&outputContext->pb, [outputFilePath UTF8String], AVIO_FLAG_WRITE, NULL, NULL) < 0)
{
NSLog(@"%s - %d # Could Not Open File", __PRETTY_FUNCTION__, __LINE__);
}
}
/* Write the stream header, if any. */
/*
* Allocate the stream private data and write the stream header to an output media file.
*
* avformat_write_header(AVFormatContext *s, AVDictionary **options);
*/
if (avformat_write_header(outputContext, NULL) < 0)
{
NSLog(@"%s - %d # Error Occurred While Writing Header", __PRETTY_FUNCTION__, __LINE__);
}
else
{
NSLog(@"%s - %d # Written Output header", __PRETTY_FUNCTION__, __LINE__);
initDone = true;
}
return kxMovieErrorNone;
}
Moreover, I contacted the Red5Pro team about this and sent them a video demo. They replied:
What's most likely happening is that the version of FFMPEG that's being loaded by that project is incompatible with the customized version of it that's embedded in our SDK, and some duplicate definition error is causing the wrong version to be loaded. It could also be any number of conflicts between one of the libraries in that project with the SDK, or one of the support libraries that the sdk requires ( I have to assume that since it compiled, that you did add the libraries listed in step four here: https://www.red5pro.com/docs/streaming/ios.html#project-setup ) and if that's the case, I don't know of a good way to correct the issue as chasing down individual third-party libraries that raise incompatibilities with our SDKs to correct them is beyond the reach of our team.
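To test that theory, I guess I can log which libavformat/libavcodec build is actually resolved at runtime and compare it with the versions my FFmpeg headers were compiled against. A minimal diagnostic sketch (avformat_version(), avcodec_version() and avformat_configuration() are standard FFmpeg calls; the helper name is mine):
#import <Foundation/Foundation.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
static void LogFFmpegVersions(void)
{
    // Versions the headers were compiled against vs. the versions of the
    // libraries that are actually linked at runtime. A mismatch would support
    // the "wrong FFmpeg build is being loaded" theory.
    NSLog(@"libavformat compiled: %u  runtime: %u",
          (unsigned)LIBAVFORMAT_VERSION_INT, avformat_version());
    NSLog(@"libavcodec  compiled: %u  runtime: %u",
          (unsigned)LIBAVCODEC_VERSION_INT, avcodec_version());
    NSLog(@"libavformat configuration: %s", avformat_configuration());
}
Calling this once before openInput:, with and without R5ProStreaming.framework added, should show whether the numbers change.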
Does anybody have an idea where to look?
Thanks
Request the source code of the customized version of FFmpeg that is embedded in the Red5Pro SDK, and build your project against that version to eliminate the incompatibilities. Under FFmpeg's GPL/LGPL licensing they are required to give you the modified source code.
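Before chasing the source code, you can also confirm which binary image those FFmpeg symbols are being resolved from: dladdr() from <dlfcn.h> reports the path of the loaded image that contains a given function pointer. A quick sketch (plain Darwin/FFmpeg APIs; the helper name is just for illustration):
#import <Foundation/Foundation.h>
#include <dlfcn.h>
#include <libavformat/avformat.h>
static void LogImageForFindStreamInfo(void)
{
    Dl_info info;
    // Ask the dynamic linker which loaded image contains avformat_find_stream_info.
    if (dladdr((const void *)avformat_find_stream_info, &info) != 0) {
        // If this prints a path inside R5ProStreaming.framework rather than the
        // app binary / kxmovie's FFmpeg, the calls are being resolved into the
        // SDK's embedded copy, which would explain the EXC_BAD_ACCESS.
        NSLog(@"avformat_find_stream_info resolved from: %s", info.dli_fname);
    } else {
        NSLog(@"dladdr could not resolve avformat_find_stream_info");
    }
}
Run it once with and once without the framework linked and compare the reported paths.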