I need to seek around an audio file and pull out chunks of samples. I am trying to use AVAssetReader. The bug I am seeing is that if I read the same stretch of audio starting from different offsets, the average values (chunks) I get are different.
For example, if I read the audio from 0.1s to 0.5s, the chunks I receive are different from the ones I get when I read from 0.2s to 0.5s.
The following is a code sample that demonstrates it
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>
/// Runs the chunk-averaging demo three times, each starting one CHUNK later
/// and printing one fewer chunk, so the printed averages for overlapping
/// time ranges can be compared by eye.
+ (void)test
{
    NSURL *musicURL = [[NSBundle mainBundle] URLForResource:@"music" withExtension:@"mp3"];
    // sample 1 → 5 chunks, sample 2 → 4 chunks, sample 3 → 3 chunks
    for (NSInteger offset = 1; offset <= 3; offset++) {
        [self test:musicURL sample:offset showChunks:6 - offset];
    }
}
/// Reads decoded PCM starting at (sample * CHUNK) / SAMPLE_RATE seconds and
/// prints the average byte value of each of the first `chunkCount` chunks.
///
/// @param url        Audio file to read (e.g. an mp3 in the bundle).
/// @param sample     Start offset, expressed in whole CHUNKs.
/// @param chunkCount Number of CHUNK-sized averages to print.
+ (void)test:(NSURL *)url sample:(NSInteger)sample showChunks:(NSInteger)chunkCount
{
#define CHUNK 800
#define SAMPLE_RATE 8000
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&assetError];
    if (!assetReader) {
        // BUG FIX: assetError was captured but never checked.
        NSLog(@"error: cannot create asset reader: %@", assetError);
        return;
    }
    CMTime startTime = CMTimeMake(sample * CHUNK, SAMPLE_RATE);
    CMTimeShow(startTime);
    // NOTE(review): timeRange is not an exact seek — the reader starts at the
    // nearest decodable boundary, which is the likely cause of the differing
    // averages. The output presentation timestamp below shows where it landed.
    assetReader.timeRange = CMTimeRangeMake(startTime, kCMTimePositiveInfinity);
    // NOTE(review): these settings do not pin bit depth or interleaving; if the
    // decoded output is 16-bit, treating the bytes as UInt8 samples below halves
    // the effective time scale — confirm against the format description.
    NSDictionary *audioSettings = @{ AVFormatIDKey : @(kAudioFormatLinearPCM),
                                     AVSampleRateKey : @(SAMPLE_RATE) };
    AVAssetReaderAudioMixOutput *assetReaderOutput =
        [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:asset.tracks
                                                                audioSettings:audioSettings];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"error: Cannot add output reader");
        return;
    }
    [assetReader addOutput:assetReaderOutput];
    if (![assetReader startReading]) {
        // BUG FIX: startReading can fail; the original ignored its result.
        NSLog(@"error: cannot start reading: %@", assetReader.error);
        return;
    }
    CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
    if (!nextBuffer) {
        return;
    }
    // Shows the *actual* start time of the decoded data, so it can be compared
    // against the requested startTime.
    CMTimeShow(CMSampleBufferGetOutputPresentationTimeStamp(nextBuffer));
    // Extract bytes from the buffer.
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(nextBuffer);
    NSInteger len = CMBlockBufferGetDataLength(dataBuffer);
    if (len < chunkCount * CHUNK) {
        printf("CHUNK is too big\n");   // BUG FIX: typo ("to") and missing newline
        CFRelease(nextBuffer);          // BUG FIX: buffer was leaked on this path
        return;
    }
    UInt8 *buf = malloc(len);
    if (!buf) {                         // BUG FIX: malloc result was unchecked
        CFRelease(nextBuffer);
        return;
    }
    CMBlockBufferCopyDataBytes(dataBuffer, 0, len, buf);
    for (int ii = 0; ii < chunkCount * CHUNK; ii += CHUNK) {
        CGFloat av = 0;
        for (int jj = 0; jj < CHUNK; jj++) {
            av += (CGFloat)buf[jj + ii];
        }
        printf("Time: %f av: %f\n",
               (CGFloat)(ii + CHUNK * sample) / (CGFloat)SAMPLE_RATE,
               av / (CGFloat)CHUNK);
    }
    printf("\n");
    free(buf);
    // BUG FIX: copyNextSampleBuffer follows the Create rule — the caller owns
    // the buffer and must release it; the original leaked it on every path.
    CFRelease(nextBuffer);
#undef CHUNK
#undef SAMPLE_RATE
}
This is the output
{800/8000 = 0.100}
Time: 0.100000 av: 149.013748
Time: 0.200000 av: 100.323753
Time: 0.300000 av: 146.991257
Time: 0.400000 av: 106.763748
Time: 0.500000 av: 145.020004
{1600/8000 = 0.200}
Time: 0.200000 av: 145.011246
Time: 0.300000 av: 110.718750
Time: 0.400000 av: 154.543747
Time: 0.500000 av: 112.025002
{2400/8000 = 0.300}
Time: 0.300000 av: 149.278748
Time: 0.400000 av: 104.477501
Time: 0.500000 av: 158.162506
Help please
It appears to me that your problem is with assuming the following code accurately seeks to startTime:
CMTimeRange timeRange = CMTimeRangeMake(startTime, kCMTimePositiveInfinity);
assetReader.timeRange = timeRange;
You can test this using a call to
CMSampleBufferGetOutputPresentationTimeStamp(nextBuffer);
From this you will be able to see the exact time (in seconds) of start of the buffer.
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With