I'm using the following code to record with AVAudioRecorder. I also need to access the recorded audio live, about 4 seconds into the recording. However, the file's data is NULL until roughly 6 seconds have elapsed.
Is there a way to force the recorder to flush what it has recorded so far to the file?
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:11025] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];
self.fileURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/recorded.aac", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject]]];
self.recorder = [[AVAudioRecorder alloc] initWithURL:self.fileURL settings:recordSetting error:nil];
self.recorder.meteringEnabled = YES;
self.recorder.delegate = self;
[self.recorder record];
Then, after 4 seconds, I try to read the file into an NSData object:
NSData *songData = [NSData dataWithContentsOfURL:self.recorder.url];
But songData is nil until roughly 6 seconds have passed.
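The read is simply scheduled after a delay, along these lines (the dispatch_after wrapper is only illustrative of how the 4-second check gets triggered):
// Illustrative scheduling of the delayed read -- the exact trigger doesn't matter.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(4 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    NSData *songData = [NSData dataWithContentsOfURL:self.recorder.url];
    NSLog(@"read %lu bytes", (unsigned long)songData.length);
});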
I know it's possible to do this with AudioQueue, but I'd rather not go down that route because it seems like overkill.
Thanks.
Disclaimer: the proper way to do this would be to record your input audio (via AudioUnit/AVAudioEngine/AudioQueue) and encode it yourself with an AudioConverter, although recording on iOS is push while AudioConverter is pull, which doesn't make for very good SO reading. As you've seen, relying on a high-level API like AVAudioRecorder to write its output to its file in a timely fashion while you rummage around in it is not very robust. The following code sample (AVAudioEngine + ExtendedAudioFile) should get you audio data after roughly 1 second.
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/ExtendedAudioFile.h>
// ...
@property (nonatomic) AVAudioEngine *engine;
// ...
NSURL *fileURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/recorded.aac", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject]]];
const Float64 sampleRate = 11025;
// Describe the AAC format to be written to disk.
AudioStreamBasicDescription aacDesc = { 0 };
aacDesc.mSampleRate = sampleRate;
aacDesc.mFormatID = kAudioFormatMPEG4AAC;
aacDesc.mFramesPerPacket = 1024; // AAC always encodes 1024 frames per packet
aacDesc.mChannelsPerFrame = 1;
// ADTS is a streamable container, so the file is readable while it is still being written.
ExtAudioFileRef eaf;
OSStatus err = ExtAudioFileCreateWithURL((__bridge CFURLRef)fileURL, kAudioFileAAC_ADTSType, &aacDesc, NULL, kAudioFileFlags_EraseFile, &eaf);
assert(noErr == err);
self.engine = [[AVAudioEngine alloc] init];
AVAudioInputNode *input = self.engine.inputNode;
const AVAudioNodeBus bus = 0;
AVAudioFormat *micFormat = [input inputFormatForBus:bus];
// Tell the ExtAudioFile what format the incoming (client) buffers are in;
// it converts from this LPCM mic format to AAC on write.
err = ExtAudioFileSetProperty(eaf, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), micFormat.streamDescription);
assert(noErr == err);
// Append each mic buffer to the file; ExtAudioFile does the AAC encoding.
[input installTapOnBus:bus bufferSize:512 format:micFormat block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
    const AudioBufferList *abl = buffer.audioBufferList;
    OSStatus err = ExtAudioFileWrite(eaf, buffer.frameLength, abl);
    assert(noErr == err);
}];
NSError *error;
if (![self.engine startAndReturnError:&error]) {
    NSLog(@"Engine start: %@", error);
}
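Two practical notes (these are additions, not part of the sample above): the input node only delivers microphone audio if the AVAudioSession allows recording, and when you're finished you should remove the tap and dispose of the ExtAudioFile so the last packets are flushed to disk. A rough sketch, assuming eaf has been stashed in an instance variable (called _extFile below) and that stopRecording is a method you add yourself:
// Before starting the engine: make sure the session category permits recording.
AVAudioSession *session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
[session setActive:YES error:nil];

// When you're done: tear down in this order so pending packets get written out.
- (void)stopRecording {
    [self.engine.inputNode removeTapOnBus:0]; // stop delivering buffers to the block
    [self.engine stop];
    ExtAudioFileDispose(_extFile);            // flushes remaining packets and closes the file
}
Because the container is ADTS, you can re-read the partially written file at any time with [NSData dataWithContentsOfURL:fileURL]; it should be non-empty after roughly a second.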