I am using the following code to record audio and write it to a file:
- (void) initializeOutputUnit
{
OSStatus status;
// Describe audio component
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
// Get component
AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
// Get audio units
status = AudioComponentInstanceNew(inputComponent, &mAudioUnit);
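// Note: on the RemoteIO unit the input (mic) element is bus 1 and the output
// (speaker) element is bus 0; kInputBus and kOutputBus are assumed to be
// defined accordingly elsewhere in my code.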
// Enable IO for recording
UInt32 flag = 1;
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Input,
                              kInputBus,
                              &flag,
                              sizeof(flag));
// Enable IO for playback
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Output,
                              kOutputBus,
                              &flag,
                              sizeof(flag));
// Describe format
AudioStreamBasicDescription audioFormat={0};
audioFormat.mSampleRate = kSampleRate;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerPacket = 2;
audioFormat.mBytesPerFrame = 2;
// Apply the stream format: the output scope of the input bus carries mic data
// into the app, and the input scope of the output bus carries app data to the speaker
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Output,
                              kInputBus,
                              &audioFormat,
                              sizeof(audioFormat));
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Input,
                              kOutputBus,
                              &audioFormat,
                              sizeof(audioFormat));
// Set input callback
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = (__bridge void *)self;
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioOutputUnitProperty_SetInputCallback,
                              kAudioUnitScope_Global,
                              kInputBus,
                              &callbackStruct,
                              sizeof(callbackStruct));
// Disable buffer allocation for the recorder (optional - do this if we want to pass in our own)
flag = 0;
status = AudioUnitSetProperty(mAudioUnit,
                              kAudioUnitProperty_ShouldAllocateBuffer,
                              kAudioUnitScope_Output,
                              kInputBus,
                              &flag,
                              sizeof(flag));
// Initialize the audio file
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *destinationFilePath = [[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory];
NSLog(@">>> %@\n", destinationFilePath);
CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (__bridge CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);
OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileCAFType, &audioFormat, NULL, kAudioFileFlags_EraseFile, &mAudioFileRef);
CFRelease(destinationURL);
NSAssert(setupErr == noErr, @"Couldn't create file for writing");
setupErr = ExtAudioFileSetProperty(mAudioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &audioFormat);
NSAssert(setupErr == noErr, @"Couldn't create file for format");
setupErr = ExtAudioFileWriteAsync(mAudioFileRef, 0, NULL);
NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file");
CheckError(AudioUnitInitialize(mAudioUnit), "AudioUnitInitialize");
CheckError(AudioOutputUnitStart(mAudioUnit), "AudioOutputUnitStart");
// [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(stopRecording:) userInfo:nil repeats:NO];
}
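For context, recordingCallback follows the usual pattern of pulling the mic samples with AudioUnitRender and appending them with ExtAudioFileWriteAsync. A minimal sketch of it (the real implementation may differ slightly; MyRecorder is a placeholder for my class, and this assumes the mAudioUnit / mAudioFileRef ivars are accessible from the C callback, e.g. declared @public):

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData)
{
    MyRecorder *recorder = (__bridge MyRecorder *)inRefCon; // placeholder class name
    // Since kAudioUnitProperty_ShouldAllocateBuffer is disabled above, we
    // supply our own buffer (16-bit mono => 2 bytes per frame, matching the ASBD)
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * 2;
    bufferList.mBuffers[0].mData = malloc(inNumberFrames * 2);
    // Pull the mic samples from the input bus
    OSStatus status = AudioUnitRender(recorder->mAudioUnit, ioActionFlags, inTimeStamp,
                                      inBusNumber, inNumberFrames, &bufferList);
    if (status == noErr) {
        // Append the samples to the CAF file opened in initializeOutputUnit
        ExtAudioFileWriteAsync(recorder->mAudioFileRef, inNumberFrames, &bufferList);
    }
    free(bufferList.mBuffers[0].mData);
    return status;
}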
In my application I have video-call functionality built on WebRTC. The code above works fine for mic audio: the microphone input is recorded properly, but I am not able to record the speaker sound (the remote audio stream). Is it possible to record both (mic and speaker) into the same file? If yes, how should the code be modified so that it also records the audio played through the iPhone's speaker?
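For reference, the kind of output tap I am asking about would look something like the hypothetical sketch below, which uses AudioUnitAddRenderNotify to observe whatever this RemoteIO instance renders to the speaker. My doubt is that WebRTC plays the remote stream through its own internal audio unit, so a tap on my unit may never see that audio; also, writing a second stream into the same ExtAudioFile would interleave it with the mic samples rather than mix them (presumably a mixer such as kAudioUnitSubType_MultiChannelMixer would be needed for true mixing):

// Hypothetical post-render tap on this RemoteIO instance's speaker output
static OSStatus playbackTapCallback(void *inRefCon,
                                    AudioUnitRenderActionFlags *ioActionFlags,
                                    const AudioTimeStamp *inTimeStamp,
                                    UInt32 inBusNumber,
                                    UInt32 inNumberFrames,
                                    AudioBufferList *ioData)
{
    MyRecorder *recorder = (__bridge MyRecorder *)inRefCon; // placeholder class name
    // Post-render on the output bus: ioData now holds the samples just sent to the speaker
    if ((*ioActionFlags & kAudioUnitRenderAction_PostRender) && inBusNumber == kOutputBus) {
        ExtAudioFileWriteAsync(recorder->mAudioFileRef, inNumberFrames, ioData);
    }
    return noErr;
}

// Registered once, after AudioUnitInitialize:
// AudioUnitAddRenderNotify(mAudioUnit, playbackTapCallback, (__bridge void *)self);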