AudioUnit 用法
1、描述音频单元
// Describe the audio unit we want: Apple's RemoteIO unit, which provides
// simultaneous access to the device microphone (input) and speaker (output).
AudioComponentDescription desc;
desc.componentType         = kAudioUnitType_Output;          // I/O unit family
desc.componentSubType      = kAudioUnitSubType_RemoteIO;     // hardware mic + speaker
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags        = 0;                              // always 0 when searching
desc.componentFlagsMask    = 0;
2、查找音频单元
// Find the first registered component matching the description above.
// Passing NULL starts the search at the beginning of the system's component list.
AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
3、获取音频单元实例
// Instantiate the RemoteIO unit from the component found above.
// NOTE(review): `status` is never checked anywhere in this walkthrough —
// each OSStatus result should be verified (== noErr) before proceeding.
status = AudioComponentInstanceNew(inputComponent, &audioUnit);
4、启用录制功能、启用播放功能
// Enable I/O on both sides of the RemoteIO unit. Convention:
//   recording -> input scope  of the input  bus (kInputBus, mic side)
//   playback  -> output scope of the output bus (kOutputBus, speaker side)
UInt32 flag = 1;

// Enable recording (disabled by default on RemoteIO).
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Input,
                              kInputBus,
                              &flag,
                              sizeof(flag));

// Enable playback (already on by default, but set explicitly for clarity).
// NOTE(review): this overwrites the previous `status` without checking it.
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Output,
                              kOutputBus,
                              &flag,
                              sizeof(flag));
5、音频流描述
// Describe the PCM format used for both capture and playback:
// 44.1 kHz, mono, 16-bit signed integer, packed (interleaved) samples.
AudioStreamBasicDescription audioFormat;
audioFormat.mSampleRate       = 44100.00;
audioFormat.mFormatID         = kAudioFormatLinearPCM;
audioFormat.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket  = 1;   // uncompressed PCM: one frame per packet
audioFormat.mChannelsPerFrame = 1;   // mono
audioFormat.mBitsPerChannel   = 16;
audioFormat.mBytesPerFrame    = 2;   // 1 channel * 16 bits = 2 bytes
audioFormat.mBytesPerPacket   = 2;   // bytes/frame * frames/packet
6、应用录制和播放的音频流描述
// Apply the format on the application-facing side of each bus.
// The scopes look inverted but are correct: the app reads captured audio from
// the OUTPUT scope of the input bus, and feeds playback audio into the INPUT
// scope of the output bus.
status = AudioUnitSetProperty(audioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Output,   // data coming OUT of the mic bus
                              kInputBus,
                              &audioFormat,
                              sizeof(audioFormat));
status = AudioUnitSetProperty(audioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Input,    // data going IN to the speaker bus
                              kOutputBus,
                              &audioFormat,
                              sizeof(audioFormat));
7、设置回调
// Register the two render-thread callbacks. The same struct is reused; the
// audio unit copies its contents on each SetProperty call, so reuse is safe.
AURenderCallbackStruct callbackStruct;

// Called whenever the mic has captured a new buffer of audio.
callbackStruct.inputProc       = recordingCallback;
callbackStruct.inputProcRefCon = self;
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_SetInputCallback,
                              kAudioUnitScope_Global,
                              kInputBus,
                              &callbackStruct,
                              sizeof(callbackStruct));

// Called whenever the speaker needs the next buffer of audio to play.
callbackStruct.inputProc       = playbackCallback;
callbackStruct.inputProcRefCon = self;
status = AudioUnitSetProperty(audioUnit,
                              kAudioUnitProperty_SetRenderCallback,
                              kAudioUnitScope_Global,
                              kOutputBus,
                              &callbackStruct,
                              sizeof(callbackStruct));
8、回调方法
/// Render-thread callback invoked when the mic has captured `inNumberFrames`
/// of audio. Pulls the data out of the audio unit via AudioUnitRender.
/// @param inRefCon  The view controller passed as inputProcRefCon above.
/// @return noErr on success, or the AudioUnitRender error code.
static OSStatus recordingCallback(void                       *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp       *inTimeStamp,
                                  UInt32                      inBusNumber,
                                  UInt32                      inNumberFrames,
                                  AudioBufferList            *ioData) {
    // BUG FIX: the original passed an *uninitialized* AudioBufferList pointer to
    // AudioUnitRender — undefined behavior. Use a stack-allocated, one-buffer
    // list instead. mData = NULL asks the audio unit to supply its own buffer.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers              = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;                                // mono, matches mChannelsPerFrame
    bufferList.mBuffers[0].mDataByteSize   = inNumberFrames * sizeof(SInt16);  // 16-bit samples
    bufferList.mBuffers[0].mData           = NULL;                             // let the unit allocate

    OSStatus status = AudioUnitRender([(shockmanViewController *)inRefCon audioUnit],
                                      ioActionFlags,
                                      inTimeStamp,
                                      inBusNumber,
                                      inNumberFrames,
                                      &bufferList);
    if (status != noErr) {
        // BUG FIX: the original discarded the error and always returned noErr.
        return status;
    }
    // TODO(review): the captured PCM is now in bufferList.mBuffers[0].mData —
    // consume it here (write to file, ring buffer, encoder, ...).
    return noErr;
}

/// Render-thread callback invoked when the speaker needs `inNumberFrames` of
/// audio. Copies encoded sample data into the output buffer until the queue
/// (THIS->qIndex / THIS->qSize) is exhausted.
/// NOTE(review): `THIS` and `audioProtocol` are project-defined symbols not
/// visible here — presumably THIS expands to a cast of inRefCon; confirm.
static OSStatus playbackCallback(void                       *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp       *inTimeStamp,
                                 UInt32                      inBusNumber,
                                 UInt32                      inNumberFrames,
                                 AudioBufferList            *ioData) {
    // Mono 16-bit signed PCM (see the stream format), so only the first buffer
    // is used. BUG FIX: the original cast to the undefined/mismatched type
    // `SAMPLE *` while declaring `UInt16 *`; SInt16 matches
    // kAudioFormatFlagIsSignedInteger with mBitsPerChannel = 16.
    SInt16 *buffer = (SInt16 *)ioData->mBuffers[0].mData;

    UInt32 frame = 0;
    if (THIS->isPlaying) {
        for (; frame < inNumberFrames; frame++) {
            // Emit our own audio data by filling the hardware buffer.
            buffer[frame] = audioProtocol.getEncode()[THIS->qIndex++];
            if (THIS->qIndex >= THIS->qSize) {
                // Queue exhausted: stop playback on the main thread — never
                // touch UI/controller state directly from the render thread.
                [THIS performSelectorOnMainThread:@selector(stopToPlay)
                                       withObject:nil
                                    waitUntilDone:NO];
                frame++;  // the frame just written counts as filled
                break;
            }
        }
    }
    // BUG FIX: zero any frames we did not fill; otherwise stale buffer
    // contents are played back as audible noise.
    if (frame < inNumberFrames) {
        memset(buffer + frame, 0, (inNumberFrames - frame) * sizeof(SInt16));
    }
    return noErr;
}