
但它没有按预期工作.
以下是我使用的代码:
// Initializes a RemoteIO audio unit for simultaneous mic capture and playback,
// then configures the (deprecated) AudioSession for PlayAndRecord with a
// Bluetooth A2DP route override.
// NOTE(review): the scrape had corrupted identifiers and several
// AudioUnitSetProperty calls with missing arguments; restored to the canonical
// 6-argument form (unit, propertyID, scope, element, data, dataSize).
- (id)init {
    self = [super init];
    if (self) {
        OSStatus status;

        // Describe the RemoteIO audio component.
        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;

        // Find and instantiate the component.
        AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
        status = AudioComponentInstanceNew(inputComponent, &audioUnit);
        checkStatus(status);

        // Enable IO for recording (input scope of the input bus).
        UInt32 flag = 1;
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioOutputUnitProperty_EnableIO,
                                      kAudioUnitScope_Input,
                                      kInputBus,
                                      &flag,
                                      sizeof(flag));
        checkStatus(status);

        // Enable IO for playback (output scope of the output bus).
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioOutputUnitProperty_EnableIO,
                                      kAudioUnitScope_Output,
                                      kOutputBus,
                                      &flag,
                                      sizeof(flag));
        checkStatus(status);

        // Describe the stream format: 44.1 kHz, mono, 16-bit signed integer LPCM.
        AudioStreamBasicDescription audioFormat;
        audioFormat.mSampleRate       = 44100.0;
        audioFormat.mFormatID         = kAudioFormatLinearPCM;
        audioFormat.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        audioFormat.mFramesPerPacket  = 1;
        audioFormat.mChannelsPerFrame = 1;
        audioFormat.mBitsPerChannel   = 16;
        audioFormat.mBytesPerPacket   = 2;
        audioFormat.mBytesPerFrame    = 2;

        // Apply the format to the data we receive from the mic
        // (output scope of the input bus) ...
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Output,
                                      kInputBus,
                                      &audioFormat,
                                      sizeof(audioFormat));
        checkStatus(status);
        // ... and to the data we supply for playback (input scope of the output bus).
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Input,
                                      kOutputBus,
                                      &audioFormat,
                                      sizeof(audioFormat));
        checkStatus(status);

        // Install the input (recording) callback on the input bus.
        AURenderCallbackStruct callbackStruct;
        callbackStruct.inputProc = recordingCallback;
        callbackStruct.inputProcRefCon = (__bridge void *)(self);
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioOutputUnitProperty_SetInputCallback,
                                      kAudioUnitScope_Global,
                                      kInputBus,
                                      &callbackStruct,
                                      sizeof(callbackStruct));
        checkStatus(status);

        // Install the output (playback) render callback on the output bus.
        callbackStruct.inputProc = playbackCallback;
        callbackStruct.inputProcRefCon = (__bridge void *)(self);
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioUnitProperty_SetRenderCallback,
                                      kAudioUnitScope_Global,
                                      kOutputBus,
                                      &callbackStruct,
                                      sizeof(callbackStruct));
        checkStatus(status);

        // Disable buffer allocation for the recorder (optional — do this
        // because we pass in our own buffer below).
        flag = 0;
        status = AudioUnitSetProperty(audioUnit,
                                      kAudioUnitProperty_ShouldAllocateBuffer,
                                      kAudioUnitScope_Output,
                                      kInputBus,
                                      &flag,
                                      sizeof(flag));
        NSLog(@"%ld", (long)status);

        // Allocate our own buffer: 1 channel, 16 bits per sample => 2 bytes per
        // frame. In practice the callbacks deliver 512 frames; if that changes
        // it is handled in processAudio.
        tempBuffer.mNumberChannels = 1;
        tempBuffer.mDataByteSize   = 512 * 2;
        tempBuffer.mData           = malloc(512 * 2);

        // Configure the (deprecated) AudioSession: play-and-record category,
        // allow mixing with other audio.
        UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
        status = AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                                         sizeof(audioCategory), &audioCategory);
        NSLog(@"%ld", (long)status);

        UInt32 allowMixing = true;
        status = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers,
                                         sizeof(allowMixing), &allowMixing);

        // FIXME(review): kAudioSessionProperty_OverrideAudioRoute only accepts
        // kAudioSessionOverrideAudioRoute_None/_Speaker;
        // kAudioSessionOutputRoute_BluetoothA2DP is an output-route *query*
        // value, not a valid override, so this call cannot force A2DP output.
        UInt32 audioRouteOverride = kAudioSessionOutputRoute_BluetoothA2DP; // kAudioSessionOverrideAudioRoute_Speaker;
        AudioSessionSetProperty(kAudioSessionProperty_OverrideAudioRoute,
                                sizeof(audioRouteOverride), &audioRouteOverride);

        // Initialize the audio unit; caller starts it with AudioOutputUnitStart.
        status = AudioUnitInitialize(audioUnit);
        checkStatus(status);
    }
    return self;
}
解决方法 刚刚与Apple确认过了：这是不可能实现的。蓝牙的麦克风和扬声器总是成对出现、绑定在一起 ;) 总结 以上是内存溢出为你收集整理的iOS输入麦克风输出蓝牙设备全部内容，希望文章能够帮你解决iOS输入麦克风输出蓝牙设备所遇到的程序开发问题。
如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。
欢迎分享,转载请注明来源:内存溢出
微信扫一扫
支付宝扫一扫
评论列表(0条)