/**
 * @file src/platform/macos/av_audio.m
 * @brief Definitions for audio capture on macOS.
 */
// local includes
#import "av_audio.h"

@implementation AVAudio

/**
 * @brief List all audio capture devices visible to AVFoundation.
 * @return An array of AVCaptureDevice objects (never mutated by callers).
 */
+ (NSArray *)microphones {
  if ([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:((NSOperatingSystemVersion) {10, 15, 0})]) {
    // This will generate a warning about AVCaptureDeviceDiscoverySession being
    // unavailable before macOS 10.15, but we have a guard to prevent it from
    // being called on those earlier systems.
    // Unfortunately the supported way to silence this warning, using @available,
    // produces linker errors for __isPlatformVersionAtLeast, so we have to use
    // a different method.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    AVCaptureDeviceDiscoverySession *discoverySession =
      [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInMicrophone,
                                                                         AVCaptureDeviceTypeExternalUnknown]
                                                             mediaType:AVMediaTypeAudio
                                                              position:AVCaptureDevicePositionUnspecified];
    return discoverySession.devices;
#pragma clang diagnostic pop
  }
  else {
    // We're intentionally using a deprecated API here specifically for versions
    // of macOS where it's not deprecated, so we can ignore any deprecation
    // warnings:
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
#pragma clang diagnostic pop
  }
}

/**
 * @brief Collect the human-readable names of all capture devices.
 * @return An array of NSString localized device names, in discovery order.
 */
+ (NSArray *)microphoneNames {
  NSMutableArray *result = [[NSMutableArray alloc] init];
  for (AVCaptureDevice *device in [AVAudio microphones]) {
    [result addObject:[device localizedName]];
  }
  return result;
}

/**
 * @brief Look up a capture device by its localized name.
 * @param name The exact localized name to match (compared with isEqualToString:).
 * @return The first matching device, or nil if none matches.
 */
+ (AVCaptureDevice *)findMicrophone:(NSString *)name {
  for (AVCaptureDevice *device in [AVAudio microphones]) {
    if ([[device localizedName] isEqualToString:name]) {
      return device;
    }
  }
  return nil;
}

// MRC teardown: this file is compiled without ARC (explicit release/[super dealloc]).
- (void)dealloc {
  // make sure we don't process any further samples
  self.audioConnection = nil;
  // make sure nothing gets stuck on this signal
  [self.samplesArrivedSignal signal];
  [self.samplesArrivedSignal release];
  TPCircularBufferCleanup(&audioSampleBuffer);
  [super dealloc];
}

/**
 * @brief Configure and start an AVCaptureSession for the given microphone.
 * @param device The capture device to record from.
 * @param sampleRate Requested output sample rate in Hz.
 * @param frameSize Currently unused by this method; kept for interface compatibility.
 * @param channels Number of interleaved output channels.
 * @return 0 on success, -1 on any failure.
 */
- (int)setupMicrophone:(AVCaptureDevice *)device sampleRate:(UInt32)sampleRate frameSize:(UInt32)frameSize channels:(UInt8)channels {
  // NOTE(review): if audioCaptureSession is a retain property this +alloc
  // assignment leaks one reference under MRC — confirm the property attributes
  // in av_audio.h before changing.
  self.audioCaptureSession = [[AVCaptureSession alloc] init];

  NSError *error;
  // +deviceInputWithDevice:error: is a factory method, so audioInput is
  // autoreleased — we do not own it and must never release (or dealloc) it.
  AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (audioInput == nil) {
    return -1;
  }

  if ([self.audioCaptureSession canAddInput:audioInput]) {
    [self.audioCaptureSession addInput:audioInput];
  }
  else {
    // Previously this called [audioInput dealloc] directly, which is never
    // valid (dealloc must only ever be invoked by the runtime via release),
    // and we hold no ownership reference to drop here anyway.
    return -1;
  }

  // We own audioOutput (+alloc) and must balance it with release below.
  AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
  [audioOutput setAudioSettings:@{
    (NSString *) AVFormatIDKey: [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM],
    (NSString *) AVSampleRateKey: [NSNumber numberWithUnsignedInt:sampleRate],
    (NSString *) AVNumberOfChannelsKey: [NSNumber numberWithUnsignedInt:channels],
    (NSString *) AVLinearPCMBitDepthKey: [NSNumber numberWithUnsignedInt:32],
    (NSString *) AVLinearPCMIsFloatKey: @YES,
    (NSString *) AVLinearPCMIsNonInterleaved: @NO
  }];

  // The relative-priority argument must be 0 or a negative offset no smaller
  // than QOS_MIN_RELATIVE_PRIORITY (-15). The previous code passed
  // DISPATCH_QUEUE_PRIORITY_HIGH (2), which is out of range and makes
  // dispatch_queue_attr_make_with_qos_class return NULL — silently creating a
  // default-QoS serial queue instead of the intended high-priority one.
  dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT,
                                                                      QOS_CLASS_USER_INITIATED,
                                                                      0);
  dispatch_queue_t recordingQueue = dispatch_queue_create("audioSamplingQueue", qos);
  [audioOutput setSampleBufferDelegate:self queue:recordingQueue];

  if ([self.audioCaptureSession canAddOutput:audioOutput]) {
    [self.audioCaptureSession addOutput:audioOutput];
  }
  else {
    [audioOutput release];
    return -1;
  }

  self.audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
  [self.audioCaptureSession startRunning];

  // Balance our +alloc of audioOutput; the session retains what it needs.
  // (audioInput is autoreleased and intentionally not released here — the
  // previous release was an over-release.)
  [audioOutput release];

  // NOTE(review): like the session above, this +alloc assignment leaks one
  // reference if the property is retain — confirm against av_audio.h.
  self.samplesArrivedSignal = [[NSCondition alloc] init];

  TPCircularBufferInit(&self->audioSampleBuffer, kBufferLength * channels);

  return 0;
}

/**
 * @brief Sample-buffer delegate callback; copies PCM bytes into the ring buffer.
 *
 * Runs on the recording queue. Pushes the interleaved float PCM payload into
 * audioSampleBuffer and wakes any consumer blocked on samplesArrivedSignal.
 */
- (void)captureOutput:(AVCaptureOutput *)output
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  if (connection == self.audioConnection) {
    AudioBufferList audioBufferList;
    CMBlockBufferRef blockBuffer = NULL;
    OSStatus status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
    if (status != noErr) {
      // Nothing was retained on failure; drop this buffer rather than read
      // uninitialized data.
      return;
    }

    // NSAssert(audioBufferList.mNumberBuffers == 1, @"Expected interleaved PCM format but buffer contained %u streams", audioBufferList.mNumberBuffers);

    // this is safe, because an interleaved PCM stream has exactly one buffer,
    // and we don't want to do sanity checks in a performance critical exec path
    AudioBuffer audioBuffer = audioBufferList.mBuffers[0];

    TPCircularBufferProduceBytes(&self->audioSampleBuffer, audioBuffer.mData, audioBuffer.mDataByteSize);

    // "Retained" in the getter's name means we own blockBuffer and must
    // release it — previously this leaked one block buffer per callback.
    CFRelease(blockBuffer);

    [self.samplesArrivedSignal signal];
  }
}

@end