/*
 *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCAudioSession+Private.h"

#import <UIKit/UIKit.h>

#include <algorithm>
#include <vector>

#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
#include "rtc_base/critical_section.h"

#import "RTCAudioSessionConfiguration.h"
#import "base/RTCLogging.h"


NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";

@interface RTCAudioSession ()
@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
@end

// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTCAudioSession {
  rtc::CriticalSection _crit;
  AVAudioSession *_session;
  volatile int _activationCount;
  volatile int _lockRecursionCount;
  volatile int _webRTCSessionCount;
  BOOL _isActive;
  BOOL _useManualAudio;
  BOOL _isAudioEnabled;
  BOOL _canPlayOrRecord;
  BOOL _isInterrupted;
}

@synthesize session = _session;
@synthesize delegates = _delegates;
@synthesize ignoresPreferredAttributeConfigurationErrors =
    _ignoresPreferredAttributeConfigurationErrors;

+ (instancetype)sharedInstance {
  static dispatch_once_t onceToken;
  static RTCAudioSession *sharedInstance = nil;
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (instancetype)init {
  return [self initWithAudioSession:[AVAudioSession sharedInstance]];
}

/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
- (instancetype)initWithAudioSession:(id)audioSession {
  if (self = [super init]) {
    _session = audioSession;

    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(handleInterruptionNotification:)
                   name:AVAudioSessionInterruptionNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleRouteChangeNotification:)
                   name:AVAudioSessionRouteChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereLost:)
                   name:AVAudioSessionMediaServicesWereLostNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleMediaServicesWereReset:)
                   name:AVAudioSessionMediaServicesWereResetNotification
                 object:nil];
    // Posted on the main thread when the primary audio from other applications
    // starts and stops. Foreground applications may use this notification as a
    // hint to enable or disable audio that is secondary.
    [center addObserver:self
               selector:@selector(handleSilenceSecondaryAudioHintNotification:)
                   name:AVAudioSessionSilenceSecondaryAudioHintNotification
                 object:nil];
    // Also track the foreground event so that an interruption can be treated
    // as ended when the application becomes active again.
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:nil];
    [_session addObserver:self
               forKeyPath:kRTCAudioSessionOutputVolumeSelector
                  options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                  context:(__bridge void*)RTCAudioSession.class];

    RTCLog(@"RTCAudioSession (%p): init.", self);
  }
  return self;
}
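
// Illustrative sketch (not part of the original file): how a unit test might
// use -initWithAudioSession: to inject a lightweight fake session. The
// RTCFakeAudioSession class name is hypothetical; WebRTC's own tests use their
// own mocks. KVO registration works because the fake derives from NSObject.
//
//   @interface RTCFakeAudioSession : NSObject
//   @property(nonatomic) double sampleRate;
//   @property(nonatomic) float outputVolume;
//   @end
//
//   RTCFakeAudioSession *fake = [[RTCFakeAudioSession alloc] init];
//   RTCAudioSession *session =
//       [[RTCAudioSession alloc] initWithAudioSession:(id)fake];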

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  [_session removeObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   context:(__bridge void*)RTCAudioSession.class];
  RTCLog(@"RTCAudioSession (%p): dealloc.", self);
}

- (NSString *)description {
  NSString *format =
      @"RTCAudioSession: {\n"
       "  category: %@\n"
       "  categoryOptions: %ld\n"
       "  mode: %@\n"
       "  isActive: %d\n"
       "  sampleRate: %.2f\n"
       "  IOBufferDuration: %f\n"
       "  outputNumberOfChannels: %ld\n"
       "  inputNumberOfChannels: %ld\n"
       "  outputLatency: %f\n"
       "  inputLatency: %f\n"
       "  outputVolume: %f\n"
       "}";
  NSString *description = [NSString stringWithFormat:format,
      self.category, (long)self.categoryOptions, self.mode,
      self.isActive, self.sampleRate, self.IOBufferDuration,
      self.outputNumberOfChannels, self.inputNumberOfChannels,
      self.outputLatency, self.inputLatency, self.outputVolume];
  return description;
}

- (void)setIsActive:(BOOL)isActive {
  @synchronized(self) {
    _isActive = isActive;
  }
}

- (BOOL)isActive {
  @synchronized(self) {
    return _isActive;
  }
}

- (BOOL)isLocked {
  return _lockRecursionCount > 0;
}

- (void)setUseManualAudio:(BOOL)useManualAudio {
  @synchronized(self) {
    if (_useManualAudio == useManualAudio) {
      return;
    }
    _useManualAudio = useManualAudio;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)useManualAudio {
  @synchronized(self) {
    return _useManualAudio;
  }
}

- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
  @synchronized(self) {
    if (_isAudioEnabled == isAudioEnabled) {
      return;
    }
    _isAudioEnabled = isAudioEnabled;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)isAudioEnabled {
  @synchronized(self) {
    return _isAudioEnabled;
  }
}
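
// Illustrative sketch (not part of the original file): a typical manual-audio
// flow when the app controls AVAudioSession activation itself, e.g. from
// CallKit. The properties used are the ones declared above; the surrounding
// call-management code is assumed.
//
//   RTCAudioSession *session = [RTCAudioSession sharedInstance];
//   session.useManualAudio = YES;  // WebRTC will not play or record on its own.
//   session.isAudioEnabled = NO;
//   // ... later, once the system has activated the audio session:
//   session.isAudioEnabled = YES;  // canPlayOrRecord becomes YES and delegates
//                                  // are notified via didChangeCanPlayOrRecord.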

- (void)setIgnoresPreferredAttributeConfigurationErrors:
    (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    if (_ignoresPreferredAttributeConfigurationErrors ==
        ignoresPreferredAttributeConfigurationErrors) {
      return;
    }
    _ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
  }
}

- (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    return _ignoresPreferredAttributeConfigurationErrors;
  }
}

// TODO(tkchin): Check for duplicates.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Adding delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    [self removeZeroedDelegates];
  }
}

- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
  RTCLog(@"Removing delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.erase(std::remove(_delegates.begin(),
                                 _delegates.end(),
                                 delegate),
                     _delegates.end());
    [self removeZeroedDelegates];
  }
}
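
// Illustrative sketch (not part of the original file): a minimal delegate that
// registers for route-change callbacks. The CallAudioObserver class name is
// hypothetical. Note that delegates are stored weakly (see the __weak vector
// above), so the caller must keep a strong reference to the observer.
//
//   @interface CallAudioObserver : NSObject <RTCAudioSessionDelegate>
//   @end
//
//   @implementation CallAudioObserver
//   - (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
//                               reason:(AVAudioSessionRouteChangeReason)reason
//                        previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
//     RTCLog(@"New route: %@", session.currentRoute);
//   }
//   @end
//
//   CallAudioObserver *observer = [[CallAudioObserver alloc] init];
//   [[RTCAudioSession sharedInstance] addDelegate:observer];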

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"

- (void)lockForConfiguration {
  _crit.Enter();
  rtc::AtomicOps::Increment(&_lockRecursionCount);
}

- (void)unlockForConfiguration {
  // Don't let threads other than the one that called lockForConfiguration
  // unlock.
  if (_crit.TryEnter()) {
    rtc::AtomicOps::Decrement(&_lockRecursionCount);
    // One unlock for the tryLock, and another one to actually unlock. If this
    // was called without anyone calling lock, we will hit an assertion.
    _crit.Leave();
    _crit.Leave();
  }
}

#pragma clang diagnostic pop
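
// Illustrative sketch (not part of the original file): every mutating proxy
// call below must be bracketed by the configuration lock, otherwise
// -checkLock: fails with kRTCAudioSessionErrorLockRequired.
//
//   RTCAudioSession *session = [RTCAudioSession sharedInstance];
//   [session lockForConfiguration];
//   NSError *error = nil;
//   if (![session setCategory:AVAudioSessionCategoryPlayAndRecord
//                 withOptions:AVAudioSessionCategoryOptionAllowBluetooth
//                       error:&error]) {
//     RTCLogError(@"Failed to set category: %@", error.localizedDescription);
//   }
//   [session unlockForConfiguration];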

#pragma mark - AVAudioSession proxy methods

- (NSString *)category {
  return self.session.category;
}

- (AVAudioSessionCategoryOptions)categoryOptions {
  return self.session.categoryOptions;
}

- (NSString *)mode {
  return self.session.mode;
}

- (BOOL)secondaryAudioShouldBeSilencedHint {
  return self.session.secondaryAudioShouldBeSilencedHint;
}

- (AVAudioSessionRouteDescription *)currentRoute {
  return self.session.currentRoute;
}

- (NSInteger)maximumInputNumberOfChannels {
  return self.session.maximumInputNumberOfChannels;
}

- (NSInteger)maximumOutputNumberOfChannels {
  return self.session.maximumOutputNumberOfChannels;
}

- (float)inputGain {
  return self.session.inputGain;
}

- (BOOL)inputGainSettable {
  return self.session.inputGainSettable;
}

- (BOOL)inputAvailable {
  return self.session.inputAvailable;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
  return self.session.inputDataSources;
}

- (AVAudioSessionDataSourceDescription *)inputDataSource {
  return self.session.inputDataSource;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
  return self.session.outputDataSources;
}

- (AVAudioSessionDataSourceDescription *)outputDataSource {
  return self.session.outputDataSource;
}

- (double)sampleRate {
  return self.session.sampleRate;
}

- (double)preferredSampleRate {
  return self.session.preferredSampleRate;
}

- (NSInteger)inputNumberOfChannels {
  return self.session.inputNumberOfChannels;
}

- (NSInteger)outputNumberOfChannels {
  return self.session.outputNumberOfChannels;
}

- (float)outputVolume {
  return self.session.outputVolume;
}

- (NSTimeInterval)inputLatency {
  return self.session.inputLatency;
}

- (NSTimeInterval)outputLatency {
  return self.session.outputLatency;
}

- (NSTimeInterval)IOBufferDuration {
  return self.session.IOBufferDuration;
}

- (NSTimeInterval)preferredIOBufferDuration {
  return self.session.preferredIOBufferDuration;
}

// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
            error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  int activationCount = _activationCount;
  if (!active && activationCount == 0) {
    RTCLogWarning(@"Attempting to deactivate without prior activation.");
  }
  [self notifyWillSetActive:active];
  BOOL success = YES;
  BOOL isActive = self.isActive;
  // Keep a local error so we can log it.
  NSError *error = nil;
  BOOL shouldSetActive =
      (active && !isActive) || (!active && isActive && activationCount == 1);
  // Attempt to activate if we're not active.
  // Attempt to deactivate if we're active and it's the last unbalanced call.
  if (shouldSetActive) {
    AVAudioSession *session = self.session;
    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
    // that other audio sessions that were interrupted by our session can return
    // to their active state. It is recommended for VoIP apps to use this
    // option.
    AVAudioSessionSetActiveOptions options =
        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
    success = [session setActive:active
                     withOptions:options
                           error:&error];
    if (outError) {
      *outError = error;
    }
  }
  if (success) {
    if (shouldSetActive) {
      self.isActive = active;
      if (active && self.isInterrupted) {
        self.isInterrupted = NO;
        [self notifyDidEndInterruptionWithShouldResumeSession:YES];
      }
    }
    if (active) {
      [self incrementActivationCount];
    }
    [self notifyDidSetActive:active];
  } else {
    RTCLogError(@"Failed to setActive:%d. Error: %@",
                active, error.localizedDescription);
    [self notifyFailedToSetActive:active error:error];
  }
  // Decrement activation count on deactivation whether or not it succeeded.
  if (!active) {
    [self decrementActivationCount];
  }
  RTCLog(@"Number of current activations: %d", _activationCount);
  return success;
}

- (BOOL)setCategory:(NSString *)category
        withOptions:(AVAudioSessionCategoryOptions)options
              error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setCategory:category withOptions:options error:outError];
}

- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setMode:mode error:outError];
}

- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputGain:gain error:outError];
}

- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredSampleRate:sampleRate error:outError];
}

- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
                               error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredIOBufferDuration:duration error:outError];
}

- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
                                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInputNumberOfChannels:count error:outError];
}

- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
                                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredOutputNumberOfChannels:count error:outError];
}

- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
                          error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session overrideOutputAudioPort:portOverride error:outError];
}

- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
                    error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setPreferredInput:inPort error:outError];
}

- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                     error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setInputDataSource:dataSource error:outError];
}

- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                      error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  return [self.session setOutputDataSource:dataSource error:outError];
}

#pragma mark - Notifications

- (void)handleInterruptionNotification:(NSNotification *)notification {
  NSNumber* typeNumber =
      notification.userInfo[AVAudioSessionInterruptionTypeKey];
  AVAudioSessionInterruptionType type =
      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionInterruptionTypeBegan:
      RTCLog(@"Audio session interruption began.");
      self.isActive = NO;
      self.isInterrupted = YES;
      [self notifyDidBeginInterruption];
      break;
    case AVAudioSessionInterruptionTypeEnded: {
      RTCLog(@"Audio session interruption ended.");
      self.isInterrupted = NO;
      [self updateAudioSessionAfterEvent];
      NSNumber *optionsNumber =
          notification.userInfo[AVAudioSessionInterruptionOptionKey];
      AVAudioSessionInterruptionOptions options =
          optionsNumber.unsignedIntegerValue;
      BOOL shouldResume =
          options & AVAudioSessionInterruptionOptionShouldResume;
      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
      break;
    }
  }
}

- (void)handleRouteChangeNotification:(NSNotification *)notification {
  // Get reason for current route change.
  NSNumber* reasonNumber =
      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
  AVAudioSessionRouteChangeReason reason =
      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
  RTCLog(@"Audio route changed:");
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
      RTCLog(@"Audio route changed: ReasonUnknown");
      break;
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
      RTCLog(@"Audio route changed: NewDeviceAvailable");
      break;
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
      RTCLog(@"Audio route changed: OldDeviceUnavailable");
      break;
    case AVAudioSessionRouteChangeReasonCategoryChange:
      RTCLog(@"Audio route changed: CategoryChange to :%@",
             self.session.category);
      break;
    case AVAudioSessionRouteChangeReasonOverride:
      RTCLog(@"Audio route changed: Override");
      break;
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
      RTCLog(@"Audio route changed: WakeFromSleep");
      break;
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      RTCLog(@"Audio route changed: RouteConfigurationChange");
      break;
  }
  AVAudioSessionRouteDescription* previousRoute =
      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
  // Log previous route configuration.
  RTCLog(@"Previous route: %@\nCurrent route:%@",
         previousRoute, self.session.currentRoute);
  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}

- (void)handleMediaServicesWereLost:(NSNotification *)notification {
  RTCLog(@"Media services were lost.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereLost];
}

- (void)handleMediaServicesWereReset:(NSNotification *)notification {
  RTCLog(@"Media services were reset.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereReset];
}

- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
  // TODO(henrika): just adding logs here for now until we know whether we will
  // ever see this notification and be affected by it, or whether further
  // actions are required.
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
  AVAudioSessionSilenceSecondaryAudioHintType type =
      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
      RTCLog(@"Another application's primary audio has started.");
      break;
    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
      RTCLog(@"Another application's primary audio has stopped.");
      break;
  }
}

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  BOOL isInterrupted = self.isInterrupted;
  RTCLog(@"Application became active after an interruption. Treating as interruption "
          "end. isInterrupted changed from %d to 0.",
         isInterrupted);
  if (isInterrupted) {
    self.isInterrupted = NO;
    [self updateAudioSessionAfterEvent];
  }
  // Always treat application becoming active as an interruption end event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

#pragma mark - Private

+ (NSError *)lockError {
  NSDictionary *userInfo = @{
    NSLocalizedDescriptionKey:
        @"Must call lockForConfiguration before calling this method."
  };
  NSError *error =
      [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                 code:kRTCAudioSessionErrorLockRequired
                             userInfo:userInfo];
  return error;
}

- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
  @synchronized(self) {
    // Note: this returns a copy.
    return _delegates;
  }
}

// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}

- (void)removeZeroedDelegates {
  @synchronized(self) {
    _delegates.erase(
        std::remove_if(_delegates.begin(),
                       _delegates.end(),
                       [](id delegate) -> bool { return delegate == nil; }),
        _delegates.end());
  }
}

- (int)activationCount {
  return _activationCount;
}

- (int)incrementActivationCount {
  RTCLog(@"Incrementing activation count.");
  return rtc::AtomicOps::Increment(&_activationCount);
}

- (NSInteger)decrementActivationCount {
  RTCLog(@"Decrementing activation count.");
  return rtc::AtomicOps::Decrement(&_activationCount);
}

- (int)webRTCSessionCount {
  return _webRTCSessionCount;
}

- (BOOL)canPlayOrRecord {
  return !self.useManualAudio || self.isAudioEnabled;
}

- (BOOL)isInterrupted {
  @synchronized(self) {
    return _isInterrupted;
  }
}

- (void)setIsInterrupted:(BOOL)isInterrupted {
  @synchronized(self) {
    if (_isInterrupted == isInterrupted) {
      return;
    }
    _isInterrupted = isInterrupted;
  }
}

- (BOOL)checkLock:(NSError **)outError {
  // Check the ivar instead of trying to acquire the lock so that we won't
  // accidentally acquire it when lockForConfiguration hasn't been called.
  if (!self.isLocked) {
    if (outError) {
      *outError = [RTCAudioSession lockError];
    }
    return NO;
  }
  return YES;
}

- (BOOL)beginWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Increment(&_webRTCSessionCount);
  [self notifyDidStartPlayOrRecord];
  return YES;
}

- (BOOL)endWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Decrement(&_webRTCSessionCount);
  [self notifyDidStopPlayOrRecord];
  return YES;
}

- (BOOL)configureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Configuring audio session for WebRTC.");

  // Configure the AVAudioSession and activate it.
  // Provide an error even if there isn't one so we can log it.
  NSError *error = nil;
  RTCAudioSessionConfiguration *webRTCConfig =
      [RTCAudioSessionConfiguration webRTCConfiguration];
  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                error.localizedDescription);
    // Do not call setActive:NO if setActive:YES failed.
    if (outError) {
      *outError = error;
    }
    return NO;
  }

  // Ensure that the device currently supports audio input.
  // TODO(tkchin): Figure out if this is really necessary.
  if (!self.inputAvailable) {
    RTCLogError(@"No audio input path is available!");
    [self unconfigureWebRTCSession:nil];
    if (outError) {
      *outError = [self configurationErrorWithDescription:@"No input path."];
    }
    return NO;
  }

  // It can happen (e.g. in combination with BT devices) that the attempt to set
  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
  // configuration attempt using the sample rate that worked using the active
  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
  // combination with BT headsets. Using this "trick" seems to avoid a state
  // where Core Audio asks for a different number of audio frames than what the
  // session's I/O buffer duration corresponds to.
  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
  // tested on a limited set of iOS devices and BT devices.
  double sessionSampleRate = self.sampleRate;
  double preferredSampleRate = webRTCConfig.sampleRate;
  if (sessionSampleRate != preferredSampleRate) {
    RTCLogWarning(
        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
        sessionSampleRate, preferredSampleRate);
    if (![self setPreferredSampleRate:sessionSampleRate
                                error:&error]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  error.localizedDescription);
      if (outError) {
        *outError = error;
      }
    }
  }

  return YES;
}

- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Unconfiguring audio session for WebRTC.");
  [self setActive:NO error:outError];

  return YES;
}

- (NSError *)configurationErrorWithDescription:(NSString *)description {
  NSDictionary* userInfo = @{
    NSLocalizedDescriptionKey: description,
  };
  return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
                                    code:kRTCAudioSessionErrorConfiguration
                                userInfo:userInfo];
}

- (void)updateAudioSessionAfterEvent {
  BOOL shouldActivate = self.activationCount > 0;
  AVAudioSessionSetActiveOptions options = shouldActivate ?
      0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
  NSError *error = nil;
  if ([self.session setActive:shouldActivate
                  withOptions:options
                        error:&error]) {
    self.isActive = shouldActivate;
  } else {
    RTCLogError(@"Failed to set session active to %d. Error:%@",
                shouldActivate, error.localizedDescription);
  }
}

- (void)updateCanPlayOrRecord {
  BOOL canPlayOrRecord = NO;
  BOOL shouldNotify = NO;
  @synchronized(self) {
    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
    if (_canPlayOrRecord == canPlayOrRecord) {
      return;
    }
    _canPlayOrRecord = canPlayOrRecord;
    shouldNotify = YES;
  }
  if (shouldNotify) {
    [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
  }
}

- (void)audioSessionDidActivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally activated.");
  [self incrementActivationCount];
  self.isActive = YES;
  // When a CallKit call begins, it's possible that we receive an interruption
  // begin without a corresponding end. Since we know that we have an activated
  // audio session at this point, just clear any saved interruption flag since
  // the app may never be foregrounded during the duration of the call.
  if (self.isInterrupted) {
    RTCLog(@"Clearing interrupted state due to external activation.");
    self.isInterrupted = NO;
  }
  // Treat external audio session activation as an end interruption event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally deactivated.");
  self.isActive = NO;
  [self decrementActivationCount];
}
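
// Illustrative sketch (not part of the original file): how an app's
// CXProviderDelegate is commonly expected to forward CallKit's audio session
// callbacks to -audioSessionDidActivate: / -audioSessionDidDeactivate: above.
// The surrounding provider class is assumed.
//
//   - (void)provider:(CXProvider *)provider
//       didActivateAudioSession:(AVAudioSession *)audioSession {
//     [[RTCAudioSession sharedInstance] audioSessionDidActivate:audioSession];
//     [RTCAudioSession sharedInstance].isAudioEnabled = YES;
//   }
//
//   - (void)provider:(CXProvider *)provider
//       didDeactivateAudioSession:(AVAudioSession *)audioSession {
//     [RTCAudioSession sharedInstance].isAudioEnabled = NO;
//     [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:audioSession];
//   }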

- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
  if (context == (__bridge void*)RTCAudioSession.class) {
    if (object == _session) {
      NSNumber *newVolume = change[NSKeyValueChangeNewKey];
      RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
      [self notifyDidChangeOutputVolume:newVolume.floatValue];
    }
  } else {
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
  }
}

- (void)notifyDidBeginInterruption {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidBeginInterruption:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidBeginInterruption:self];
    }
  }
}

- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidEndInterruption:self
                           shouldResumeSession:shouldResumeSession];
    }
  }
}

- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidChangeRoute:self
                                    reason:reason
                             previousRoute:previousRoute];
    }
  }
}

- (void)notifyMediaServicesWereLost {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerTerminated:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerTerminated:self];
    }
  }
}

- (void)notifyMediaServicesWereReset {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerReset:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerReset:self];
    }
  }
}

- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
    }
  }
}

- (void)notifyDidStartPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStartPlayOrRecord:self];
    }
  }
}

- (void)notifyDidStopPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStopPlayOrRecord:self];
    }
  }
}

- (void)notifyDidChangeOutputVolume:(float)volume {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeOutputVolume:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeOutputVolume:volume];
    }
  }
}

- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
    }
  }
}

- (void)notifyWillSetActive:(BOOL)active {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:willSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self willSetActive:active];
    }
  }
}

- (void)notifyDidSetActive:(BOOL)active {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:didSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didSetActive:active];
    }
  }
}

- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
  for (id delegate : self.delegates) {
    SEL sel = @selector(audioSession:failedToSetActive:error:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self failedToSetActive:active error:error];
    }
  }
}

@end