/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_

#include <SLES/OpenSLES.h>
#include <jni.h>

#include <memory>

#include "modules/audio_device/android/audio_common.h"
#include "modules/audio_device/android/opensles_common.h"
#include "modules/audio_device/audio_device_config.h"
#include "modules/audio_device/audio_device_generic.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"

namespace webrtc {

// Implements support for functions in the WebRTC audio stack for Android that
// rely on the AudioManager in android.media. It also populates an
// AudioParameter structure with native audio parameters detected at
// construction. This class does not make any audio-related modifications
// unless Init() is called. Caching audio parameters makes no changes but only
// reads data from the Java side.
class AudioManager {
 public:
  // Wraps the Java specific parts of the AudioManager into one helper class.
  // Stores method IDs for all supported methods at construction and then
  // allows calls like JavaAudioManager::Close() while hiding the Java/JNI
  // parts that are associated with this call.
  class JavaAudioManager {
   public:
    JavaAudioManager(NativeRegistration* native_registration,
                     std::unique_ptr<GlobalRef> audio_manager);
    ~JavaAudioManager();

    bool Init();
    void Close();
    bool IsCommunicationModeEnabled();
    bool IsDeviceBlacklistedForOpenSLESUsage();

   private:
    // Global reference to the Java-side audio manager object on which the
    // cached method IDs below are invoked.
    std::unique_ptr<GlobalRef> audio_manager_;
    // Method IDs resolved once at construction (see class comment); each
    // corresponds to one of the public wrapper methods above.
    jmethodID init_;
    jmethodID dispose_;
    jmethodID is_communication_mode_enabled_;
    jmethodID is_device_blacklisted_for_open_sles_usage_;
  };

  AudioManager();
  ~AudioManager();

  // Sets the currently active audio layer combination. Must be called before
  // Init().
  void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer);

  // Creates and realizes the main (global) Open SL engine object and returns
  // a reference to it. The engine object is only created at the first call
  // since OpenSL ES for Android only supports a single engine per application.
  // Subsequent calls return the already created engine. The SL engine object
  // is destroyed when the AudioManager object is deleted. It means that the
  // engine object will be the first OpenSL ES object to be created and last
  // object to be destroyed.
  // Note that NULL will be returned unless the audio layer is specified as
  // AudioDeviceModule::kAndroidOpenSLESAudio or
  // AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio.
  SLObjectItf GetOpenSLEngine();

  // Initializes the audio manager and stores the current audio mode.
  bool Init();
  // Revert any setting done by Init().
  bool Close();

  // Returns true if current audio mode is AudioManager.MODE_IN_COMMUNICATION.
  bool IsCommunicationModeEnabled() const;

  // Native audio parameters stored during construction.
  const AudioParameters& GetPlayoutAudioParameters();
  const AudioParameters& GetRecordAudioParameters();

  // Returns true if the device supports built-in audio effects for AEC, AGC
  // and NS. Some devices can also be blacklisted for use in combination with
  // platform effects and these devices will return false.
  // Can currently only be used in combination with a Java based audio backend
  // for the recording side (i.e. using the android.media.AudioRecord API).
  bool IsAcousticEchoCancelerSupported() const;
  bool IsAutomaticGainControlSupported() const;
  bool IsNoiseSuppressorSupported() const;

  // Returns true if the device supports the low-latency audio paths in
  // combination with OpenSL ES.
  bool IsLowLatencyPlayoutSupported() const;
  bool IsLowLatencyRecordSupported() const;

  // Returns true if the device supports (and has been configured for) stereo.
  // Call the Java API WebRtcAudioManager.setStereoOutput/Input() with true as
  // parameter to enable stereo. Default is mono in both directions and the
  // setting is set once and for all when the audio manager object is created.
  // TODO(henrika): stereo is not supported in combination with OpenSL ES.
  bool IsStereoPlayoutSupported() const;
  bool IsStereoRecordSupported() const;

  // Returns true if the device supports pro-audio features in combination with
  // OpenSL ES.
  bool IsProAudioSupported() const;

  // Returns true if the device supports AAudio.
  bool IsAAudioSupported() const;

  // Returns the estimated total delay of this device. Unit is in milliseconds.
  // The value is set once at construction and never changes after that.
  // Possible values are webrtc::kLowLatencyModeDelayEstimateInMilliseconds and
  // webrtc::kHighLatencyModeDelayEstimateInMilliseconds.
  int GetDelayEstimateInMilliseconds() const;

 private:
  // Called from Java side so we can cache the native audio parameters.
  // This method will be called by the WebRtcAudioManager constructor, i.e.
  // on the same thread that this object is created on.
  // Note: |native_audio_manager| presumably carries the native AudioManager
  // pointer back from Java so the static entry point can reach the instance
  // (see OnCacheAudioParameters) — confirm against the .cc file.
  static void JNICALL CacheAudioParameters(JNIEnv* env,
                                           jobject obj,
                                           jint sample_rate,
                                           jint output_channels,
                                           jint input_channels,
                                           jboolean hardware_aec,
                                           jboolean hardware_agc,
                                           jboolean hardware_ns,
                                           jboolean low_latency_output,
                                           jboolean low_latency_input,
                                           jboolean pro_audio,
                                           jboolean a_audio,
                                           jint output_buffer_size,
                                           jint input_buffer_size,
                                           jlong native_audio_manager);
  // Instance-side handler for CacheAudioParameters(); stores the received
  // values in the members below (e.g. playout_parameters_ and
  // record_parameters_ are set here — see their comments).
  void OnCacheAudioParameters(JNIEnv* env,
                              jint sample_rate,
                              jint output_channels,
                              jint input_channels,
                              jboolean hardware_aec,
                              jboolean hardware_agc,
                              jboolean hardware_ns,
                              jboolean low_latency_output,
                              jboolean low_latency_input,
                              jboolean pro_audio,
                              jboolean a_audio,
                              jint output_buffer_size,
                              jint input_buffer_size);

  // Stores thread ID in the constructor.
  // We can then use ThreadChecker::IsCurrent() to ensure that
  // other methods are called from the same thread.
  rtc::ThreadChecker thread_checker_;

  // Calls JavaVM::AttachCurrentThread() if this thread is not attached at
  // construction.
  // Also ensures that DetachCurrentThread() is called at destruction.
  JvmThreadConnector attach_thread_if_needed_;

  // Wraps the JNI interface pointer and methods associated with it.
  std::unique_ptr<JNIEnvironment> j_environment_;

  // Contains factory method for creating the Java object.
  std::unique_ptr<NativeRegistration> j_native_registration_;

  // Wraps the Java specific parts of the AudioManager.
  std::unique_ptr<AudioManager::JavaAudioManager> j_audio_manager_;

  // Contains the selected audio layer specified by the AudioLayer enumerator
  // in the AudioDeviceModule class.
  AudioDeviceModule::AudioLayer audio_layer_;

  // This object is the global entry point of the OpenSL ES API.
  // After creating the engine object, the application can obtain this object's
  // SLEngineItf interface. This interface contains creation methods for all
  // the other object types in the API. None of these interfaces are realized
  // by this class. It only provides access to the global engine object.
  webrtc::ScopedSLObjectItf engine_object_;

  // Set to true by Init() and false by Close().
  bool initialized_;

  // True if device supports hardware (or built-in) AEC.
  bool hardware_aec_;
  // True if device supports hardware (or built-in) AGC.
  bool hardware_agc_;
  // True if device supports hardware (or built-in) NS.
  bool hardware_ns_;

  // True if device supports the low-latency OpenSL ES audio path for output.
  bool low_latency_playout_;

  // True if device supports the low-latency OpenSL ES audio path for input.
  bool low_latency_record_;

  // True if device supports the low-latency OpenSL ES pro-audio path.
  bool pro_audio_;

  // True if device supports the low-latency AAudio audio path.
  bool a_audio_;

  // The delay estimate can take one of two fixed values depending on if the
  // device supports low-latency output or not.
  int delay_estimate_in_milliseconds_;

  // Contains native parameters (e.g. sample rate, channel configuration).
  // Set at construction in OnCacheAudioParameters() which is called from
  // Java on the same thread as this object is created on.
  AudioParameters playout_parameters_;
  AudioParameters record_parameters_;
};

}  // namespace webrtc

#endif  // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_