/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10 
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_

#include <jni.h>

#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
21 
22 namespace webrtc {
23 
24 class PlayoutDelayProvider;
25 
26 // Implements 16-bit mono PCM audio input support for Android using the Java
27 // AudioRecord interface. Most of the work is done by its Java counterpart in
28 // WebRtcAudioRecord.java. This class is created and lives on a thread in
29 // C++-land, but recorded audio buffers are delivered on a high-priority
30 // thread managed by the Java class.
31 //
32 // The Java class makes use of AudioEffect features (mainly AEC) which are
33 // first available in Jelly Bean. If it is instantiated running against earlier
34 // SDKs, the AEC provided by the APM in WebRTC must be used and enabled
35 // separately instead.
36 //
37 // An instance must be created and destroyed on one and the same thread.
38 // All public methods must also be called on the same thread. A thread checker
39 // will DCHECK if any method is called on an invalid thread.
40 // It is possible to call the two static methods (SetAndroidAudioDeviceObjects
41 // and ClearAndroidAudioDeviceObjects) from a different thread but both will
42 // CHECK that the calling thread is attached to a Java VM.
43 //
44 // All methods use AttachThreadScoped to attach to a Java VM if needed and then
45 // detach when method goes out of scope. We do so because this class does not
46 // own the thread is is created and called on and other objects on the same
47 // thread might put us in a detached state at any time.
48 class AudioRecordJni {
49  public:
50   // Use the invocation API to allow the native application to use the JNI
51   // interface pointer to access VM features.
52   // |jvm| denotes the Java VM and |context| corresponds to
53   // android.content.Context in Java.
54   // This method also sets a global jclass object, |g_audio_record_class| for
55   // the "org/webrtc/voiceengine/WebRtcAudioRecord"-class.
56   static void SetAndroidAudioDeviceObjects(void* jvm, void* context);
57   // Always call this method after the object has been destructed. It deletes
58   // existing global references and enables garbage collection.
59   static void ClearAndroidAudioDeviceObjects();
60 
61   AudioRecordJni(
62       PlayoutDelayProvider* delay_provider, AudioManager* audio_manager);
63   ~AudioRecordJni();
64 
65   int32_t Init();
66   int32_t Terminate();
67 
68   int32_t InitRecording();
RecordingIsInitialized()69   bool RecordingIsInitialized() const { return initialized_; }
70 
71   int32_t StartRecording();
72   int32_t StopRecording ();
Recording()73   bool Recording() const { return recording_; }
74 
75   int32_t RecordingDelay(uint16_t& delayMS) const;
76 
77   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
78 
79   bool BuiltInAECIsAvailable() const;
80   int32_t EnableBuiltInAEC(bool enable);
81   int32_t RecordingDeviceName(uint16_t index,
82                               char name[kAdmMaxDeviceNameSize],
83                               char guid[kAdmMaxGuidSize]);
84 
85  private:
86   // Called from Java side so we can cache the address of the Java-manged
87   // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
88   // is also stored in |direct_buffer_capacity_in_bytes_|.
89   // This method will be called by the WebRtcAudioRecord constructor, i.e.,
90   // on the same thread that this object is created on.
91   static void JNICALL CacheDirectBufferAddress(
92     JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioRecord);
93   void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
94 
95   // Called periodically by the Java based WebRtcAudioRecord object when
96   // recording has started. Each call indicates that there are |length| new
97   // bytes recorded in the memory area |direct_buffer_address_| and it is
98   // now time to send these to the consumer.
99   // This method is called on a high-priority thread from Java. The name of
100   // the thread is 'AudioRecordThread'.
101   static void JNICALL DataIsRecorded(
102     JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord);
103   void OnDataIsRecorded(int length);
104 
105   // Returns true if SetAndroidAudioDeviceObjects() has been called
106   // successfully.
107   bool HasDeviceObjects();
108 
109   // Called from the constructor. Defines the |j_audio_record_| member.
110   void CreateJavaInstance();
111 
112   // Stores thread ID in constructor.
113   // We can then use ThreadChecker::CalledOnValidThread() to ensure that
114   // other methods are called from the same thread.
115   // Currently only does DCHECK(thread_checker_.CalledOnValidThread()).
116   rtc::ThreadChecker thread_checker_;
117 
118   // Stores thread ID in first call to OnDataIsRecorded() from high-priority
119   // thread in Java. Detached during construction of this object.
120   rtc::ThreadChecker thread_checker_java_;
121 
122   // Returns the current playout delay.
123   // TODO(henrika): this value is currently fixed since initial tests have
124   // shown that the estimated delay varies very little over time. It might be
125   // possible to make improvements in this area.
126   PlayoutDelayProvider* delay_provider_;
127 
128   // Contains audio parameters provided to this class at construction by the
129   // AudioManager.
130   const AudioParameters audio_parameters_;
131 
132   // The Java WebRtcAudioRecord instance.
133   jobject j_audio_record_;
134 
135   // Cached copy of address to direct audio buffer owned by |j_audio_record_|.
136   void* direct_buffer_address_;
137 
138   // Number of bytes in the direct audio buffer owned by |j_audio_record_|.
139   int direct_buffer_capacity_in_bytes_;
140 
141   // Number audio frames per audio buffer. Each audio frame corresponds to
142   // one sample of PCM mono data at 16 bits per sample. Hence, each audio
143   // frame contains 2 bytes (given that the Java layer only supports mono).
144   // Example: 480 for 48000 Hz or 441 for 44100 Hz.
145   int frames_per_buffer_;
146 
147   bool initialized_;
148 
149   bool recording_;
150 
151   // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
152   // AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
153   AudioDeviceBuffer* audio_device_buffer_;
154 
155   // Contains a delay estimate from the playout side given by |delay_provider_|.
156   int playout_delay_in_milliseconds_;
157 };
158 
159 }  // namespace webrtc
160 
161 #endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_
162