1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "webrtc/modules/video_capture/android/video_capture_android.h"
12
13 #include "webrtc/base/common.h"
14 #include "webrtc/modules/utility/include/helpers_android.h"
15 #include "webrtc/modules/video_capture/android/device_info_android.h"
16 #include "webrtc/system_wrappers/include/critical_section_wrapper.h"
17 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
18 #include "webrtc/system_wrappers/include/logging.h"
19 #include "webrtc/system_wrappers/include/trace.h"
20
21 #include "AndroidBridge.h"
22
// Process-wide JNI state. Written by SetCaptureAndroidVM() (registration
// and teardown) and read by the JNI callbacks and VideoCaptureAndroid.
static JavaVM* g_jvm_capture = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
static jobject g_context = NULL; // Owned android.content.Context.
26
27 namespace webrtc {
28
29 // Called by Java to get the global application context.
GetContext(JNIEnv * env,jclass)30 jobject JNICALL GetContext(JNIEnv* env, jclass) {
31 assert(g_context);
32 return g_context;
33 }
34
35 // Called by Java when the camera has a new frame to deliver.
ProvideCameraFrame(JNIEnv * env,jobject,jbyteArray javaCameraFrame,jint length,jint rotation,jlong timeStamp,jlong context)36 void JNICALL ProvideCameraFrame(
37 JNIEnv* env,
38 jobject,
39 jbyteArray javaCameraFrame,
40 jint length,
41 jint rotation,
42 jlong timeStamp,
43 jlong context) {
44 if (!context) {
45 return;
46 }
47
48 webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
49 reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
50 context);
51 jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
52 captureModule->OnIncomingFrame(
53 reinterpret_cast<uint8_t*>(cameraFrame), length, rotation, 0);
54 env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
55 }
56
SetCaptureAndroidVM(JavaVM * javaVM)57 int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
58 if (g_java_capturer_class) {
59 return 0;
60 }
61
62 if (javaVM) {
63 assert(!g_jvm_capture);
64 g_jvm_capture = javaVM;
65 AttachThreadScoped ats(g_jvm_capture);
66
67 g_context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();
68
69 videocapturemodule::DeviceInfoAndroid::Initialize(g_jvm_capture);
70
71 jclass clsRef = mozilla::jni::GetClassRef(
72 ats.env(), "org/webrtc/videoengine/VideoCaptureAndroid");
73 g_java_capturer_class =
74 static_cast<jclass>(ats.env()->NewGlobalRef(clsRef));
75 ats.env()->DeleteLocalRef(clsRef);
76 assert(g_java_capturer_class);
77
78 JNINativeMethod native_methods[] = {
79 {"GetContext",
80 "()Landroid/content/Context;",
81 reinterpret_cast<void*>(&GetContext)},
82 {"ProvideCameraFrame",
83 "([BIIJJ)V",
84 reinterpret_cast<void*>(&ProvideCameraFrame)}};
85 if (ats.env()->RegisterNatives(g_java_capturer_class,
86 native_methods, 2) != 0)
87 assert(false);
88 } else {
89 if (g_jvm_capture) {
90 AttachThreadScoped ats(g_jvm_capture);
91 ats.env()->UnregisterNatives(g_java_capturer_class);
92 ats.env()->DeleteGlobalRef(g_java_capturer_class);
93 g_java_capturer_class = NULL;
94 g_context = NULL;
95 videocapturemodule::DeviceInfoAndroid::DeInitialize();
96 g_jvm_capture = NULL;
97 }
98 }
99
100 return 0;
101 }
102
103 namespace videocapturemodule {
104
Create(const char * deviceUniqueIdUTF8)105 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
106 const char* deviceUniqueIdUTF8) {
107 rtc::scoped_refptr<VideoCaptureAndroid> implementation(
108 new rtc::RefCountedObject<VideoCaptureAndroid>());
109 if (implementation->Init(deviceUniqueIdUTF8) != 0) {
110 implementation = nullptr;
111 }
112 return implementation;
113 }
114
OnIncomingFrame(uint8_t * videoFrame,size_t videoFrameLength,int32_t degrees,int64_t captureTime)115 int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
116 size_t videoFrameLength,
117 int32_t degrees,
118 int64_t captureTime) {
119 // _captureStarted is written on the controlling thread in
120 // StartCapture/StopCapture. This is the camera thread.
121 // CaptureStarted() will access it under a lock.
122 if (!CaptureStarted())
123 return 0;
124
125 VideoRotation current_rotation =
126 (degrees <= 45 || degrees > 315) ? kVideoRotation_0 :
127 (degrees > 45 && degrees <= 135) ? kVideoRotation_90 :
128 (degrees > 135 && degrees <= 225) ? kVideoRotation_180 :
129 (degrees > 225 && degrees <= 315) ? kVideoRotation_270 :
130 kVideoRotation_0; // Impossible.
131 if (_rotation != current_rotation) {
132 LOG(LS_INFO) << "New camera rotation: " << degrees;
133 _rotation = current_rotation;
134 int32_t status = VideoCaptureImpl::SetCaptureRotation(_rotation);
135 if (status != 0)
136 return status;
137 }
138 return IncomingFrame(
139 videoFrame, videoFrameLength, _captureCapability, captureTime);
140 }
141
VideoCaptureAndroid()142 VideoCaptureAndroid::VideoCaptureAndroid()
143 : VideoCaptureImpl(),
144 _deviceInfo(),
145 _jCapturer(NULL),
146 _captureStarted(false) {
147 }
148
Init(const char * deviceUniqueIdUTF8)149 int32_t VideoCaptureAndroid::Init(const char* deviceUniqueIdUTF8) {
150 const int nameLength = strlen(deviceUniqueIdUTF8);
151 if (nameLength >= kVideoCaptureUniqueNameLength)
152 return -1;
153
154 // Store the device name
155 LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
156 size_t camera_id = 0;
157 if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
158 return -1;
159 _deviceUniqueId = new char[nameLength + 1];
160 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
161
162 AttachThreadScoped ats(g_jvm_capture);
163 JNIEnv* env = ats.env();
164 jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
165 assert(ctor);
166 jlong j_this = reinterpret_cast<intptr_t>(this);
167 _jCapturer = env->NewGlobalRef(
168 env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
169 assert(_jCapturer);
170 _rotation = kVideoRotation_0;
171 return 0;
172 }
173
~VideoCaptureAndroid()174 VideoCaptureAndroid::~VideoCaptureAndroid() {
175 // Ensure Java camera is released even if our caller didn't explicitly Stop.
176 if (_captureStarted)
177 StopCapture();
178 AttachThreadScoped ats(g_jvm_capture);
179 JNIEnv* env = ats.env();
180
181 // Avoid callbacks into ourself even if the above stopCapture fails.
182 jmethodID j_unlink =
183 env->GetMethodID(g_java_capturer_class, "unlinkCapturer", "()V");
184 env->CallVoidMethod(_jCapturer, j_unlink);
185
186 env->DeleteGlobalRef(_jCapturer);
187 }
188
StartCapture(const VideoCaptureCapability & capability)189 int32_t VideoCaptureAndroid::StartCapture(
190 const VideoCaptureCapability& capability) {
191 _apiCs.Enter();
192 AttachThreadScoped ats(g_jvm_capture);
193 JNIEnv* env = ats.env();
194
195 if (_deviceInfo.GetBestMatchedCapability(
196 _deviceUniqueId, capability, _captureCapability) < 0) {
197 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
198 "%s: GetBestMatchedCapability failed: %dx%d",
199 __FUNCTION__, capability.width, capability.height);
200 // Manual exit of critical section
201 _apiCs.Leave();
202 return -1;
203 }
204
205 _captureDelay = _captureCapability.expectedCaptureDelay;
206
207 int width = _captureCapability.width;
208 int height = _captureCapability.height;
209 int min_mfps = 0;
210 int max_mfps = 0;
211 _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
212 &min_mfps, &max_mfps);
213
214 // Exit critical section to avoid blocking camera thread inside
215 // onIncomingFrame() call.
216 _apiCs.Leave();
217
218 jmethodID j_start =
219 env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
220 assert(j_start);
221 bool started = env->CallBooleanMethod(_jCapturer, j_start,
222 width, height,
223 min_mfps, max_mfps);
224 if (started) {
225 CriticalSectionScoped cs(&_apiCs);
226 _requestedCapability = capability;
227 _captureStarted = true;
228 }
229 return started ? 0 : -1;
230 }
231
StopCapture()232 int32_t VideoCaptureAndroid::StopCapture() {
233 _apiCs.Enter();
234 AttachThreadScoped ats(g_jvm_capture);
235 JNIEnv* env = ats.env();
236
237 memset(&_requestedCapability, 0, sizeof(_requestedCapability));
238 memset(&_captureCapability, 0, sizeof(_captureCapability));
239 _captureStarted = false;
240 // Exit critical section to avoid blocking camera thread inside
241 // onIncomingFrame() call.
242 _apiCs.Leave();
243
244 // try to stop the capturer.
245 jmethodID j_stop =
246 env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
247 return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
248 }
249
CaptureStarted()250 bool VideoCaptureAndroid::CaptureStarted() {
251 CriticalSectionScoped cs(&_apiCs);
252 return _captureStarted;
253 }
254
CaptureSettings(VideoCaptureCapability & settings)255 int32_t VideoCaptureAndroid::CaptureSettings(
256 VideoCaptureCapability& settings) {
257 CriticalSectionScoped cs(&_apiCs);
258 settings = _requestedCapability;
259 return 0;
260 }
261
SetCaptureRotation(VideoRotation rotation)262 int32_t VideoCaptureAndroid::SetCaptureRotation(VideoRotation rotation) {
263 // Our only caller is ProvideCameraFrame, which is called
264 // from a synchronized Java method. If we'd take this lock,
265 // any call going from C++ to Java will deadlock.
266 // CriticalSectionScoped cs(&_apiCs);
267 VideoCaptureImpl::SetCaptureRotation(rotation);
268 return 0;
269 }
270
271 } // namespace videocapturemodule
272 } // namespace webrtc
273