/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
10
11 #include "video_capture_android.h"
12
13 #include "device_info_android.h"
14 #include "modules/utility/include/helpers_android.h"
15 #include "rtc_base/criticalsection.h"
16 #include "rtc_base/logging.h"
17 #include "rtc_base/refcountedobject.h"
18
19 #include "AndroidBridge.h"
20
21 static JavaVM* g_jvm_capture = NULL;
22 static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
23 static jobject g_context = NULL; // Owned android.content.Context.
24
25 namespace webrtc {
26
JniCommon_allocateNativeByteBuffer(JNIEnv * env,jclass,jint size)27 jobject JniCommon_allocateNativeByteBuffer(JNIEnv* env, jclass, jint size) {
28 void* new_data = ::operator new(size);
29 jobject byte_buffer = env->NewDirectByteBuffer(new_data, size);
30 return byte_buffer;
31 }
32
JniCommon_freeNativeByteBuffer(JNIEnv * env,jclass,jobject byte_buffer)33 void JniCommon_freeNativeByteBuffer(JNIEnv* env, jclass, jobject byte_buffer) {
34 void* data = env->GetDirectBufferAddress(byte_buffer);
35 ::operator delete(data);
36 }
37
38 // Called by Java to get the global application context.
GetContext(JNIEnv * env,jclass)39 jobject JNICALL GetContext(JNIEnv* env, jclass) {
40 assert(g_context);
41 return g_context;
42 }
43
44 // Called by Java when the camera has a new frame to deliver.
ProvideCameraFrame(JNIEnv * env,jobject,jint width,jint height,jobject javaDataY,jint strideY,jobject javaDataU,jint strideU,jobject javaDataV,jint strideV,jint rotation,jlong timeStamp,jlong context)45 void JNICALL ProvideCameraFrame(JNIEnv* env, jobject, jint width, jint height,
46 jobject javaDataY, jint strideY,
47 jobject javaDataU, jint strideU,
48 jobject javaDataV, jint strideV, jint rotation,
49 jlong timeStamp, jlong context) {
50 if (!context) {
51 return;
52 }
53
54 webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
55 reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
56 context);
57 uint8_t* dataY =
58 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataY));
59 uint8_t* dataU =
60 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataU));
61 uint8_t* dataV =
62 reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataV));
63
64 rtc::scoped_refptr<I420Buffer> i420Buffer = I420Buffer::Copy(
65 width, height, dataY, strideY, dataU, strideU, dataV, strideV);
66
67 captureModule->OnIncomingFrame(i420Buffer, rotation, timeStamp);
68 }
69
SetCaptureAndroidVM(JavaVM * javaVM)70 int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
71 if (g_java_capturer_class) {
72 return 0;
73 }
74
75 if (javaVM) {
76 assert(!g_jvm_capture);
77 g_jvm_capture = javaVM;
78 AttachThreadScoped ats(g_jvm_capture);
79
80 g_context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();
81
82 videocapturemodule::DeviceInfoAndroid::Initialize(g_jvm_capture);
83
84 {
85 jclass clsRef = mozilla::jni::GetClassRef(
86 ats.env(), "org/webrtc/videoengine/VideoCaptureAndroid");
87 g_java_capturer_class =
88 static_cast<jclass>(ats.env()->NewGlobalRef(clsRef));
89 ats.env()->DeleteLocalRef(clsRef);
90 assert(g_java_capturer_class);
91
92 JNINativeMethod native_methods[] = {
93 {"GetContext", "()Landroid/content/Context;",
94 reinterpret_cast<void*>(&GetContext)},
95 {"ProvideCameraFrame",
96 "(IILjava/nio/ByteBuffer;ILjava/nio/ByteBuffer;ILjava/nio/"
97 "ByteBuffer;IIJJ)V",
98 reinterpret_cast<void*>(&ProvideCameraFrame)}};
99 if (ats.env()->RegisterNatives(g_java_capturer_class, native_methods,
100 2) != 0)
101 assert(false);
102 }
103
104 {
105 jclass clsRef =
106 mozilla::jni::GetClassRef(ats.env(), "org/webrtc/JniCommon");
107
108 JNINativeMethod native_methods[] = {
109 {"nativeAllocateByteBuffer", "(I)Ljava/nio/ByteBuffer;",
110 reinterpret_cast<void*>(&JniCommon_allocateNativeByteBuffer)},
111 {"nativeFreeByteBuffer", "(Ljava/nio/ByteBuffer;)V",
112 reinterpret_cast<void*>(&JniCommon_freeNativeByteBuffer)}};
113 if (ats.env()->RegisterNatives(clsRef, native_methods, 2) != 0)
114 assert(false);
115 }
116 } else {
117 if (g_jvm_capture) {
118 AttachThreadScoped ats(g_jvm_capture);
119 ats.env()->UnregisterNatives(g_java_capturer_class);
120 ats.env()->DeleteGlobalRef(g_java_capturer_class);
121 g_java_capturer_class = NULL;
122 g_context = NULL;
123 videocapturemodule::DeviceInfoAndroid::DeInitialize();
124 g_jvm_capture = NULL;
125 }
126 }
127
128 return 0;
129 }
130
131 namespace videocapturemodule {
132
Create(const char * deviceUniqueIdUTF8)133 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
134 const char* deviceUniqueIdUTF8) {
135 rtc::scoped_refptr<VideoCaptureAndroid> implementation(
136 new rtc::RefCountedObject<VideoCaptureAndroid>());
137 if (implementation->Init(deviceUniqueIdUTF8) != 0) {
138 implementation = nullptr;
139 }
140 return implementation;
141 }
142
OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer,int32_t degrees,int64_t captureTime)143 void VideoCaptureAndroid::OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer,
144 int32_t degrees,
145 int64_t captureTime) {
146 rtc::CritScope cs(&_apiCs);
147
148 VideoRotation rotation =
149 (degrees <= 45 || degrees > 315) ? kVideoRotation_0
150 : (degrees > 45 && degrees <= 135) ? kVideoRotation_90
151 : (degrees > 135 && degrees <= 225) ? kVideoRotation_180
152 : (degrees > 225 && degrees <= 315) ? kVideoRotation_270
153 : kVideoRotation_0; // Impossible.
154
155 // Historically, we have ignored captureTime. Why?
156 VideoFrame captureFrame(I420Buffer::Rotate(*buffer, rotation), 0,
157 rtc::TimeMillis(), rotation);
158
159 DeliverCapturedFrame(captureFrame);
160 }
161
VideoCaptureAndroid()162 VideoCaptureAndroid::VideoCaptureAndroid()
163 : VideoCaptureImpl(),
164 _deviceInfo(),
165 _jCapturer(NULL),
166 _captureStarted(false) {}
167
Init(const char * deviceUniqueIdUTF8)168 int32_t VideoCaptureAndroid::Init(const char* deviceUniqueIdUTF8) {
169 const int nameLength = strlen(deviceUniqueIdUTF8);
170 if (nameLength >= kVideoCaptureUniqueNameSize) return -1;
171
172 // Store the device name
173 RTC_LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
174 _deviceUniqueId = new char[nameLength + 1];
175 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
176
177 AttachThreadScoped ats(g_jvm_capture);
178 JNIEnv* env = ats.env();
179 jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>",
180 "(Ljava/lang/String;)V");
181 assert(ctor);
182 jstring j_deviceName = env->NewStringUTF(_deviceUniqueId);
183 _jCapturer = env->NewGlobalRef(
184 env->NewObject(g_java_capturer_class, ctor, j_deviceName));
185 assert(_jCapturer);
186 return 0;
187 }
188
~VideoCaptureAndroid()189 VideoCaptureAndroid::~VideoCaptureAndroid() {
190 // Ensure Java camera is released even if our caller didn't explicitly Stop.
191 if (_captureStarted) StopCapture();
192 AttachThreadScoped ats(g_jvm_capture);
193 JNIEnv* env = ats.env();
194 env->DeleteGlobalRef(_jCapturer);
195 }
196
StartCapture(const VideoCaptureCapability & capability)197 int32_t VideoCaptureAndroid::StartCapture(
198 const VideoCaptureCapability& capability) {
199 _apiCs.Enter();
200 AttachThreadScoped ats(g_jvm_capture);
201 JNIEnv* env = ats.env();
202
203 if (_deviceInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
204 _captureCapability) < 0) {
205 RTC_LOG(LS_ERROR) << __FUNCTION__ << "s: GetBestMatchedCapability failed: "
206 << capability.width << "x" << capability.height;
207 // Manual exit of critical section
208 _apiCs.Leave();
209 return -1;
210 }
211
212 int width = _captureCapability.width;
213 int height = _captureCapability.height;
214 int min_mfps = 0;
215 int max_mfps = 0;
216 _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
217 &min_mfps, &max_mfps);
218
219 // Exit critical section to avoid blocking camera thread inside
220 // onIncomingFrame() call.
221 _apiCs.Leave();
222
223 jmethodID j_start =
224 env->GetMethodID(g_java_capturer_class, "startCapture", "(IIIIJ)Z");
225 assert(j_start);
226 jlong j_this = reinterpret_cast<intptr_t>(this);
227 bool started = env->CallBooleanMethod(_jCapturer, j_start, width, height,
228 min_mfps, max_mfps, j_this);
229 if (started) {
230 rtc::CritScope cs(&_apiCs);
231 _requestedCapability = capability;
232 _captureStarted = true;
233 }
234 return started ? 0 : -1;
235 }
236
StopCapture()237 int32_t VideoCaptureAndroid::StopCapture() {
238 _apiCs.Enter();
239 AttachThreadScoped ats(g_jvm_capture);
240 JNIEnv* env = ats.env();
241
242 memset(&_requestedCapability, 0, sizeof(_requestedCapability));
243 memset(&_captureCapability, 0, sizeof(_captureCapability));
244 _captureStarted = false;
245 // Exit critical section to avoid blocking camera thread inside
246 // onIncomingFrame() call.
247 _apiCs.Leave();
248
249 // try to stop the capturer.
250 jmethodID j_stop =
251 env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
252 return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
253 }
254
CaptureStarted()255 bool VideoCaptureAndroid::CaptureStarted() {
256 rtc::CritScope cs(&_apiCs);
257 return _captureStarted;
258 }
259
CaptureSettings(VideoCaptureCapability & settings)260 int32_t VideoCaptureAndroid::CaptureSettings(VideoCaptureCapability& settings) {
261 rtc::CritScope cs(&_apiCs);
262 settings = _requestedCapability;
263 return 0;
264 }
265
266 } // namespace videocapturemodule
267 } // namespace webrtc
268