/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
  private static final String TAG = "Camera1Session";
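  // Number of preview callback buffers pre-allocated when capturing to byte buffers; the camera
  // cycles through them as frames are delivered.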
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;

  private static final Histogram camera1StartTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
  private static final Histogram camera1StopTimeMsHistogram =
      Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
  private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
      "WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());

  private static enum SessionState { RUNNING, STOPPED }

  private final Handler cameraThreadHandler;
  private final Events events;
  private final boolean captureToTexture;
  private final Context applicationContext;
  private final SurfaceTextureHelper surfaceTextureHelper;
  private final int cameraId;
  private final android.hardware.Camera camera;
  private final android.hardware.Camera.CameraInfo info;
  private final CaptureFormat captureFormat;
  // Used only for stats. Only used on the camera thread.
  private final long constructionTimeNs; // Construction time of this class.

  private SessionState state;
  private boolean firstFrameReported;

  // TODO(titovartem) make correct fix during webrtc:9175
  @SuppressWarnings("ByteBufferBackingArray")
  public static void create(final CreateSessionCallback callback, final Events events,
      final boolean captureToTexture, final Context applicationContext,
      final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
      final int height, final int framerate) {
    final long constructionTimeNs = System.nanoTime();
    Logging.d(TAG, "Open camera " + cameraId);
    events.onCameraOpening();

    final android.hardware.Camera camera;
    try {
      camera = android.hardware.Camera.open(cameraId);
    } catch (RuntimeException e) {
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    if (camera == null) {
      callback.onFailure(FailureType.ERROR,
          "android.hardware.Camera.open returned null for camera id = " + cameraId);
      return;
    }

    try {
      camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
    } catch (IOException | RuntimeException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

    final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    android.hardware.Camera.getCameraInfo(cameraId, info);

    final CaptureFormat captureFormat;
    try {
      final android.hardware.Camera.Parameters parameters = camera.getParameters();
      captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
      final Size pictureSize = findClosestPictureSize(parameters, width, height);
      updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
    } catch (RuntimeException e) {
      camera.release();
      callback.onFailure(FailureType.ERROR, e.getMessage());
      return;
    }

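    // When capturing to byte buffers, hand the camera a fixed set of preallocated buffers sized
    // for one full preview frame each; they are returned for reuse in listenForBytebufferFrames()
    // once the delivered frame has been released.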
    if (!captureToTexture) {
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
      }
    }

    // Calculate orientation manually and send it as CVO instead.
    camera.setDisplayOrientation(0 /* degrees */);

    callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
        surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
  }

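  // Applies the chosen fps range, preview and picture sizes, and enables video stabilization and
  // continuous video focus when the device supports them.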
  private static void updateCameraParameters(android.hardware.Camera camera,
      android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
      boolean captureToTexture) {
    final List<String> focusModes = parameters.getSupportedFocusModes();

    parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    if (!captureToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }

    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
  }

  private static CaptureFormat findClosestCaptureFormat(
      android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
    // Find closest supported format for |width| x |height| @ |framerate|.
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);

    final CaptureFormat.FramerateRange fpsRange =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);

    final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
    CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);

    return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
  }

  private static Size findClosestPictureSize(
      android.hardware.Camera.Parameters parameters, int width, int height) {
    return CameraEnumerationAndroid.getClosestSupportedSize(
        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
  }

  private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
      android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
      long constructionTimeNs) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);

    this.cameraThreadHandler = new Handler();
    this.events = events;
    this.captureToTexture = captureToTexture;
    this.applicationContext = applicationContext;
    this.surfaceTextureHelper = surfaceTextureHelper;
    this.cameraId = cameraId;
    this.camera = camera;
    this.info = info;
    this.captureFormat = captureFormat;
    this.constructionTimeNs = constructionTimeNs;

    surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);

    startCapturing();
  }

  @Override
  public void stop() {
    Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
    checkIsOnCameraThread();
    if (state != SessionState.STOPPED) {
      final long stopStartTime = System.nanoTime();
      stopInternal();
      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
      camera1StopTimeMsHistogram.addSample(stopTimeMs);
    }
  }

  private void startCapturing() {
    Logging.d(TAG, "Start capturing");
    checkIsOnCameraThread();

    state = SessionState.RUNNING;

    camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
      @Override
      public void onError(int error, android.hardware.Camera camera) {
        String errorMessage;
        if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
          errorMessage = "Camera server died!";
        } else {
          errorMessage = "Camera error: " + error;
        }
        Logging.e(TAG, errorMessage);
        stopInternal();
        if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
          events.onCameraDisconnected(Camera1Session.this);
        } else {
          events.onCameraError(Camera1Session.this, errorMessage);
        }
      }
    });

    if (captureToTexture) {
      listenForTextureFrames();
    } else {
      listenForBytebufferFrames();
    }
    try {
      camera.startPreview();
    } catch (RuntimeException e) {
      stopInternal();
      events.onCameraError(this, e.getMessage());
    }
  }

  private void stopInternal() {
    Logging.d(TAG, "Stop internal");
    checkIsOnCameraThread();
    if (state == SessionState.STOPPED) {
      Logging.d(TAG, "Camera is already stopped");
      return;
    }

    state = SessionState.STOPPED;
    surfaceTextureHelper.stopListening();
    // Note: stopPreview or other driver code might deadlock. Deadlock in
    // android.hardware.Camera._stopPreview(Native Method) has been observed on
    // Nexus 5 (hammerhead), OS version LMY48I.
    camera.stopPreview();
    camera.release();
    events.onCameraClosed(this);
    Logging.d(TAG, "Stop done");
  }

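  // Forwards texture frames from the SurfaceTextureHelper to the events callback, undoing the
  // horizontal mirroring the OS applies to front-facing cameras and tagging each frame with the
  // current frame orientation.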
  private void listenForTextureFrames() {
    surfaceTextureHelper.startListening((VideoFrame frame) -> {
      checkIsOnCameraThread();

      if (state != SessionState.RUNNING) {
        Logging.d(TAG, "Texture frame captured but camera is no longer running.");
        return;
      }

      if (!firstFrameReported) {
        final int startTimeMs =
            (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
        camera1StartTimeMsHistogram.addSample(startTimeMs);
        firstFrameReported = true;
      }

      // Undo the mirror that the OS "helps" us with.
      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
      final VideoFrame modifiedFrame = new VideoFrame(
          CameraSession.createTextureBufferWithModifiedTransformMatrix(
              (TextureBufferImpl) frame.getBuffer(),
              /* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
              /* rotation= */ 0),
          /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
      events.onFrameCaptured(Camera1Session.this, modifiedFrame);
      modifiedFrame.release();
    });
  }

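  // Wraps each NV21 preview frame delivered by the camera in a VideoFrame and forwards it to the
  // events callback. The byte array is handed back to the camera for reuse when the frame buffer
  // is released, but only while the session is still running.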
  private void listenForBytebufferFrames() {
    camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
        checkIsOnCameraThread();

        if (callbackCamera != camera) {
          Logging.e(TAG, "Callback from a different camera. This should never happen.");
          return;
        }

        if (state != SessionState.RUNNING) {
          Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
          return;
        }

        final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

        if (!firstFrameReported) {
          final int startTimeMs =
              (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
          camera1StartTimeMsHistogram.addSample(startTimeMs);
          firstFrameReported = true;
        }

        VideoFrame.Buffer frameBuffer = new NV21Buffer(
            data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
              if (state == SessionState.RUNNING) {
                camera.addCallbackBuffer(data);
              }
            }));
        final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
        events.onFrameCaptured(Camera1Session.this, frame);
        frame.release();
      }
    });
  }

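  // Returns the rotation to apply to the frame: the current device orientation, mirrored for
  // back-facing cameras, added to the sensor orientation reported in CameraInfo.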
  private int getFrameOrientation() {
    int rotation = CameraSession.getDeviceOrientation(applicationContext);
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }

  private void checkIsOnCameraThread() {
    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
      throw new IllegalStateException("Wrong thread");
    }
  }
}