1 /*
2  *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 package org.webrtc;
12 
13 import android.annotation.TargetApi;
14 import android.graphics.SurfaceTexture;
15 import android.opengl.GLES11Ext;
16 import android.opengl.GLES20;
17 import android.os.Build;
18 import android.os.Handler;
19 import android.os.HandlerThread;
20 import androidx.annotation.Nullable;
21 import java.util.concurrent.Callable;
22 import org.webrtc.EglBase.Context;
23 import org.webrtc.TextureBufferImpl.RefCountMonitor;
24 import org.webrtc.VideoFrame.TextureBuffer;
25 
26 /**
27  * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
28  * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
29  * one texture frame can be in flight at once, so the frame must be released in order to receive a
 * new frame. Call stopListening() to stop receiving new frames. Call dispose to release all
31  * resources once the texture frame is released.
32  */
public class SurfaceTextureHelper {
  /**
   * Interface for monitoring texture buffers created from this SurfaceTexture. Since only one
   * texture buffer can exist at a time, this can be used to monitor for stuck frames.
   */
  public interface FrameRefMonitor {
    /** A new frame was created. New frames start with ref count of 1. */
    void onNewBuffer(TextureBuffer textureBuffer);
    /** Ref count of the frame was incremented by the calling thread. */
    void onRetainBuffer(TextureBuffer textureBuffer);
    /** Ref count of the frame was decremented by the calling thread. */
    void onReleaseBuffer(TextureBuffer textureBuffer);
    /** Frame was destroyed (ref count reached 0). */
    void onDestroyBuffer(TextureBuffer textureBuffer);
  }

  private static final String TAG = "SurfaceTextureHelper";
  /**
   * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
   * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
   * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
   * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
   * rtc::TimeNanos() there is no need for aligning timestamps again in
   * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
   * closer to actual creation time.
   */
  public static SurfaceTextureHelper create(final String threadName,
      final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter,
      FrameRefMonitor frameRefMonitor) {
    final HandlerThread thread = new HandlerThread(threadName);
    thread.start();
    final Handler handler = new Handler(thread.getLooper());

    // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
    // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
    // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
    // is constructed on the |handler| thread.
    return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
      @Nullable
      @Override
      public SurfaceTextureHelper call() {
        try {
          return new SurfaceTextureHelper(
              sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor);
        } catch (RuntimeException e) {
          // EGL initialization can fail on rare occasions (see the constructor); surface the
          // failure to the caller as null rather than crashing the handler thread.
          Logging.e(TAG, threadName + " create failure", e);
          return null;
        }
      }
    });
  }

  /**
   * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(
      final String threadName, final EglBase.Context sharedContext) {
    return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(),
        /*frameRefMonitor=*/null);
  }

  /**
   * Same as above with yuvConverter set to new YuvConverter.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(
      final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
    return create(
        threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null);
  }

  /**
   * Create a SurfaceTextureHelper without frame ref monitor.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(final String threadName,
      final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) {
    return create(
        threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null);
  }

  // Forwards texture buffer ref-count events to the optional |frameRefMonitor|, and frees the
  // single in-flight texture slot when a buffer's ref count reaches zero.
  private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() {
    @Override
    public void onRetain(TextureBufferImpl textureBuffer) {
      if (frameRefMonitor != null) {
        frameRefMonitor.onRetainBuffer(textureBuffer);
      }
    }

    @Override
    public void onRelease(TextureBufferImpl textureBuffer) {
      if (frameRefMonitor != null) {
        frameRefMonitor.onReleaseBuffer(textureBuffer);
      }
    }

    @Override
    public void onDestroy(TextureBufferImpl textureBuffer) {
      // The buffer is fully released: mark the texture as free so the next pending frame
      // can be delivered (or resources released if we are quitting).
      returnTextureFrame();
      if (frameRefMonitor != null) {
        frameRefMonitor.onDestroyBuffer(textureBuffer);
      }
    }
  };

  private final Handler handler;
  private final EglBase eglBase;
  private final SurfaceTexture surfaceTexture;
  private final int oesTextureId;
  private final YuvConverter yuvConverter;
  @Nullable private final TimestampAligner timestampAligner;
  private final FrameRefMonitor frameRefMonitor;

  // These variables are only accessed from the |handler| thread.
  @Nullable private VideoSink listener;
  // The possible states of this class.
  private boolean hasPendingTexture;
  // Volatile because isTextureInUse() may be read from any thread; writes happen on |handler|.
  private volatile boolean isTextureInUse;
  private boolean isQuitting;
  private int frameRotation;
  private int textureWidth;
  private int textureHeight;
  // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
  // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
  @Nullable private VideoSink pendingListener;
  final Runnable setListenerRunnable = new Runnable() {
    @Override
    public void run() {
      Logging.d(TAG, "Setting listener to " + pendingListener);
      listener = pendingListener;
      pendingListener = null;
      // May have a pending frame from the previous capture session - drop it.
      if (hasPendingTexture) {
        // Calling updateTexImage() is necessary in order to receive new frames.
        updateTexImage();
        hasPendingTexture = false;
      }
    }
  };

  // Must be called on the |handler| thread so that the SurfaceTexture callback is bound to it on
  // API level < 21 (see setOnFrameAvailableListener below). May throw RuntimeException if EGL
  // surface creation fails; cleans up the EGL context and quits the looper before rethrowing.
  private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps,
      YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
    }
    this.handler = handler;
    this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
    this.yuvConverter = yuvConverter;
    this.frameRefMonitor = frameRefMonitor;

    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
    try {
      // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();
    } catch (RuntimeException e) {
      // Clean up before rethrowing the exception.
      eglBase.release();
      handler.getLooper().quit();
      throw e;
    }

    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    surfaceTexture = new SurfaceTexture(oesTextureId);
    setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
      // Runs on the |handler| thread (explicitly on API >= 21, implicitly via construction
      // thread on older API levels).
      hasPendingTexture = true;
      tryDeliverTextureFrame();
    }, handler);
  }

  // Registers |listener| on |surfaceTexture|, bound to |handler| where the platform supports it.
  @TargetApi(21)
  private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
      SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      surfaceTexture.setOnFrameAvailableListener(listener, handler);
    } else {
      // The documentation states that the listener will be called on an arbitrary thread, but in
      // practice, it is always the thread on which the SurfaceTexture was constructed. There are
      // assertions in place in case this ever changes. For API >= 21, we use the new API to
      // explicitly specify the handler.
      surfaceTexture.setOnFrameAvailableListener(listener);
    }
  }

  /**
   * Start to stream textures to the given |listener|. If you need to change listener, you need to
   * call stopListening() first.
   */
  public void startListening(final VideoSink listener) {
    if (this.listener != null || this.pendingListener != null) {
      throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
    }
    this.pendingListener = listener;
    handler.post(setListenerRunnable);
  }

  /**
   * Stop listening. The listener set in startListening() is guaranteed to not receive any more
   * onFrame() callbacks after this function returns.
   */
  public void stopListening() {
    Logging.d(TAG, "stopListening()");
    // Cancel any not-yet-run setListenerRunnable, then synchronously clear both listener fields
    // on the handler thread so no onFrame() can be in flight when this method returns.
    handler.removeCallbacks(setListenerRunnable);
    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      listener = null;
      pendingListener = null;
    });
  }

  /**
   * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
   * since this class needs to be aware of the texture size.
   */
  public void setTextureSize(int textureWidth, int textureHeight) {
    if (textureWidth <= 0) {
      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
    }
    if (textureHeight <= 0) {
      throw new IllegalArgumentException(
          "Texture height must be positive, but was " + textureHeight);
    }
    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
    handler.post(() -> {
      this.textureWidth = textureWidth;
      this.textureHeight = textureHeight;
      // A frame may have been waiting for the size to be known; try to deliver it now.
      tryDeliverTextureFrame();
    });
  }

  /** Set the rotation of the delivered frames. */
  public void setFrameRotation(int rotation) {
    handler.post(() -> this.frameRotation = rotation);
  }

  /**
   * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
   * producer such as a camera or decoder.
   */
  public SurfaceTexture getSurfaceTexture() {
    return surfaceTexture;
  }

  /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
  public Handler getHandler() {
    return handler;
  }

  /**
   * This function is called when the texture frame is released. Only one texture frame can be in
   * flight at once, so this function must be called before a new frame is delivered.
   */
  private void returnTextureFrame() {
    handler.post(() -> {
      isTextureInUse = false;
      if (isQuitting) {
        // dispose() was called while the frame was in flight; release resources now.
        release();
      } else {
        tryDeliverTextureFrame();
      }
    });
  }

  public boolean isTextureInUse() {
    return isTextureInUse;
  }

  /**
   * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
   * stopped when the texture frame has been released. You are guaranteed to not receive any more
   * onFrame() after this function returns.
   */
  public void dispose() {
    Logging.d(TAG, "dispose()");
    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      isQuitting = true;
      if (!isTextureInUse) {
        release();
      }
      // If a texture frame is still in use, release() is deferred until returnTextureFrame().
    });
  }

  /**
   * Posts to the correct thread to convert |textureBuffer| to I420.
   *
   * @deprecated Use toI420() instead.
   */
  @Deprecated
  public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
    return textureBuffer.toI420();
  }

  private void updateTexImage() {
    // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
    // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
    // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
    synchronized (EglBase.lock) {
      surfaceTexture.updateTexImage();
    }
  }

  // Delivers the pending texture frame to |listener| if all preconditions hold: not quitting,
  // a frame is pending, no frame is currently in flight, a listener is set, and the texture size
  // is known. Must be called on the |handler| thread.
  private void tryDeliverTextureFrame() {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("Wrong thread.");
    }
    if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
      return;
    }
    if (textureWidth == 0 || textureHeight == 0) {
      // Information about the resolution needs to be provided by a call to setTextureSize() before
      // frames are produced.
      Logging.w(TAG, "Texture size has not been set.");
      return;
    }
    isTextureInUse = true;
    hasPendingTexture = false;

    updateTexImage();

    final float[] transformMatrix = new float[16];
    surfaceTexture.getTransformMatrix(transformMatrix);
    long timestampNs = surfaceTexture.getTimestamp();
    if (timestampAligner != null) {
      timestampNs = timestampAligner.translateTimestamp(timestampNs);
    }
    final VideoFrame.TextureBuffer buffer =
        new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
            RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
            yuvConverter, textureRefCountMonitor);
    if (frameRefMonitor != null) {
      frameRefMonitor.onNewBuffer(buffer);
    }
    final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
    listener.onFrame(frame);
    // Drop our local reference; the listener retains the frame if it needs it longer.
    frame.release();
  }

  // Releases all GL/EGL resources and quits the handler thread. Must be called on the |handler|
  // thread, only after dispose() has been requested and no texture frame is in flight.
  private void release() {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("Wrong thread.");
    }
    if (isTextureInUse || !isQuitting) {
      throw new IllegalStateException("Unexpected release.");
    }
    yuvConverter.release();
    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
    surfaceTexture.release();
    eglBase.release();
    handler.getLooper().quit();
    if (timestampAligner != null) {
      timestampAligner.dispose();
    }
  }
}
390