/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.Matrix;
import android.graphics.Point;
import android.opengl.GLES20;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;

/**
 * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
 * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
 * taken into account. You can supply an additional render matrix for custom transformations.
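 *
 * <p>A minimal usage sketch (assumes a current GL context on the calling thread and the
 * {@code GlRectDrawer} implementation of {@link RendererCommon.GlDrawer}; the frame and
 * viewport variables are illustrative):
 *
 * <pre>{@code
 * VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
 * RendererCommon.GlDrawer drawer = new GlRectDrawer();
 * // Draw the frame into the full viewport of the current surface, with no extra matrix.
 * frameDrawer.drawFrame(frame, drawer, null, 0, 0, viewportWidth, viewportHeight);
 * // Release GL resources once rendering is finished.
 * frameDrawer.release();
 * drawer.release();
 * }</pre>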
 */
public class VideoFrameDrawer {
  public static final String TAG = "VideoFrameDrawer";
  /**
   * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
   * depending on the type of the buffer. You can supply an additional render matrix, which is
   * multiplied together with the transformation matrix of the frame. (M = renderMatrix *
   * transformationMatrix)
   */
  public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
      Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
      int viewportWidth, int viewportHeight) {
    Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
    finalMatrix.preConcat(renderMatrix);
    float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
    switch (buffer.getType()) {
      case OES:
        drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      case RGB:
        drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
            viewportY, viewportWidth, viewportHeight);
        break;
      default:
        throw new RuntimeException("Unknown texture type.");
    }
  }

  /**
   * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
   */
  private static class YuvUploader {
    // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
    // that handles stride and compare performance with intermediate copy.
    @Nullable private ByteBuffer copyBuffer;
    @Nullable private int[] yuvTextures;

    /**
     * Upload |planes| into OpenGL textures, taking stride into consideration.
     *
     * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
     */
    @Nullable
    public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
      final int[] planeWidths = new int[] {width, width / 2, width / 2};
      final int[] planeHeights = new int[] {height, height / 2, height / 2};
      // Make a first pass to see if we need a temporary copy buffer.
      int copyCapacityNeeded = 0;
      for (int i = 0; i < 3; ++i) {
        if (strides[i] > planeWidths[i]) {
          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
        }
      }
      // Allocate copy buffer if necessary.
      if (copyCapacityNeeded > 0
          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
      }
      // Make sure YUV textures are allocated.
      if (yuvTextures == null) {
        yuvTextures = new int[3];
        for (int i = 0; i < 3; i++) {
          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
        }
      }
      // Upload each plane.
      for (int i = 0; i < 3; ++i) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        // GLES only accepts packed data, i.e. stride == planeWidth.
        final ByteBuffer packedByteBuffer;
        if (strides[i] == planeWidths[i]) {
          // Input is packed already.
          packedByteBuffer = planes[i];
        } else {
          YuvHelper.copyPlane(
              planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
          packedByteBuffer = copyBuffer;
        }
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
      }
      return yuvTextures;
    }

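    /** Uploads the planes of |buffer| into OpenGL textures, reading strides from the buffer. */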
    @Nullable
    public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
      int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
      ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
      return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
    }

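    /** Returns the textures from the latest upload, or null if nothing has been uploaded yet. */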
    @Nullable
    public int[] getYuvTextures() {
      return yuvTextures;
    }

    /**
     * Releases cached resources. Uploader can still be used and the resources will be reallocated
     * on first use.
     */
    public void release() {
      copyBuffer = null;
      if (yuvTextures != null) {
        GLES20.glDeleteTextures(3, yuvTextures, 0);
        yuvTextures = null;
      }
    }
  }

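  /** Returns the distance between (x0, y0) and (x1, y1), rounded to the nearest integer. */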
  private static int distance(float x0, float y0, float x1, float y1) {
    return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
  }

  // These points are used to calculate the size of the part of the frame we are rendering.
  static final float[] srcPoints =
      new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
  private final float[] dstPoints = new float[6];
  private final Point renderSize = new Point();
  private int renderWidth;
  private int renderHeight;

  // Calculates the frame size after |renderMatrix| is applied. Stores the output in member
  // variables |renderWidth| and |renderHeight| to avoid allocations since this function is
  // called for every frame.
  private void calculateTransformedRenderSize(
      int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
    if (renderMatrix == null) {
      renderWidth = frameWidth;
      renderHeight = frameHeight;
      return;
    }
    // Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
    renderMatrix.mapPoints(dstPoints, srcPoints);

    // Multiply with the width and height to get the positions in terms of pixels.
    for (int i = 0; i < 3; ++i) {
      dstPoints[i * 2 + 0] *= frameWidth;
      dstPoints[i * 2 + 1] *= frameHeight;
    }

    // Get the length of the sides of the transformed rectangle in terms of pixels.
    renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
    renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
  }

  private final YuvUploader yuvUploader = new YuvUploader();
  // This variable is only used for reference equality checks, to cache uploaded I420 textures
  // across repeated draws of the same frame.
  @Nullable private VideoFrame lastI420Frame;
  private final Matrix renderMatrix = new Matrix();

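  /**
   * Draws |frame| using |drawer|, with no additional render matrix and a viewport covering the
   * full rotated frame size.
   */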
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
    drawFrame(frame, drawer, null /* additionalRenderMatrix */);
  }

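  /**
   * Draws |frame| using |drawer|, applying |additionalRenderMatrix| on top of the frame rotation.
   * The viewport covers the full rotated frame size.
   */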
  public void drawFrame(
      VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
    drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
        frame.getRotatedWidth(), frame.getRotatedHeight());
  }

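  /**
   * Draws |frame| into the given viewport using |drawer|. Texture buffers are drawn directly;
   * other buffers are converted to I420 and uploaded to textures first. Rotation is applied, and
   * |additionalRenderMatrix|, if non-null, is concatenated with the frame transformation.
   */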
  public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
      @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
      int viewportHeight) {
    final int width = frame.getRotatedWidth();
    final int height = frame.getRotatedHeight();
    calculateTransformedRenderSize(width, height, additionalRenderMatrix);
    if (renderWidth <= 0 || renderHeight <= 0) {
      Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
      return;
    }

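    // Build the render matrix in texture coordinates. The translations move the center of the
    // [0, 1] x [0, 1] texture square to the origin and back, so the rotation (and the vertical
    // flip needed for I420 data) happen around the center of the frame. The caller's matrix, if
    // any, is pre-concatenated and therefore applied first.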
    final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
    renderMatrix.reset();
    renderMatrix.preTranslate(0.5f, 0.5f);
    if (!isTextureFrame) {
      renderMatrix.preScale(1f, -1f); // I420-frames are upside down
    }
    renderMatrix.preRotate(frame.getRotation());
    renderMatrix.preTranslate(-0.5f, -0.5f);
    if (additionalRenderMatrix != null) {
      renderMatrix.preConcat(additionalRenderMatrix);
    }

    if (isTextureFrame) {
      lastI420Frame = null;
      drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    } else {
      // Only upload the I420 data to textures once per frame, even if we are called multiple
      // times with the same frame.
      if (frame != lastI420Frame) {
        lastI420Frame = frame;
        final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
        yuvUploader.uploadFromBuffer(i420Buffer);
        i420Buffer.release();
      }

      drawer.drawYuv(yuvUploader.getYuvTextures(),
          RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
          renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
    }
  }

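  /**
   * Returns a retained reference to |buffer|. The |width| and |height| arguments are currently
   * unused.
   */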
  public VideoFrame.Buffer prepareBufferForViewportSize(
      VideoFrame.Buffer buffer, int width, int height) {
    buffer.retain();
    return buffer;
  }

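  /**
   * Releases cached GL resources. The drawer can still be used afterwards; textures are
   * reallocated on the next I420 frame that is drawn.
   */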
  public void release() {
    yuvUploader.release();
    lastI420Frame = null;
  }
}