1/*
2 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10
11#import "RTCMTLRenderer+Private.h"
12
13#import <Metal/Metal.h>
14#import <MetalKit/MetalKit.h>
15
16#import "base/RTCLogging.h"
17#import "base/RTCVideoFrame.h"
18#import "base/RTCVideoFrameBuffer.h"
19
20#include "api/video/video_rotation.h"
21#include "rtc_base/checks.h"
22
23// As defined in shaderSource.
24static NSString *const vertexFunctionName = @"vertexPassthrough";
25static NSString *const fragmentFunctionName = @"fragmentColorConversion";
26
27static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
28static NSString *const commandBufferLabel = @"RTCCommandBuffer";
29static NSString *const renderEncoderLabel = @"RTCEncoder";
30static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
31
32// Computes the texture coordinates given rotation and cropping.
// Computes the interleaved vertex data (view position + texture coordinate)
// for a full-screen quad, given cropping and rotation. |buffer| receives
// 4 vertices * 4 floats: {x, y, u, v} per vertex, in triangle-strip order.
static inline void getCubeVertexData(int cropX,
                                     int cropY,
                                     int cropWidth,
                                     int cropHeight,
                                     size_t frameWidth,
                                     size_t frameHeight,
                                     RTCVideoRotation rotation,
                                     float *buffer) {
  // Normalized crop bounds in [0..1] texture space. uMin/vMin of 0.0 mean no
  // cropping on the left/top edge; uMax/vMax of 1.0 mean no cropping on the
  // right/bottom edge (e.g. 0.8 renders up to 80% of the width/height).
  float uMin = cropX / (float)frameWidth;
  float uMax = (cropX + cropWidth) / (float)frameWidth;
  float vMin = cropY / (float)frameHeight;
  float vMax = (cropY + cropHeight) / (float)frameHeight;

  // Per-rotation texture coordinates for the four strip vertices
  // (bottom-left, bottom-right, top-left, top-right in view space).
  float u[4];
  float v[4];
  switch (rotation) {
    case RTCVideoRotation_0:
      u[0] = uMin; u[1] = uMax; u[2] = uMin; u[3] = uMax;
      v[0] = vMax; v[1] = vMax; v[2] = vMin; v[3] = vMin;
      break;
    case RTCVideoRotation_90:
      u[0] = uMax; u[1] = uMax; u[2] = uMin; u[3] = uMin;
      v[0] = vMax; v[1] = vMin; v[2] = vMax; v[3] = vMin;
      break;
    case RTCVideoRotation_180:
      u[0] = uMax; u[1] = uMin; u[2] = uMax; u[3] = uMin;
      v[0] = vMin; v[1] = vMin; v[2] = vMax; v[3] = vMax;
      break;
    case RTCVideoRotation_270:
      u[0] = uMin; u[1] = uMin; u[2] = uMax; u[3] = uMax;
      v[0] = vMin; v[1] = vMax; v[2] = vMin; v[3] = vMax;
      break;
  }

  // View-space positions of the quad corners are rotation-independent.
  static const float viewX[4] = {-1.0, 1.0, -1.0, 1.0};
  static const float viewY[4] = {-1.0, -1.0, 1.0, 1.0};
  for (int i = 0; i < 4; ++i) {
    buffer[4 * i + 0] = viewX[i];
    buffer[4 * i + 1] = viewY[i];
    buffer[4 * i + 2] = u[i];
    buffer[4 * i + 3] = v[i];
  }
}
84
85// The max number of command buffers in flight (submitted to GPU).
86// For now setting it up to 1.
87// In future we might use triple buffering method if it improves performance.
88static const NSInteger kMaxInflightBuffers = 1;
89
90@implementation RTCMTLRenderer {
91  __kindof MTKView *_view;
92
93  // Controller.
94
95  // Renderer.
96  id<MTLDevice> _device;
97  id<MTLCommandQueue> _commandQueue;
98  id<MTLLibrary> _defaultLibrary;
99  id<MTLRenderPipelineState> _pipelineState;
100
101  // Buffers.
102  id<MTLBuffer> _vertexBuffer;
103
104  // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
105  int _oldFrameWidth;
106  int _oldFrameHeight;
107  int _oldCropWidth;
108  int _oldCropHeight;
109  int _oldCropX;
110  int _oldCropY;
111  RTCVideoRotation _oldRotation;
112}
113
114@synthesize rotationOverride = _rotationOverride;
115
// Designated initializer. All Metal state is created lazily in
// -addRenderingDestination:, so there is nothing to set up here.
- (instancetype)init {
  self = [super init];
  return self;
}
122
// Attaches the renderer to |view| and initializes all Metal state (device,
// command queue, shader pipeline, vertex buffer).
// Returns NO if the Metal device or shader library could not be created.
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
  return [self setupWithView:view];
}
126
127#pragma mark - Private
128
// Initializes Metal (device, command queue, shader library, pipeline state)
// and configures |view| to render with it. Returns NO if Metal setup fails.
- (BOOL)setupWithView:(__kindof MTKView *)view {
  if (![self setupMetal]) {
    return NO;
  }

  _view = view;
  view.device = _device;
  view.preferredFramesPerSecond = 30;
  view.autoResizeDrawable = NO;

  [self loadAssets];

  // Allocate the interleaved position/texcoord buffer (4 vertices * 4 floats).
  // It starts zeroed and is filled lazily by -setupTexturesForFrame:.
  float initialVertexData[16] = {0};
  _vertexBuffer = [_device newBufferWithBytes:initialVertexData
                                       length:sizeof(initialVertexData)
                                      options:MTLResourceCPUCacheModeWriteCombined];
  return YES;
}
147#pragma mark - Inheritance
148
// Returns the Metal device created in -setupMetal, or nil before setup.
- (id<MTLDevice>)currentMetalDevice {
  return _device;
}
152
// Abstract: subclasses return the Metal shading-language source containing
// the vertexPassthrough and fragmentColorConversion functions.
- (NSString *)shaderSource {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
  return nil;
}
157
// Abstract: subclasses bind their per-frame textures (e.g. Y/UV planes or an
// RGB texture) on |renderEncoder| before drawing.
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
161
// Abstract: subclasses report |frame|'s full dimensions and the crop
// rectangle to render, via the out-parameters. Used by
// -setupTexturesForFrame: to compute texture coordinates.
- (void)getWidth:(int *)width
          height:(int *)height
       cropWidth:(int *)cropWidth
      cropHeight:(int *)cropHeight
           cropX:(int *)cropX
           cropY:(int *)cropY
         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
171
// Prepares per-frame rendering state: resolves the effective rotation
// (honoring |rotationOverride| when set), queries frame/crop geometry from
// the subclass, and rewrites the vertex buffer's texture coordinates only
// when that geometry changed since the previous frame.
// Returns YES; subclasses override to additionally upload pixel data and may
// return NO on failure.
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  // Apply rotation override if set.
  RTCVideoRotation rotation;
  NSValue *rotationOverride = self.rotationOverride;
  if (rotationOverride) {
#if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
    if (@available(iOS 11, *)) {
      // -getValue:size: is the bounds-checked accessor available on iOS 11+.
      [rotationOverride getValue:&rotation size:sizeof(rotation)];
    } else
#endif
    {
      [rotationOverride getValue:&rotation];
    }
  } else {
    rotation = frame.rotation;
  }

  // Frame and crop geometry comes from the pixel-format-specific subclass.
  int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
  [self getWidth:&frameWidth
          height:&frameHeight
       cropWidth:&cropWidth
      cropHeight:&cropHeight
           cropX:&cropX
           cropY:&cropY
         ofFrame:frame];

  // Recompute the texture cropping and recreate vertexBuffer if necessary.
  // The _old* ivars cache the last-used values so the (CPU-visible) vertex
  // buffer is only rewritten when something actually changed.
  if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
      cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
      frameHeight != _oldFrameHeight) {
    getCubeVertexData(cropX,
                      cropY,
                      cropWidth,
                      cropHeight,
                      frameWidth,
                      frameHeight,
                      rotation,
                      (float *)_vertexBuffer.contents);
    _oldCropX = cropX;
    _oldCropY = cropY;
    _oldCropWidth = cropWidth;
    _oldCropHeight = cropHeight;
    _oldRotation = rotation;
    _oldFrameWidth = frameWidth;
    _oldFrameHeight = frameHeight;
  }

  return YES;
}
222
223#pragma mark - GPU methods
224
// Creates the Metal device, command queue, and compiles the subclass's shader
// source into a library. Returns NO if the system has no Metal support or if
// shader compilation fails.
- (BOOL)setupMetal {
  // Set the view to use the default device. MTLCreateSystemDefaultDevice() is
  // the portable factory available on both iOS and macOS;
  // CGDirectDisplayCopyCurrentMetalDevice(CGMainDisplayID()) is a macOS-only
  // CoreGraphics API and does not exist on iOS, which this file targets (see
  // the @available(iOS 11, *) guards above).
  _device = MTLCreateSystemDefaultDevice();
  if (!_device) {
    return NO;
  }

  // Create a new command queue.
  _commandQueue = [_device newCommandQueue];

  // Load metal library from source.
  NSError *libraryError = nil;
  NSString *shaderSource = [self shaderSource];

  id<MTLLibrary> sourceLibrary =
      [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];

  // A non-nil error aborts setup even if a library object was returned
  // (compilation warnings are treated as failures here).
  if (libraryError) {
    RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
    return NO;
  }

  if (!sourceLibrary) {
    RTCLogError(@"Metal: Failed to load library. %@", libraryError);
    return NO;
  }
  _defaultLibrary = sourceLibrary;

  return YES;
}
255
// Builds the render pipeline state from the vertex/fragment functions in the
// compiled shader library. Must run after -setupMetal and after |_view| is
// assigned so the pipeline's color format matches the view.
- (void)loadAssets {
  MTLRenderPipelineDescriptor *descriptor = [[MTLRenderPipelineDescriptor alloc] init];
  descriptor.label = pipelineDescriptorLabel;
  descriptor.vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
  descriptor.fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
  descriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
  // No depth buffer is used; the quad is drawn directly to the color target.
  descriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;

  NSError *pipelineError = nil;
  _pipelineState = [_device newRenderPipelineStateWithDescriptor:descriptor error:&pipelineError];
  if (!_pipelineState) {
    RTCLogError(@"Metal: Failed to create pipeline state. %@", pipelineError);
  }
}
273
// Encodes and submits one frame: binds the pipeline, vertex buffer and
// subclass-provided textures, draws a 4-vertex triangle strip into the view's
// current drawable, then commits the command buffer.
- (void)render {
  id<MTLCommandBuffer> commands = [_commandQueue commandBuffer];
  commands.label = commandBufferLabel;

  // A nil render pass descriptor means no drawable is currently available;
  // in that case skip encoding but still commit the (empty) command buffer.
  MTLRenderPassDescriptor *passDescriptor = _view.currentRenderPassDescriptor;
  if (passDescriptor != nil) {
    id<MTLRenderCommandEncoder> encoder =
        [commands renderCommandEncoderWithDescriptor:passDescriptor];
    encoder.label = renderEncoderLabel;

    [encoder pushDebugGroup:renderEncoderDebugGroup];
    [encoder setRenderPipelineState:_pipelineState];
    [encoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
    [self uploadTexturesToRenderEncoder:encoder];

    [encoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
                vertexStart:0
                vertexCount:4
              instanceCount:1];
    [encoder popDebugGroup];
    [encoder endEncoding];

    [commands presentDrawable:_view.currentDrawable];
  }

  // CPU work is completed, GPU work can be started.
  [commands commit];
}
304
305#pragma mark - RTCMTLRenderer
306
// RTCMTLRenderer entry point: prepares textures/geometry for |frame| and, on
// success, encodes and submits the draw. The autorelease pool bounds the
// lifetime of per-frame temporaries created while encoding.
- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  @autoreleasepool {
    if (![self setupTexturesForFrame:frame]) {
      return;
    }
    [self render];
  }
}
318
319@end
320