//
//  VideoCMIOCapture.m
//  TgVoipWebrtc
//
//  Created by Mikhail Filimonov on 21.06.2021.
//  Copyright © 2021 Mikhail Filimonov. All rights reserved.
//

#import "VideoCMIOCapture.h"
#import "TGCMIODevice.h"
#import "TGCMIOCapturer.h"
#import <VideoToolbox/VideoToolbox.h>
#import "TGRTCCVPixelBuffer.h"
#include "rtc_base/logging.h"
#import "base/RTCLogging.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#import "sdk/objc/native/src/objc_video_track_source.h"
#import "sdk/objc/native/src/objc_frame_buffer.h"
#import "api/video_track_source_proxy.h"

#import <CoreMediaIO/CMIOHardware.h>

#import "helpers/AVCaptureSession+DevicePosition.h"
#import "helpers/RTCDispatcher+Private.h"
#import "base/RTCVideoFrame.h"

#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/checks.h"
#include "third_party/libyuv/include/libyuv.h"
#include "DarwinVideoSource.h"

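// Frame path: TGCMIOCapturer delivers CMSampleBuffers from a CoreMediaIO device.
// Samples that already carry a CVPixelBuffer are forwarded directly; compressed
// samples are first decoded through a VTDecompressionSession. Either way the
// pixel buffer is wrapped in a TGRTCCVPixelBuffer, adapted to fit within
// 1280x720, and handed to the DarwinVideoTrackSource (and, if set, to the
// uncropped sink).
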
struct MTLFrameSize {
    int width = 0;
    int height = 0;
};

// Scales |to| so that it fits inside |from| while preserving its aspect ratio.
MTLFrameSize AspectFitted(MTLFrameSize from, MTLFrameSize to) {
    double scale = std::min(
        from.width / std::max(1., double(to.width)),
        from.height / std::max(1., double(to.height)));
    return {
        int(std::ceil(to.width * scale)),
        int(std::ceil(to.height * scale))
    };
}
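// Worked example: a 1920x1080 capture fitted into {1280, 720} gives
// scale = min(1280/1920, 720/1080) = 2/3, i.e. an adapted size of 1280x720;
// a 3840x1080 ultrawide source gives scale = min(1/3, 2/3) = 1/3 -> 1280x360.
// applyPixelBuffer: then rounds both dimensions down to multiples of 4.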

static const int64_t kNanosecondsPerSecond = 1000000000;

@interface VideoCMIOCapture ()
- (void)applyPixelBuffer:(CVPixelBufferRef)pixelBuffer timeStampNs:(int64_t)timeStampNs;
@end

// Output callback of the VTDecompressionSession created in -createDecompSession:.
// Decoded frames re-enter the normal pixel-buffer path via -applyPixelBuffer:.
void decompressionSessionDecodeFrameCallback(void *decompressionOutputRefCon,
                                             void *sourceFrameRefCon,
                                             OSStatus status,
                                             VTDecodeInfoFlags infoFlags,
                                             CVImageBufferRef imageBuffer,
                                             CMTime presentationTimeStamp,
                                             CMTime presentationDuration)
{
    VideoCMIOCapture *manager = (__bridge VideoCMIOCapture *)decompressionOutputRefCon;

    if (status == noErr && imageBuffer != NULL) {
        [manager applyPixelBuffer:imageBuffer timeStampNs:CMTimeGetSeconds(presentationTimeStamp) * kNanosecondsPerSecond];
    }
}

static tgcalls::DarwinVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
    webrtc::VideoTrackSourceProxy *proxy_source =
        static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
    return static_cast<tgcalls::DarwinVideoTrackSource *>(proxy_source->internal());
}

@implementation VideoCMIOCapture
{
    TGCMIOCapturer *_capturer;
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;
    std::function<void ()> _onFatalError;

    BOOL _hadFatalError;
    BOOL _isRunning;
    BOOL _shouldBeMirrored;
    VTDecompressionSessionRef _decompressionSession;
}

- (void)start {
    __weak VideoCMIOCapture *weakSelf = self;

    [_capturer start:^(CMSampleBufferRef sampleBuffer) {
        [weakSelf apply:sampleBuffer];
    }];
}

- (void)apply:(CMSampleBufferRef)sampleBuffer {
    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }

    CMVideoFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        // The sample carries compressed data rather than a raw pixel buffer:
        // decode it with VideoToolbox and let the output callback feed
        // -applyPixelBuffer:.
        if (_decompressionSession == NULL) {
            [self createDecompSession:formatDesc];
        }
        [self render:sampleBuffer];
        return;
    }

    int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
        kNanosecondsPerSecond;

    [self applyPixelBuffer:pixelBuffer timeStampNs:timeStampNs];
}

- (void)applyPixelBuffer:(CVPixelBufferRef)pixelBuffer timeStampNs:(int64_t)timeStampNs {
    int width = (int)CVPixelBufferGetWidth(pixelBuffer);
    int height = (int)CVPixelBufferGetHeight(pixelBuffer);

    // Adapt the frame so it fits within 1280x720, with dimensions aligned to 4.
    MTLFrameSize fittedSize = AspectFitted({ 1280, 720 }, { width, height });
    fittedSize.width -= (fittedSize.width % 4);
    fittedSize.height -= (fittedSize.height % 4);

    TGRTCCVPixelBuffer *rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer adaptedWidth:fittedSize.width adaptedHeight:fittedSize.height cropWidth:width cropHeight:height cropX:0 cropY:0];

    rtcPixelBuffer.shouldBeMirrored = _shouldBeMirrored;

    RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                             rotation:RTCVideoRotation_0
                                                          timeStampNs:timeStampNs];

    if (_uncroppedSink) {
        const int64_t timestamp_us = timeStampNs / rtc::kNumNanosecsPerMicrosec;

        rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
        buffer = new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(videoFrame.buffer);

        webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(videoFrame.rotation);

        _uncroppedSink->OnFrame(webrtc::VideoFrame::Builder()
                                    .set_video_frame_buffer(buffer)
                                    .set_rotation(rotation)
                                    .set_timestamp_us(timestamp_us)
                                    .build());
    }

    getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
}

- (void)stop {
    [_capturer stop];
}

- (void)setIsEnabled:(bool)isEnabled {
}

- (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
    self->_uncroppedSink = sink;
}

- (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
}

- (void)setOnFatalError:(std::function<void()>)error {
    if (!self->_hadFatalError) {
        _onFatalError = std::move(error);
    } else if (error) {
        error();
    }
}

- (void)setOnPause:(std::function<void(bool)>)pause {
}

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source {
    self = [super init];
    if (self != nil) {
        _source = source;
    }
    return self;
}

- (void)setupCaptureWithDevice:(AVCaptureDevice *)device {
    _shouldBeMirrored = NO;
    _capturer = [[TGCMIOCapturer alloc] initWithDeviceId:device];
}

- (void)render:(CMSampleBufferRef)sampleBuffer
{
    VTDecodeFrameFlags flags = kVTDecodeFrame_EnableAsynchronousDecompression | kVTDecodeFrame_1xRealTimePlayback;
    VTDecodeInfoFlags flagOut;
    // The output callback does not need a per-frame context, so no
    // sourceFrameRefCon is passed.
    VTDecompressionSessionDecodeFrame(_decompressionSession, sampleBuffer, flags, NULL, &flagOut);
}

- (void)createDecompSession:(CMVideoFormatDescriptionRef)formatDesc
{
    if (_decompressionSession) {
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
    }
    _decompressionSession = NULL;

    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = decompressionSessionDecodeFrameCallback;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;

    VTDecompressionSessionCreate(NULL, formatDesc, NULL, NULL,
                                 &callBackRecord, &_decompressionSession);
}

- (void)dealloc {
    if (_decompressionSession) {
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
    }
}

@end
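
// Illustrative usage sketch: how a caller might wire this class up. Only the
// VideoCMIOCapture messages below are defined in this file; how the native
// webrtc track source and the AVCaptureDevice are obtained is left to the
// caller and is an assumption here.
//
//     rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source = ...; // proxy wrapping a DarwinVideoTrackSource
//     AVCaptureDevice *device = ...;                                      // CoreMediaIO capture device
//
//     VideoCMIOCapture *capture = [[VideoCMIOCapture alloc] initWithSource:source];
//     [capture setupCaptureWithDevice:device];
//     [capture start];
//     // ... later
//     [capture stop];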