1/**************************************************************************** 2** 3** Copyright (C) 2016 The Qt Company Ltd. 4** Contact: https://www.qt.io/licensing/ 5** 6** This file is part of the Qt Toolkit. 7** 8** $QT_BEGIN_LICENSE:LGPL$ 9** Commercial License Usage 10** Licensees holding valid commercial Qt licenses may use this file in 11** accordance with the commercial license agreement provided with the 12** Software or, alternatively, in accordance with the terms contained in 13** a written agreement between you and The Qt Company. For licensing terms 14** and conditions see https://www.qt.io/terms-conditions. For further 15** information use the contact form at https://www.qt.io/contact-us. 16** 17** GNU Lesser General Public License Usage 18** Alternatively, this file may be used under the terms of the GNU Lesser 19** General Public License version 3 as published by the Free Software 20** Foundation and appearing in the file LICENSE.LGPL3 included in the 21** packaging of this file. Please review the following information to 22** ensure the GNU Lesser General Public License version 3 requirements 23** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. 24** 25** GNU General Public License Usage 26** Alternatively, this file may be used under the terms of the GNU 27** General Public License version 2.0 or (at your option) the GNU General 28** Public license version 3 or any later version approved by the KDE Free 29** Qt Foundation. The licenses are as published by the Free Software 30** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 31** included in the packaging of this file. Please review the following 32** information to ensure the GNU General Public License requirements will 33** be met: https://www.gnu.org/licenses/gpl-2.0.html and 34** https://www.gnu.org/licenses/gpl-3.0.html. 
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "avfaudioinputselectorcontrol.h"
#include "avfmediarecordercontrol_ios.h"
#include "avfcamerarenderercontrol.h"
#include "avfmediaassetwriter.h"
#include "avfcameraservice.h"
#include "avfcamerasession.h"
#include "avfcameradebug.h"
#include "avfmediacontainercontrol.h"

#include <QtCore/qmetaobject.h>
#include <QtCore/qatomic.h>

QT_USE_NAMESPACE

namespace {

// Sanity check: a camera service is usable for recording only when it has a
// session, a native AVCaptureSession, a video input device, and a renderer
// control exposing an AVCaptureVideoDataOutput we can tap frames from.
bool qt_camera_service_isValid(AVFCameraService *service)
{
    if (!service || !service->session())
        return false;

    AVFCameraSession *session = service->session();
    if (!session->captureSession())
        return false;

    if (!session->videoInput())
        return false;

    if (!service->videoOutput()
        || !service->videoOutput()->videoDataOutput()) {
        return false;
    }

    return true;
}

// Recorder state machine, stored in the atomic m_state ivar so that the
// capture/writer queues and the main (control) thread can all observe it.
enum WriterState
{
    WriterStateIdle,
    WriterStateActive,
    WriterStateAborted
};

using AVFAtomicInt64 = QAtomicInteger<qint64>;

} // unnamed namespace

// Private helpers, implemented below; declared here so they can be called
// from the public methods before their definitions appear.
@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
- (bool)addAudioCapture;
- (bool)addWriterInputs;
- (void)setQueues;
- (void)updateDuration:(CMTime)newTimeStamp;
@end

@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
{
@private
    AVFCameraService *m_service;

    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
    AVFScopedPointer<AVCaptureDeviceInput> m_audioInput;
    AVFScopedPointer<AVCaptureAudioDataOutput> m_audioOutput;
    AVFScopedPointer<AVCaptureDeviceInput> is not used here; the raw device:
    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;

    AVCaptureDevice *m_audioCaptureDevice;

    // Queue to write sample buffers:
    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
    // High priority serial queue for video output:
    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
    // Serial queue for audio output:
    AVFScopedPointer<dispatch_queue_t> m_audioQueue;

    AVFScopedPointer<AVAssetWriter> m_assetWriter;

    // Not owned; the recorder control that created us. It receives
    // assetWriterStarted/assetWriterFinished via queued invokeMethod.
    AVFMediaRecorderControlIOS *m_delegate;

    // True until the first sample buffer of a recording fixes the
    // session's start time (writer's queue only).
    bool m_setStartTime;

    // WriterState value; shared between threads, hence atomic.
    QAtomicInt m_state;

    // Presentation timestamps used to derive the recording duration
    // (writer's queue only).
    CMTime m_startTime;
    CMTime m_lastTimeStamp;

    NSDictionary *m_audioSettings;
    NSDictionary *m_videoSettings;

    // Duration in milliseconds, atomically published for -durationInMs.
    AVFAtomicInt64 m_durationInMs;
}

// Designated initializer. 'delegate' must be non-null and must either
// outlive this writer or call -abort from its destructor.
- (id)initWithDelegate:(AVFMediaRecorderControlIOS *)delegate
{
    Q_ASSERT(delegate);

    if (self = [super init]) {
        m_delegate = delegate;
        m_setStartTime = true;
        m_state.store(WriterStateIdle);
        m_startTime = kCMTimeInvalid;
        m_lastTimeStamp = kCMTimeInvalid;
        m_durationInMs.store(0);
        m_audioSettings = nil;
        m_videoSettings = nil;
    }

    return self;
}

// Prepares (but does not start) a recording into 'fileURL': creates the
// three dispatch queues, the AVAssetWriter, the optional audio capture
// chain and the writer inputs. Returns false (leaving no outputs attached
// to the capture session) on any fatal failure; a missing audio queue or
// audio device only disables audio, video recording still proceeds.
- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_camera_service_isValid(service)) {
        qDebugCamera() << Q_FUNC_INFO << "invalid camera service";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_videoQueue) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create video queue";
        return false;
    }
    // Video frames must not lag behind; run the video queue at high priority.
    dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_audioQueue) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create audio queue";
        // But we still can write video!
    }

    // NOTE(review): the NSError out-parameter is nil here, so a writer
    // creation failure is only detectable via the nil result.
    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
                                                  fileType:m_service->mediaContainerControl()->fileType()
                                                     error:nil]);
    if (!m_assetWriter) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create asset writer";
        return false;
    }

    bool audioCaptureOn = false;

    if (m_audioQueue)
        audioCaptureOn = [self addAudioCapture];

    if (![self addWriterInputs]) {
        // Roll back the audio input/output we may have attached to the
        // capture session above, then drop the writer.
        if (audioCaptureOn) {
            AVCaptureSession *session = m_service->session()->captureSession();
            [session removeOutput:m_audioOutput];
            [session removeInput:m_audioInput];
            m_audioOutput.reset();
            m_audioInput.reset();
            m_audioCaptureDevice = 0;
        }
        m_assetWriter.reset();
        return false;
    }

    // Bake the requested orientation/mirroring into the recorded video track.
    m_cameraWriterInput.data().transform = transform;

    // Ready to start ...
    return true;
}

// Starts the recording prepared by -setupWithFileURL:...: installs the
// sample-buffer delegates, flips the state machine to Active, starts the
// asset writer and (if needed) the capture session.
- (void)start
{
    [self setQueues];

    m_setStartTime = true;

    // storeRelease pairs with the loadAcquire in the queue callbacks.
    m_state.storeRelease(WriterStateActive);

    [m_assetWriter startWriting];
    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}

// Stops an active recording and asynchronously finalizes the output file;
// -assetWriterFinished is posted to the delegate when done. No-op unless
// we are Active and the writer is actually writing.
- (void)stop
{
    if (m_state.loadAcquire() != WriterStateActive)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    // before the block's execution:
    m_state.storeRelease(WriterStateIdle);
    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});
    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        // Tear down: stop the session and detach the audio chain we added,
        // then notify the recorder control on its own thread.
        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        [session removeOutput:m_audioOutput];
        [session removeInput:m_audioInput];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}

// Emergency teardown; does NOT notify the delegate (it is being destroyed).
- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    // Atomically claim the Aborted state; if we were not Active there is
    // nothing in flight to stop.
    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    // you must ensure that all calls to appendSampleBuffer: and
    // appendPixelBuffer:withPresentationTime: have returned before
    // invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    dispatch_sync(m_videoQueue, ^{});
    // After this point video queue will not try to modify our
    // viewfinder, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}

// Fixes the recording's start time from the first sample buffer and opens
// the writer's session at that timestamp. Writer's queue only.
- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);

    m_durationInMs.storeRelease(0);
    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_lastTimeStamp = m_startTime;
    [m_assetWriter startSessionAtSourceTime:m_startTime];
    m_setStartTime = false;
}

// Appends one video sample buffer to the writer input, updating the
// duration; drops the frame if the input is not ready. Balances the
// CFRetain done in -captureOutput:... with a CFRelease.
- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // This code is executed only on a writer's queue.
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_cameraWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
        }
    }

    CFRelease(sampleBuffer);
}

// Audio counterpart of -writeVideoSampleBuffer:; same ownership contract.
- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }

    CFRelease(sampleBuffer);
}

// AVCapture{Video,Audio}DataOutputSampleBufferDelegate entry point, invoked
// on m_videoQueue or m_audioQueue. Retains the buffer and hands it to the
// serial writer queue (the matching CFRelease is in the write* methods);
// video frames are also forwarded synchronously to the viewfinder delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection)

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        qDebugCamera() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    CFRetain(sampleBuffer);

    if (captureOutput != m_audioOutput.data()) {
        // NOTE(review): this re-check uses a plain load() while every other
        // site uses loadAcquire(); looks like an inconsistency — confirm
        // whether relaxed ordering is intentional here.
        if (m_state.load() != WriterStateActive) {
            CFRelease(sampleBuffer);
            return;
        }
        // Find renderercontrol's delegate and invoke its method to
        // show updated viewfinder's frame.
        if (m_service && m_service->videoOutput()) {
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->videoOutput()->captureDelegate();
            if (vfDelegate)
                [vfDelegate captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil];
        }

        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
        });
    } else {
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
        });
    }
}

// Creates the audio capture device input and AVCaptureAudioDataOutput and
// attaches both to the capture session. Returns false (with everything it
// created rolled back) if any step fails; audio is then simply disabled.
- (bool)addAudioCapture
{
    Q_ASSERT(m_service && m_service->session() && m_service->session()->captureSession());

    if (!m_service->audioInputSelectorControl())
        return false;

    AVCaptureSession *captureSession = m_service->session()->captureSession();

    m_audioCaptureDevice = m_service->audioInputSelectorControl()->createCaptureDevice();
    if (!m_audioCaptureDevice) {
        qWarning() << Q_FUNC_INFO << "no audio input device available";
        return false;
    } else {
        NSError *error = nil;
        // deviceInputWithDevice: returns an autoreleased object; retain it
        // before handing ownership to the scoped pointer.
        m_audioInput.reset([[AVCaptureDeviceInput deviceInputWithDevice:m_audioCaptureDevice error:&error] retain]);

        if (!m_audioInput || error) {
            qWarning() << Q_FUNC_INFO << "failed to create audio device input";
            m_audioCaptureDevice = 0;
            m_audioInput.reset();
            return false;
        } else if (![captureSession canAddInput:m_audioInput]) {
            qWarning() << Q_FUNC_INFO << "could not connect the audio input";
            m_audioCaptureDevice = 0;
            m_audioInput.reset();
            return false;
        } else {
            [captureSession addInput:m_audioInput];
        }
    }


    m_audioOutput.reset([[AVCaptureAudioDataOutput alloc] init]);
    if (m_audioOutput.data() && [captureSession canAddOutput:m_audioOutput]) {
        [captureSession addOutput:m_audioOutput];
    } else {
        qDebugCamera() << Q_FUNC_INFO << "failed to add audio output";
        [captureSession removeInput:m_audioInput];
        m_audioCaptureDevice = 0;
        m_audioInput.reset();
        m_audioOutput.reset();
        return false;
    }

    return true;
}

// Creates the video (mandatory) and audio (optional) AVAssetWriterInputs
// and adds them to the asset writer. Returns false only if the video input
// cannot be created/added; audio input failure degrades to video-only.
- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->videoOutput()
             && m_service->videoOutput()->videoDataOutput());
    Q_ASSERT(m_assetWriter.data());

    m_cameraWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                             outputSettings:m_videoSettings
                                                           sourceFormatHint:m_service->session()->videoCaptureDevice().activeFormat.formatDescription]);
    if (!m_cameraWriterInput) {
        qDebugCamera() << Q_FUNC_INFO << "failed to create camera writer input";
        return false;
    }

    if ([m_assetWriter canAddInput:m_cameraWriterInput]) {
        [m_assetWriter addInput:m_cameraWriterInput];
    } else {
        qDebugCamera() << Q_FUNC_INFO << "failed to add camera writer input";
        m_cameraWriterInput.reset();
        return false;
    }

    // Live capture: the writer must not buffer waiting for more data.
    m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;

    if (m_audioOutput.data()) {
        CMFormatDescriptionRef sourceFormat = m_audioCaptureDevice ? m_audioCaptureDevice.activeFormat.formatDescription : 0;
        m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                                                outputSettings:m_audioSettings
                                                              sourceFormatHint:sourceFormat]);
        if (!m_audioWriterInput) {
            qDebugCamera() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
        } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
            [m_assetWriter addInput:m_audioWriterInput];
            m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
        } else {
            qDebugCamera() << Q_FUNC_INFO << "failed to add audio writer input";
            m_audioWriterInput.reset();
            // We can (still) write video though ...
        }
    }

    return true;
}

// Installs self as the sample-buffer delegate of the video data output
// (and, when audio capture is on, of the audio data output) on the
// corresponding serial queues.
- (void)setQueues
{
    Q_ASSERT(m_service && m_service->videoOutput() && m_service->videoOutput()->videoDataOutput());
    Q_ASSERT(m_videoQueue);

    [m_service->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];

    if (m_audioOutput.data()) {
        Q_ASSERT(m_audioQueue);
        [m_audioOutput setSampleBufferDelegate:self queue:m_audioQueue];
    }
}

// Recomputes and publishes the recording duration from a new presentation
// timestamp; timestamps that do not advance past m_lastTimeStamp are
// ignored. Writer's queue only.
- (void)updateDuration:(CMTime)newTimeStamp
{
    // CMTimeCompare returning non-zero means the time differs from
    // kCMTimeInvalid, i.e. it has been set.
    Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
    Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {

        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
        if (!CMTimeCompare(duration, kCMTimeInvalid))
            return;

        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
        m_lastTimeStamp = newTimeStamp;
    }
}

// Thread-safe getter for the current recording duration in milliseconds.
- (qint64)durationInMs
{
    return m_durationInMs.loadAcquire();
}

@end