// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/capture/video/chromeos/request_manager.h"

#include <sync/sync.h>

#include <initializer_list>
#include <map>
#include <set>
#include <string>
#include <utility>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/posix/safe_strerror.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/video_capture_features_chromeos.h"
#include "mojo/public/cpp/platform/platform_handle.h"
#include "mojo/public/cpp/system/platform_handle.h"

namespace media {

namespace {

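// Sentinel frame number marking that no result buffer has been received yet
// for a stream.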
constexpr uint32_t kUndefinedFrameNumber = 0xFFFFFFFF;

constexpr std::initializer_list<StreamType> kYUVReprocessStreams = {
    StreamType::kYUVInput, StreamType::kJpegOutput};
}  // namespace

RequestManager::RequestManager(
    mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
        callback_ops_receiver,
    std::unique_ptr<StreamCaptureInterface> capture_interface,
    CameraDeviceContext* device_context,
    VideoCaptureBufferType buffer_type,
    std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
    BlobifyCallback blobify_callback,
    scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
    CameraAppDeviceImpl* camera_app_device,
    ClientType client_type)
    : callback_ops_(this, std::move(callback_ops_receiver)),
      capture_interface_(std::move(capture_interface)),
      device_context_(device_context),
      video_capture_use_gmb_(buffer_type ==
                             VideoCaptureBufferType::kGpuMemoryBuffer),
      stream_buffer_manager_(
          new StreamBufferManager(device_context_,
                                  video_capture_use_gmb_,
                                  std::move(camera_buffer_factory),
                                  client_type)),
      blobify_callback_(std::move(blobify_callback)),
      ipc_task_runner_(std::move(ipc_task_runner)),
      capturing_(false),
      partial_result_count_(1),
      first_frame_shutter_time_(base::TimeTicks()),
      camera_app_device_(std::move(camera_app_device)),
      client_type_(client_type) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(callback_ops_.is_bound());
  DCHECK(device_context_);
  // We use base::Unretained() for the StreamBufferManager here since we
  // guarantee |request_buffer_callback| is only used by RequestBuilder. In
  // addition, since C++ destroys member variables in reverse order of
  // construction, RequestBuilder is guaranteed to be destroyed before
  // StreamBufferManager because it is constructed after StreamBufferManager.
  auto request_buffer_callback =
      base::BindRepeating(&StreamBufferManager::RequestBufferForCaptureRequest,
                          base::Unretained(stream_buffer_manager_.get()));
  request_builder_ = std::make_unique<RequestBuilder>(
      device_context_, std::move(request_buffer_callback));
}

RequestManager::~RequestManager() = default;

void RequestManager::SetUpStreamsAndBuffers(
    VideoCaptureFormat capture_format,
    const cros::mojom::CameraMetadataPtr& static_metadata,
    std::vector<cros::mojom::Camera3StreamPtr> streams) {
  // The partial result count metadata is optional; it defaults to 1 when not
  // set in the static metadata.
  const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
      static_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
  if (partial_count) {
    partial_result_count_ =
        *reinterpret_cast<int32_t*>((*partial_count)->data.data());
  }

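  // The pipeline depth reported by the HAL bounds how many in-flight preview
  // requests we keep queued (see PrepareCaptureRequest).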
  auto pipeline_depth = GetMetadataEntryAsSpan<uint8_t>(
      static_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
  CHECK_EQ(pipeline_depth.size(), 1u);
  pipeline_depth_ = pipeline_depth[0];
  preview_buffers_queued_ = 0;

  // Set the last received frame number for each stream type to undefined.
  for (const auto& stream : streams) {
    StreamType stream_type = StreamIdToStreamType(stream->id);
    last_received_frame_number_map_[stream_type] = kUndefinedFrameNumber;
  }

  stream_buffer_manager_->SetUpStreamsAndBuffers(
      capture_format, static_metadata, std::move(streams));
}

cros::mojom::Camera3StreamPtr RequestManager::GetStreamConfiguration(
    StreamType stream_type) {
  return stream_buffer_manager_->GetStreamConfiguration(stream_type);
}

bool RequestManager::HasStreamsConfiguredForTakePhoto() {
  if (stream_buffer_manager_->IsReprocessSupported()) {
    return stream_buffer_manager_->HasStreamsConfigured(
        {StreamType::kPreviewOutput, StreamType::kJpegOutput,
         StreamType::kYUVInput, StreamType::kYUVOutput});
  } else {
    return stream_buffer_manager_->HasStreamsConfigured(
        {StreamType::kPreviewOutput, StreamType::kJpegOutput});
  }
}

void RequestManager::StartPreview(
    cros::mojom::CameraMetadataPtr preview_settings) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(repeating_request_settings_.is_null());

  capturing_ = true;
  repeating_request_settings_ = std::move(preview_settings);

  PrepareCaptureRequest();
}

void RequestManager::StopPreview(base::OnceCallback<void(int32_t)> callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  capturing_ = false;
  repeating_request_settings_ = nullptr;
  if (callback) {
    capture_interface_->Flush(std::move(callback));
  }
}

void RequestManager::TakePhoto(cros::mojom::CameraMetadataPtr settings,
                               ReprocessTaskQueue reprocess_tasks) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (stream_buffer_manager_->IsReprocessSupported()) {
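    // Queue the reprocess tasks; they are bound to the YUV output buffer of
    // this shot once the capture result comes back (see SubmitCaptureResult).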
    pending_reprocess_tasks_queue_.push(std::move(reprocess_tasks));
  } else {
    // There should be only one reprocess task in the queue, which is the
    // format conversion task.
    DCHECK_EQ(reprocess_tasks.size(), 1lu);

    take_photo_callback_queue_.push(
        std::move(reprocess_tasks.front().callback));
  }
  take_photo_settings_queue_.push(std::move(settings));
}

base::WeakPtr<RequestManager> RequestManager::GetWeakPtr() {
  return weak_ptr_factory_.GetWeakPtr();
}

void RequestManager::AddResultMetadataObserver(
    ResultMetadataObserver* observer) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(!result_metadata_observers_.count(observer));

  result_metadata_observers_.insert(observer);
}

void RequestManager::RemoveResultMetadataObserver(
    ResultMetadataObserver* observer) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(result_metadata_observers_.count(observer));

  result_metadata_observers_.erase(observer);
}

void RequestManager::SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
                                        cros::mojom::EntryType type,
                                        size_t count,
                                        std::vector<uint8_t> value) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  cros::mojom::CameraMetadataEntryPtr setting =
      cros::mojom::CameraMetadataEntry::New();

  setting->tag = tag;
  setting->type = type;
  setting->count = count;
  setting->data = std::move(value);

  capture_settings_override_.push_back(std::move(setting));
}

void RequestManager::SetRepeatingCaptureMetadata(
    cros::mojom::CameraMetadataTag tag,
    cros::mojom::EntryType type,
    size_t count,
    std::vector<uint8_t> value) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  cros::mojom::CameraMetadataEntryPtr setting =
      cros::mojom::CameraMetadataEntry::New();

  setting->tag = tag;
  setting->type = type;
  setting->count = count;
  setting->data = std::move(value);

  capture_settings_repeating_override_[tag] = std::move(setting);
}

void RequestManager::UnsetRepeatingCaptureMetadata(
    cros::mojom::CameraMetadataTag tag) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  auto it = capture_settings_repeating_override_.find(tag);
  if (it == capture_settings_repeating_override_.end()) {
    LOG(ERROR) << "Unset a non-existent metadata: " << tag;
    return;
  }
  capture_settings_repeating_override_.erase(it);
}

void RequestManager::SetJpegOrientation(
    cros::mojom::CameraMetadataPtr* settings,
    int32_t orientation) {
  auto e = BuildMetadataEntry(
      cros::mojom::CameraMetadataTag::ANDROID_JPEG_ORIENTATION, orientation);
  AddOrUpdateMetadataEntry(settings, std::move(e));
}

void RequestManager::SetSensorTimestamp(
    cros::mojom::CameraMetadataPtr* settings,
    uint64_t shutter_timestamp) {
  auto e = BuildMetadataEntry(
      cros::mojom::CameraMetadataTag::ANDROID_SENSOR_TIMESTAMP,
      base::checked_cast<int64_t>(shutter_timestamp));
  AddOrUpdateMetadataEntry(settings, std::move(e));
}

void RequestManager::SetZeroShutterLag(cros::mojom::CameraMetadataPtr* settings,
                                       bool enabled) {
  auto e = BuildMetadataEntry(
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_ENABLE_ZSL,
      static_cast<uint8_t>(enabled));
  AddOrUpdateMetadataEntry(settings, std::move(e));
}

void RequestManager::PrepareCaptureRequest() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (!capturing_) {
    return;
  }

  // There are two types of devices, each with several possible combinations
  // of streams.
  //
  // For a device with reprocess capability:
  // 1. Preview
  // 2. Capture (YuvOutput)
  // 3. Preview + Capture (YuvOutput)
  // 4. Reprocess (YuvInput + BlobOutput)
  //
  // For a device without reprocess capability:
  // 1. Preview
  // 2. Capture (BlobOutput)
  // 3. Preview + Capture (BlobOutput)
  std::set<StreamType> stream_types;
  cros::mojom::CameraMetadataPtr settings;
  TakePhotoCallback callback = base::NullCallback();
  base::Optional<uint64_t> input_buffer_id;
  cros::mojom::Effect reprocess_effect = cros::mojom::Effect::NO_EFFECT;

  bool is_reprocess_request = false;
  bool is_preview_request = false;
  bool is_oneshot_request = false;

  // First, check if there are pending reprocess tasks.
  is_reprocess_request = TryPrepareReprocessRequest(
      &stream_types, &settings, &callback, &input_buffer_id, &reprocess_effect);

  // If there is no pending reprocess task, then check if there are pending
  // one-shot requests, and also try to put the preview in the request.
  if (!is_reprocess_request) {
    is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);

    // Order matters here. If the preview request and the one-shot request are
    // both added to a single capture request, the settings will be overridden
    // by the latter.
    is_oneshot_request =
        TryPrepareOneShotRequest(&stream_types, &settings, &callback);
  }

  if (!is_reprocess_request && !is_oneshot_request && !is_preview_request) {
    // We have to keep the pipeline full.
    if (preview_buffers_queued_ < pipeline_depth_) {
      ipc_task_runner_->PostTask(
          FROM_HERE,
          base::BindOnce(&RequestManager::PrepareCaptureRequest, GetWeakPtr()));
    }
    return;
  }

  auto capture_request = request_builder_->BuildRequest(
      std::move(stream_types), std::move(settings), input_buffer_id);
  CHECK_GT(capture_request->output_buffers.size(), 0u);

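  // Track the pending result for this frame number so that the metadata,
  // shutter time, and output buffers can be matched up as they arrive
  // asynchronously from the HAL.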
  CaptureResult& pending_result =
      pending_results_[capture_request->frame_number];
  pending_result.unsubmitted_buffer_count =
      capture_request->output_buffers.size();
  pending_result.input_buffer_id = input_buffer_id;
  pending_result.reprocess_effect = reprocess_effect;
  pending_result.still_capture_callback = std::move(callback);
  pending_result.orientation = device_context_->GetCameraFrameRotation();

  // For reprocess-supported devices, bind the ReprocessTaskQueue with this
  // frame number. Once the shot result is returned, we will rebind the
  // ReprocessTaskQueue with the id of the YUV buffer that contains the result.
  if (is_oneshot_request && stream_buffer_manager_->IsReprocessSupported() &&
      !pending_reprocess_tasks_queue_.empty()) {
    frame_number_reprocess_tasks_map_[capture_request->frame_number] =
        std::move(pending_reprocess_tasks_queue_.front());
    pending_reprocess_tasks_queue_.pop();
  }

  if (is_preview_request) {
    ++preview_buffers_queued_;
  }

  // Currently only 3A-related settings are applied, which means we don't
  // need to apply them to reprocess requests.
  if (!is_reprocess_request) {
    UpdateCaptureSettings(&capture_request->settings);
  }
  capture_interface_->ProcessCaptureRequest(
      std::move(capture_request),
      base::BindOnce(&RequestManager::OnProcessedCaptureRequest, GetWeakPtr()));
}

bool RequestManager::TryPrepareReprocessRequest(
    std::set<StreamType>* stream_types,
    cros::mojom::CameraMetadataPtr* settings,
    TakePhotoCallback* callback,
    base::Optional<uint64_t>* input_buffer_id,
    cros::mojom::Effect* reprocess_effect) {
  if (buffer_id_reprocess_job_info_map_.empty() ||
      !stream_buffer_manager_->HasFreeBuffers(kYUVReprocessStreams)) {
    return false;
  }

  // Consume a reprocess task from the first buffer that is not currently
  // being processed.
  ReprocessJobInfo* reprocess_job_info;
  for (auto& it : buffer_id_reprocess_job_info_map_) {
    if (processing_buffer_ids_.count(it.first) == 0) {
      *input_buffer_id = it.first;
      reprocess_job_info = &it.second;
      break;
    }
  }

  if (!*input_buffer_id) {
    return false;
  }

  ReprocessTaskQueue* reprocess_task_queue = &reprocess_job_info->task_queue;
  ReprocessTask task = std::move(reprocess_task_queue->front());
  reprocess_task_queue->pop();

  stream_types->insert(kYUVReprocessStreams);
  // Prepare metadata by adding extra metadata.
  *settings = reprocess_job_info->metadata.Clone();
  SetSensorTimestamp(settings, reprocess_job_info->shutter_timestamp);
  SetJpegOrientation(settings, reprocess_job_info->orientation);
  for (auto& metadata : task.extra_metadata) {
    AddOrUpdateMetadataEntry(settings, std::move(metadata));
  }
  *callback = std::move(task.callback);
  *reprocess_effect = task.effect;
  processing_buffer_ids_.insert(**input_buffer_id);

  // Remove the mapping from the map once all tasks are consumed.
  if (reprocess_task_queue->empty()) {
    buffer_id_reprocess_job_info_map_.erase(**input_buffer_id);
  }
  return true;
}

bool RequestManager::TryPreparePreviewRequest(
    std::set<StreamType>* stream_types,
    cros::mojom::CameraMetadataPtr* settings) {
  if (preview_buffers_queued_ == pipeline_depth_) {
    return false;
  }
  if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
    // Try our best to reserve a usable buffer.  If the reservation still
    // fails, then we'd have to drop the camera frame.
    DLOG(WARNING) << "Late request for reserving preview buffer";
    stream_buffer_manager_->ReserveBuffer(StreamType::kPreviewOutput);
    if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
      DLOG(WARNING) << "No free buffer for preview stream";
      return false;
    }
  }

  stream_types->insert({StreamType::kPreviewOutput});
  *settings = repeating_request_settings_.Clone();
  return true;
}

bool RequestManager::TryPrepareOneShotRequest(
    std::set<StreamType>* stream_types,
    cros::mojom::CameraMetadataPtr* settings,
    TakePhotoCallback* callback) {
  if (stream_buffer_manager_->IsReprocessSupported()) {
    // For devices that support reprocess, fill the frame data into a YUV
    // buffer and reprocess that YUV buffer.
    if (take_photo_settings_queue_.empty() ||
        !stream_buffer_manager_->HasFreeBuffers({StreamType::kYUVOutput})) {
      return false;
    }
    stream_types->insert({StreamType::kYUVOutput});
    *settings = std::move(take_photo_settings_queue_.front());
  } else {
    // For devices that do not support reprocess, fill the frame data into a
    // BLOB buffer and set the callback.
    if (take_photo_settings_queue_.empty() ||
        take_photo_callback_queue_.empty() ||
        !stream_buffer_manager_->HasFreeBuffers({StreamType::kJpegOutput})) {
      return false;
    }
    stream_types->insert({StreamType::kJpegOutput});
    *callback = std::move(take_photo_callback_queue_.front());
    take_photo_callback_queue_.pop();

    *settings = std::move(take_photo_settings_queue_.front());
    SetJpegOrientation(settings, device_context_->GetCameraFrameRotation());
  }
  SetZeroShutterLag(settings, true);
  take_photo_settings_queue_.pop();
  return true;
}

void RequestManager::OnProcessedCaptureRequest(int32_t result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (!capturing_) {
    return;
  }
  if (result != 0) {
    device_context_->SetErrorState(
        media::VideoCaptureError::
            kCrosHalV3BufferManagerProcessCaptureRequestFailed,
        FROM_HERE,
        std::string("Process capture request failed: ") +
            base::safe_strerror(-result));
    return;
  }

  PrepareCaptureRequest();
}

void RequestManager::ProcessCaptureResult(
    cros::mojom::Camera3CaptureResultPtr result) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (!capturing_) {
    return;
  }
  uint32_t frame_number = result->frame_number;
  // A new partial result may be created in either ProcessCaptureResult or
  // Notify.
  CaptureResult& pending_result = pending_results_[frame_number];

  // |result->partial_result| is set to 0 if the capture result contains only
  // the result buffer handles and no result metadata.
  if (result->partial_result != 0) {
    uint32_t result_id = result->partial_result;
    if (result_id > partial_result_count_) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerInvalidPendingResultId,
          FROM_HERE,
          std::string("Invalid pending_result id: ") +
              base::NumberToString(result_id));
      return;
    }
    if (pending_result.partial_metadata_received.count(result_id)) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerReceivedDuplicatedPartialMetadata,
          FROM_HERE,
          std::string("Received duplicated partial metadata: ") +
              base::NumberToString(result_id));
      return;
    }
    DVLOG(2) << "Received partial result " << result_id << " for frame "
             << frame_number;
    pending_result.partial_metadata_received.insert(result_id);
    MergeMetadata(&pending_result.metadata, result->result);
  }

  if (result->output_buffers) {
    if (result->output_buffers->size() > kMaxConfiguredStreams) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerIncorrectNumberOfOutputBuffersReceived,
          FROM_HERE,
          std::string("Incorrect number of output buffers received: ") +
              base::NumberToString(result->output_buffers->size()));
      return;
    }

    for (auto& stream_buffer : result->output_buffers.value()) {
      DVLOG(2) << "Received capture result for frame " << frame_number
               << " stream_id: " << stream_buffer->stream_id;
      StreamType stream_type = StreamIdToStreamType(stream_buffer->stream_id);
      if (stream_type == StreamType::kUnknown) {
        device_context_->SetErrorState(
            media::VideoCaptureError::
                kCrosHalV3BufferManagerInvalidTypeOfOutputBuffersReceived,
            FROM_HERE,
            std::string("Invalid type of output buffers received: ") +
                base::NumberToString(stream_buffer->stream_id));
        return;
      }

      // The camera HAL v3 API specifies that only one capture result can carry
      // the result buffer for any given frame number.
      if (last_received_frame_number_map_[stream_type] ==
          kUndefinedFrameNumber) {
        last_received_frame_number_map_[stream_type] = frame_number;
      } else {
        if (last_received_frame_number_map_[stream_type] == frame_number) {
          device_context_->SetErrorState(
              media::VideoCaptureError::
                  kCrosHalV3BufferManagerReceivedMultipleResultBuffersForFrame,
              FROM_HERE,
              std::string("Received multiple result buffers for frame ") +
                  base::NumberToString(frame_number) +
                  std::string(" for stream ") +
                  base::NumberToString(stream_buffer->stream_id));
          return;
        } else if (last_received_frame_number_map_[stream_type] >
                   frame_number) {
          device_context_->SetErrorState(
              media::VideoCaptureError::
                  kCrosHalV3BufferManagerReceivedFrameIsOutOfOrder,
              FROM_HERE,
              std::string("Received frame is out-of-order; expect frame number "
                          "greater than ") +
                  base::NumberToString(
                      last_received_frame_number_map_[stream_type]) +
                  std::string(" but got ") +
                  base::NumberToString(frame_number));
        } else {
          last_received_frame_number_map_[stream_type] = frame_number;
        }
      }

      if (stream_buffer->status ==
          cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
        // If the buffer is marked as error, its content is discarded for this
        // frame.  Send the buffer to the free list directly through
        // SubmitCaptureResult.
        SubmitCaptureResult(frame_number, stream_type,
                            std::move(stream_buffer));
      } else {
        pending_result.buffers[stream_type] = std::move(stream_buffer);
      }
    }
  }

  TRACE_EVENT1("camera", "Capture Result", "frame_number", frame_number);
  TrySubmitPendingBuffers(frame_number);
}

void RequestManager::TrySubmitPendingBuffers(uint32_t frame_number) {
  if (!pending_results_.count(frame_number)) {
    return;
  }

  CaptureResult& pending_result = pending_results_[frame_number];

  // If the metadata is not ready or the shutter time is not set, just
  // return.
  bool is_ready_to_submit =
      pending_result.partial_metadata_received.size() > 0 &&
      *pending_result.partial_metadata_received.rbegin() ==
          partial_result_count_ &&
      !pending_result.reference_time.is_null();
  if (!is_ready_to_submit) {
    return;
  }

  if (!pending_result.buffers.empty()) {
    // Put the pending buffers into a local map since |pending_result| might be
    // deleted in SubmitCaptureResult(). We should not reference
    // |pending_result| after SubmitCaptureResult() is triggered.
    std::map<StreamType, cros::mojom::Camera3StreamBufferPtr> buffers =
        std::move(pending_result.buffers);
    for (auto& it : buffers) {
      SubmitCaptureResult(frame_number, it.first, std::move(it.second));
    }
  }
}

void RequestManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (!capturing_) {
    return;
  }
  if (message->type == cros::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
    auto error = std::move(message->message->get_error());
    uint32_t frame_number = error->frame_number;
    uint64_t error_stream_id = error->error_stream_id;
    StreamType stream_type = StreamIdToStreamType(error_stream_id);
    if (stream_type == StreamType::kUnknown) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerUnknownStreamInCamera3NotifyMsg,
          FROM_HERE,
          std::string("Unknown stream in Camera3NotifyMsg: ") +
              base::NumberToString(error_stream_id));
      return;
    }
    cros::mojom::Camera3ErrorMsgCode error_code = error->error_code;
    HandleNotifyError(frame_number, stream_type, error_code);
  } else if (message->type ==
             cros::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER) {
    auto shutter = std::move(message->message->get_shutter());
    uint32_t frame_number = shutter->frame_number;
    uint64_t shutter_time = shutter->timestamp;
    DVLOG(2) << "Received shutter time for frame " << frame_number;
    if (!shutter_time) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerReceivedInvalidShutterTime,
          FROM_HERE,
          std::string("Received invalid shutter time: ") +
              base::NumberToString(shutter_time));
      return;
    }
    CaptureResult& pending_result = pending_results_[frame_number];
    pending_result.shutter_timestamp = shutter_time;
    // Shutter timestamp is in ns.
    base::TimeTicks reference_time =
        base::TimeTicks() +
        base::TimeDelta::FromMicroseconds(shutter_time / 1000);
    pending_result.reference_time = reference_time;
    if (first_frame_shutter_time_.is_null()) {
      // Record the shutter time of the first frame for calculating the
      // timestamp.
      first_frame_shutter_time_ = reference_time;
    }
    pending_result.timestamp = reference_time - first_frame_shutter_time_;
    if (camera_app_device_ && pending_result.still_capture_callback) {
      camera_app_device_->OnShutterDone();
    }

    TrySubmitPendingBuffers(frame_number);
  }
}

void RequestManager::HandleNotifyError(
    uint32_t frame_number,
    StreamType stream_type,
    cros::mojom::Camera3ErrorMsgCode error_code) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  std::string warning_msg;

  switch (error_code) {
    case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE:
      // Fatal error and no more frames will be produced by the device.
      device_context_->SetErrorState(
          media::VideoCaptureError::kCrosHalV3BufferManagerFatalDeviceError,
          FROM_HERE, "Fatal device error");
      return;

    case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_REQUEST:
      // An error has occurred in processing the request; the request
      // specified by |frame_number| has been dropped by the camera device.
      // Subsequent requests are unaffected.
      //
      // The HAL will call ProcessCaptureResult with the buffers' state set to
      // STATUS_ERROR.  The content of the buffers will be dropped and the
      // buffers will be reused in SubmitCaptureResult.
      warning_msg =
          std::string("An error occurred while processing request for frame ") +
          base::NumberToString(frame_number);
      break;

    case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_RESULT:
      // An error has occurred in producing the output metadata buffer for a
      // result; the output metadata will not be available for the frame
      // specified by |frame_number|.  Subsequent requests are unaffected.
      warning_msg = std::string(
                        "An error occurred while producing result "
                        "metadata for frame ") +
                    base::NumberToString(frame_number);
      break;

    case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
      // An error has occurred in placing the output buffer into a stream for
      // a request. |frame_number| specifies the request for which the buffer
      // was dropped, and |stream_type| specifies the stream that dropped
      // the buffer.
      //
      // The HAL will call ProcessCaptureResult with the buffer's state set to
      // STATUS_ERROR.  The content of the buffer will be dropped and the
      // buffer will be reused in SubmitCaptureResult.
      warning_msg =
          std::string(
              "An error occurred while filling output buffer for frame ") +
          base::NumberToString(frame_number);
      break;

    default:
      // The default case exists only to eliminate the compiler warning for
      // not handling CAMERA3_MSG_NUM_ERRORS.
      break;
  }

  LOG(WARNING) << warning_msg << " with type = " << stream_type;
  device_context_->LogToClient(warning_msg);

  // If the buffer is already returned by the HAL, submit it and we're done.
  if (pending_results_.count(frame_number)) {
    auto it = pending_results_[frame_number].buffers.find(stream_type);
    if (it != pending_results_[frame_number].buffers.end()) {
      auto stream_buffer = std::move(it->second);
      pending_results_[frame_number].buffers.erase(stream_type);
      SubmitCaptureResult(frame_number, stream_type, std::move(stream_buffer));
    }
  }
}

void RequestManager::SubmitCaptureResult(
    uint32_t frame_number,
    StreamType stream_type,
    cros::mojom::Camera3StreamBufferPtr stream_buffer) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(pending_results_.count(frame_number));

  CaptureResult& pending_result = pending_results_[frame_number];
  DVLOG(2) << "Submit capture result of frame " << frame_number
           << " for stream " << static_cast<int>(stream_type);
  for (auto* observer : result_metadata_observers_) {
    observer->OnResultMetadataAvailable(frame_number, pending_result.metadata);
  }

  if (camera_app_device_) {
    camera_app_device_->OnResultMetadataAvailable(
        pending_result.metadata,
        static_cast<cros::mojom::StreamType>(stream_type));
  }

  // Wait on the release fence before delivering the result buffer to the
  // client.
  if (stream_buffer->release_fence.is_valid()) {
    const int kSyncWaitTimeoutMs = 1000;
    mojo::PlatformHandle fence =
        mojo::UnwrapPlatformHandle(std::move(stream_buffer->release_fence));
    if (!fence.is_valid()) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerFailedToUnwrapReleaseFenceFd,
          FROM_HERE, "Failed to unwrap release fence fd");
      return;
    }
    if (sync_wait(fence.GetFD().get(), kSyncWaitTimeoutMs)) {
      device_context_->SetErrorState(
          media::VideoCaptureError::
              kCrosHalV3BufferManagerSyncWaitOnReleaseFenceTimedOut,
          FROM_HERE, "Sync wait on release fence timed out");
      return;
    }
  }

  uint64_t buffer_ipc_id = stream_buffer->buffer_id;
  // Deliver the captured data to the client.
  if (stream_buffer->status ==
      cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK) {
    if (stream_type == StreamType::kPreviewOutput) {
      SubmitCapturedPreviewBuffer(frame_number, buffer_ipc_id);
    } else if (stream_type == StreamType::kJpegOutput) {
      SubmitCapturedJpegBuffer(frame_number, buffer_ipc_id);
    } else if (stream_type == StreamType::kYUVOutput) {
      DCHECK_GT(pending_result.shutter_timestamp, 0UL);
      ReprocessJobInfo reprocess_job_info(
          std::move(frame_number_reprocess_tasks_map_[frame_number]),
          std::move(pending_result.metadata), pending_result.shutter_timestamp,
          pending_result.orientation);
      buffer_id_reprocess_job_info_map_.emplace(buffer_ipc_id,
                                                std::move(reprocess_job_info));
      frame_number_reprocess_tasks_map_.erase(frame_number);

      // Don't release the buffer since we will need it as the input buffer
      // for reprocessing. We will release it only after all reprocess tasks
      // for this buffer are done.
    }
  } else {
    stream_buffer_manager_->ReleaseBufferFromCaptureResult(stream_type,
                                                           buffer_ipc_id);
  }

  if (stream_type == StreamType::kPreviewOutput) {
    --preview_buffers_queued_;
  }

  pending_result.unsubmitted_buffer_count--;

  if (pending_result.unsubmitted_buffer_count == 0) {
    pending_results_.erase(frame_number);
  }
  // Every time a buffer is released, try to prepare another capture request.
  PrepareCaptureRequest();
}

void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
                                                 uint64_t buffer_ipc_id) {
  const CaptureResult& pending_result = pending_results_[frame_number];
  if (video_capture_use_gmb_) {
    VideoCaptureFormat format;
    base::Optional<VideoCaptureDevice::Client::Buffer> buffer =
        stream_buffer_manager_->AcquireBufferForClientById(
            StreamType::kPreviewOutput, buffer_ipc_id, &format);
    CHECK(buffer);

    // TODO: Figure out the right color space for the camera frame.  We may
    // need to populate the camera metadata with the color space reported by
    // the V4L2 device.
    VideoFrameMetadata metadata;
    if (base::FeatureList::IsEnabled(
            features::kDisableCameraFrameRotationAtSource)) {
      // Camera frame rotation at source is disabled, so we record the intended
      // video frame rotation in the metadata.  The consumer of the video frame
      // is responsible for taking care of the frame rotation.
      auto translate_rotation = [](const int rotation) -> VideoRotation {
        switch (rotation) {
          case 0:
            return VideoRotation::VIDEO_ROTATION_0;
          case 90:
            return VideoRotation::VIDEO_ROTATION_90;
          case 180:
            return VideoRotation::VIDEO_ROTATION_180;
          case 270:
            return VideoRotation::VIDEO_ROTATION_270;
        }
        return VideoRotation::VIDEO_ROTATION_0;
      };
      metadata.rotation =
          translate_rotation(device_context_->GetRotationForDisplay());
    } else {
      // All frames are pre-rotated to the display orientation.
      metadata.rotation = VideoRotation::VIDEO_ROTATION_0;
    }
    device_context_->SubmitCapturedVideoCaptureBuffer(
        client_type_, std::move(*buffer), format, pending_result.reference_time,
        pending_result.timestamp, metadata);
    // |buffer| ownership is transferred to the client, so we need to reserve
    // a new video buffer.
    stream_buffer_manager_->ReserveBuffer(StreamType::kPreviewOutput);
  } else {
    gfx::GpuMemoryBuffer* gmb = stream_buffer_manager_->GetGpuMemoryBufferById(
        StreamType::kPreviewOutput, buffer_ipc_id);
    CHECK(gmb);
    device_context_->SubmitCapturedGpuMemoryBuffer(
        client_type_, gmb,
        stream_buffer_manager_->GetStreamCaptureFormat(
            StreamType::kPreviewOutput),
        pending_result.reference_time, pending_result.timestamp);
    stream_buffer_manager_->ReleaseBufferFromCaptureResult(
        StreamType::kPreviewOutput, buffer_ipc_id);
  }
}

void RequestManager::SubmitCapturedJpegBuffer(uint32_t frame_number,
                                              uint64_t buffer_ipc_id) {
  CaptureResult& pending_result = pending_results_[frame_number];
  DCHECK(pending_result.still_capture_callback);
  gfx::Size buffer_dimension =
      stream_buffer_manager_->GetBufferDimension(StreamType::kJpegOutput);
  gfx::GpuMemoryBuffer* gmb = stream_buffer_manager_->GetGpuMemoryBufferById(
      StreamType::kJpegOutput, buffer_ipc_id);
  CHECK(gmb);
  if (video_capture_use_gmb_ && !gmb->Map()) {
    device_context_->SetErrorState(
        media::VideoCaptureError::
            kCrosHalV3BufferManagerFailedToCreateGpuMemoryBuffer,
        FROM_HERE, "Failed to map GPU memory buffer");
    return;
  }

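  // The camera HAL3 API places a Camera3JpegBlob header at the very end of
  // the JPEG buffer; here the buffer size corresponds to the configured blob
  // stream width.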
  const Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
      reinterpret_cast<const uintptr_t>(gmb->memory(0)) +
      buffer_dimension.width() - sizeof(Camera3JpegBlob));
  if (header->jpeg_blob_id != kCamera3JpegBlobId) {
    device_context_->SetErrorState(
        media::VideoCaptureError::kCrosHalV3BufferManagerInvalidJpegBlob,
        FROM_HERE, "Invalid JPEG blob");
    if (video_capture_use_gmb_) {
      gmb->Unmap();
    }
    return;
  }
  // The still capture result from HALv3 already has orientation info in its
  // EXIF, so just provide 0 as the screen rotation in the |blobify_callback_|
  // parameters.
  mojom::BlobPtr blob = blobify_callback_.Run(
      reinterpret_cast<const uint8_t*>(gmb->memory(0)), header->jpeg_size,
      stream_buffer_manager_->GetStreamCaptureFormat(StreamType::kJpegOutput),
      0);
  if (blob) {
    int task_status = kReprocessSuccess;
    if (stream_buffer_manager_->IsReprocessSupported()) {
      task_status = CameraAppDeviceImpl::GetReprocessReturnCode(
          pending_result.reprocess_effect, &pending_result.metadata);
    }
    std::move(pending_result.still_capture_callback)
        .Run(task_status, std::move(blob));
  } else {
    // TODO(wtlee): If it is fatal, we should set the error state here.
    LOG(ERROR) << "Failed to blobify the captured JPEG image";
  }

  if (pending_result.input_buffer_id) {
    // Remove the id from the processing list to run the next reprocess task.
    processing_buffer_ids_.erase(*pending_result.input_buffer_id);

    // If all reprocess tasks are done for this buffer, release the buffer.
    if (!base::Contains(buffer_id_reprocess_job_info_map_,
                        *pending_result.input_buffer_id)) {
      stream_buffer_manager_->ReleaseBufferFromCaptureResult(
          StreamType::kYUVOutput, *pending_result.input_buffer_id);
    }
  }
  stream_buffer_manager_->ReleaseBufferFromCaptureResult(
      StreamType::kJpegOutput, buffer_ipc_id);
  if (video_capture_use_gmb_) {
    gmb->Unmap();
  }
}

void RequestManager::UpdateCaptureSettings(
    cros::mojom::CameraMetadataPtr* capture_settings) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  if (capture_settings_override_.empty() &&
      capture_settings_repeating_override_.empty()) {
    return;
  }

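  // Apply the repeating overrides first so that one-shot overrides take
  // precedence for this request.  One-shot overrides are consumed here.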
  for (const auto& setting : capture_settings_repeating_override_) {
    AddOrUpdateMetadataEntry(capture_settings, setting.second.Clone());
  }

  for (auto& s : capture_settings_override_) {
    AddOrUpdateMetadataEntry(capture_settings, std::move(s));
  }
  capture_settings_override_.clear();
  SortCameraMetadata(capture_settings);
}

RequestManager::CaptureResult::CaptureResult()
    : metadata(cros::mojom::CameraMetadata::New()),
      unsubmitted_buffer_count(0) {}

RequestManager::CaptureResult::~CaptureResult() = default;

RequestManager::ReprocessJobInfo::ReprocessJobInfo(
    ReprocessTaskQueue queue,
    cros::mojom::CameraMetadataPtr metadata,
    uint64_t timestamp,
    int32_t orientation)
    : task_queue(std::move(queue)),
      metadata(std::move(metadata)),
      shutter_timestamp(timestamp),
      orientation(orientation) {}

RequestManager::ReprocessJobInfo::ReprocessJobInfo(ReprocessJobInfo&& info)
    : task_queue(std::move(info.task_queue)),
      metadata(std::move(info.metadata)),
      shutter_timestamp(info.shutter_timestamp),
      orientation(info.orientation) {}

RequestManager::ReprocessJobInfo::~ReprocessJobInfo() = default;

}  // namespace media