// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"

#include <string.h>
#include <va/va.h>

#include <iostream>
#include <type_traits>

#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "media/base/video_types.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/parsers/jpeg_parser.h"
#include "ui/gfx/geometry/size.h"

namespace media {

namespace {

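// Copies the frame dimensions and each component's sampling factors and
// quantization table selector from the parsed SOF header into the libva
// picture parameter buffer.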
static void FillPictureParameters(
    const JpegFrameHeader& frame_header,
    VAPictureParameterBufferJPEGBaseline* pic_param) {
  pic_param->picture_width = frame_header.coded_width;
  pic_param->picture_height = frame_header.coded_height;
  pic_param->num_components = frame_header.num_components;

  for (int i = 0; i < pic_param->num_components; i++) {
    pic_param->components[i].component_id = frame_header.components[i].id;
    pic_param->components[i].h_sampling_factor =
        frame_header.components[i].horizontal_sampling_factor;
    pic_param->components[i].v_sampling_factor =
        frame_header.components[i].vertical_sampling_factor;
    pic_param->components[i].quantiser_table_selector =
        frame_header.components[i].quantization_table_selector;
  }
}

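// Loads every quantization table that the parser marked as valid into the
// libva IQ matrix buffer; tables absent from the bitstream are left with
// load_quantiser_table[i] == 0.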
static void FillIQMatrix(const JpegQuantizationTable* q_table,
                         VAIQMatrixBufferJPEGBaseline* iq_matrix) {
  static_assert(kJpegMaxQuantizationTableNum ==
                    std::extent<decltype(iq_matrix->load_quantiser_table)>(),
                "max number of quantization table mismatched");
  static_assert(
      sizeof(iq_matrix->quantiser_table[0]) == sizeof(q_table[0].value),
      "number of quantization entries mismatched");
  for (size_t i = 0; i < kJpegMaxQuantizationTableNum; i++) {
    if (!q_table[i].valid)
      continue;
    iq_matrix->load_quantiser_table[i] = 1;
    for (size_t j = 0; j < base::size(q_table[i].value); j++)
      iq_matrix->quantiser_table[i][j] = q_table[i].value[j];
  }
}

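// Loads the DC/AC Huffman tables into the libva Huffman table buffer. If the
// bitstream carried no Huffman tables at all, the parser's defaults
// (kDefaultDcTable/kDefaultAcTable) are used; a table slot is only loaded
// when both its DC and AC halves are valid.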
static void FillHuffmanTable(const JpegHuffmanTable* dc_table,
                             const JpegHuffmanTable* ac_table,
                             VAHuffmanTableBufferJPEGBaseline* huffman_table) {
  // Use default huffman tables if not specified in header.
  bool has_huffman_table = false;
  for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
    if (dc_table[i].valid || ac_table[i].valid) {
      has_huffman_table = true;
      break;
    }
  }
  if (!has_huffman_table) {
    dc_table = kDefaultDcTable;
    ac_table = kDefaultAcTable;
  }

  static_assert(kJpegMaxHuffmanTableNumBaseline ==
                    std::extent<decltype(huffman_table->load_huffman_table)>(),
                "max number of huffman table mismatched");
  static_assert(sizeof(huffman_table->huffman_table[0].num_dc_codes) ==
                    sizeof(dc_table[0].code_length),
                "size of huffman table code length mismatch");
  static_assert(sizeof(huffman_table->huffman_table[0].dc_values[0]) ==
                    sizeof(dc_table[0].code_value[0]),
                "size of huffman table code value mismatch");
  for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
    if (!dc_table[i].valid || !ac_table[i].valid)
      continue;
    huffman_table->load_huffman_table[i] = 1;

    memcpy(huffman_table->huffman_table[i].num_dc_codes,
           dc_table[i].code_length,
           sizeof(huffman_table->huffman_table[i].num_dc_codes));
    memcpy(huffman_table->huffman_table[i].dc_values, dc_table[i].code_value,
           sizeof(huffman_table->huffman_table[i].dc_values));
    memcpy(huffman_table->huffman_table[i].num_ac_codes,
           ac_table[i].code_length,
           sizeof(huffman_table->huffman_table[i].num_ac_codes));
    memcpy(huffman_table->huffman_table[i].ac_values, ac_table[i].code_value,
           sizeof(huffman_table->huffman_table[i].ac_values));
  }
}

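// Fills the libva slice parameter buffer for the single baseline scan: the
// whole entropy-coded segment is submitted as one slice, and the MCU count is
// derived from the coded size and the luma sampling factors below.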
static void FillSliceParameters(
    const JpegParseResult& parse_result,
    VASliceParameterBufferJPEGBaseline* slice_param) {
  slice_param->slice_data_size = parse_result.data_size;
  slice_param->slice_data_offset = 0;
  slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
  slice_param->slice_horizontal_position = 0;
  slice_param->slice_vertical_position = 0;
  slice_param->num_components = parse_result.scan.num_components;
  for (int i = 0; i < slice_param->num_components; i++) {
    slice_param->components[i].component_selector =
        parse_result.scan.components[i].component_selector;
    slice_param->components[i].dc_table_selector =
        parse_result.scan.components[i].dc_selector;
    slice_param->components[i].ac_table_selector =
        parse_result.scan.components[i].ac_selector;
  }
  slice_param->restart_interval = parse_result.restart_interval;

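  // An MCU spans (max_h_factor * 8) x (max_v_factor * 8) luma samples. Using
  // components[0] for the maximum sampling factors is assumed to be safe here
  // because VaSurfaceFormatForJpeg() only accepts streams whose chroma
  // components use 1x1 sampling.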
  // Cast to int to prevent overflow.
  int max_h_factor =
      parse_result.frame_header.components[0].horizontal_sampling_factor;
  int max_v_factor =
      parse_result.frame_header.components[0].vertical_sampling_factor;
  int mcu_cols = parse_result.frame_header.coded_width / (max_h_factor * 8);
  DCHECK_GT(mcu_cols, 0);
  int mcu_rows = parse_result.frame_header.coded_height / (max_v_factor * 8);
  DCHECK_GT(mcu_rows, 0);
  slice_param->num_mcus = mcu_rows * mcu_cols;
}

// VAAPI only supports a subset of JPEG profiles. This function determines
// whether a given parsed JPEG result is supported or not.
static bool IsVaapiSupportedJpeg(const JpegParseResult& jpeg) {
  // Make sure the JPEG's chroma subsampling format is supported.
  if (!VaapiWrapper::IsDecodingSupportedForInternalFormat(
          VAProfileJPEGBaseline, VaSurfaceFormatForJpeg(jpeg.frame_header))) {
    DLOG(ERROR) << "The JPEG's subsampling format is unsupported";
    return false;
  }

  // Validate the visible size.
  if (jpeg.frame_header.visible_width == 0u) {
    DLOG(ERROR) << "Visible width can't be zero";
    return false;
  }
  if (jpeg.frame_header.visible_height == 0u) {
    DLOG(ERROR) << "Visible height can't be zero";
    return false;
  }

  // Validate the coded size.
  gfx::Size min_jpeg_resolution;
  if (!VaapiWrapper::GetDecodeMinResolution(VAProfileJPEGBaseline,
                                            &min_jpeg_resolution)) {
    DLOG(ERROR) << "Could not get the minimum resolution";
    return false;
  }
  gfx::Size max_jpeg_resolution;
  if (!VaapiWrapper::GetDecodeMaxResolution(VAProfileJPEGBaseline,
                                            &max_jpeg_resolution)) {
    DLOG(ERROR) << "Could not get the maximum resolution";
    return false;
  }
  const int actual_jpeg_coded_width =
      base::strict_cast<int>(jpeg.frame_header.coded_width);
  const int actual_jpeg_coded_height =
      base::strict_cast<int>(jpeg.frame_header.coded_height);
  if (actual_jpeg_coded_width < min_jpeg_resolution.width() ||
      actual_jpeg_coded_height < min_jpeg_resolution.height() ||
      actual_jpeg_coded_width > max_jpeg_resolution.width() ||
      actual_jpeg_coded_height > max_jpeg_resolution.height()) {
    DLOG(ERROR) << "VAAPI doesn't support size " << actual_jpeg_coded_width
                << "x" << actual_jpeg_coded_height << ": not in range "
                << min_jpeg_resolution.ToString() << " - "
                << max_jpeg_resolution.ToString();
    return false;
  }

  return true;
}

}  // namespace

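// Maps the JPEG's chroma subsampling to a VA-API RT format. Only 3-component
// images with 1x1-sampled chroma are accepted: 2x2 luma sampling maps to
// 4:2:0, 2x1 to 4:2:2 and 1x1 to 4:4:4; anything else (including grayscale)
// yields kInvalidVaRtFormat.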
unsigned int VaSurfaceFormatForJpeg(const JpegFrameHeader& frame_header) {
  if (frame_header.num_components != 3)
    return kInvalidVaRtFormat;

  const uint8_t y_h = frame_header.components[0].horizontal_sampling_factor;
  const uint8_t y_v = frame_header.components[0].vertical_sampling_factor;
  const uint8_t u_h = frame_header.components[1].horizontal_sampling_factor;
  const uint8_t u_v = frame_header.components[1].vertical_sampling_factor;
  const uint8_t v_h = frame_header.components[2].horizontal_sampling_factor;
  const uint8_t v_v = frame_header.components[2].vertical_sampling_factor;

  if (u_h != 1 || u_v != 1 || v_h != 1 || v_v != 1)
    return kInvalidVaRtFormat;

  if (y_h == 2 && y_v == 2)
    return VA_RT_FORMAT_YUV420;
  else if (y_h == 2 && y_v == 1)
    return VA_RT_FORMAT_YUV422;
  else if (y_h == 1 && y_v == 1)
    return VA_RT_FORMAT_YUV444;
  return kInvalidVaRtFormat;
}

VaapiJpegDecoder::VaapiJpegDecoder()
    : VaapiImageDecoder(VAProfileJPEGBaseline) {}

VaapiJpegDecoder::~VaapiJpegDecoder() = default;

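// Parses |encoded_image|, checks that it is a baseline JPEG the VA-API driver
// can decode, (re)creates the decode surface if needed, and submits the
// parameter and slice data buffers. Only the buffer staging happens here; the
// decode itself is presumably executed later via the base class's Decode()
// path.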
VaapiImageDecodeStatus VaapiJpegDecoder::AllocateVASurfaceAndSubmitVABuffers(
    base::span<const uint8_t> encoded_image) {
  DCHECK(vaapi_wrapper_);

  // Parse the JPEG encoded data.
  JpegParseResult parse_result;
  if (!ParseJpegPicture(encoded_image.data(), encoded_image.size(),
                        &parse_result)) {
    VLOGF(1) << "ParseJpegPicture failed";
    return VaapiImageDecodeStatus::kParseFailed;
  }

  // Figure out the right format for the VaSurface.
  const unsigned int picture_va_rt_format =
      VaSurfaceFormatForJpeg(parse_result.frame_header);
  if (picture_va_rt_format == kInvalidVaRtFormat) {
    VLOGF(1) << "Unsupported subsampling";
    return VaapiImageDecodeStatus::kUnsupportedSubsampling;
  }

  // Make sure this JPEG can be decoded.
  if (!IsVaapiSupportedJpeg(parse_result)) {
    VLOGF(1) << "The supplied JPEG is unsupported";
    return VaapiImageDecodeStatus::kUnsupportedImage;
  }

  // Prepare the surface for decoding.
  const gfx::Size new_visible_size(
      base::strict_cast<int>(parse_result.frame_header.visible_width),
      base::strict_cast<int>(parse_result.frame_header.visible_height));
  const gfx::Size new_coded_size(
      base::strict_cast<int>(parse_result.frame_header.coded_width),
      base::strict_cast<int>(parse_result.frame_header.coded_height));
  if (!MaybeCreateSurface(picture_va_rt_format, new_coded_size,
                          new_visible_size)) {
    return VaapiImageDecodeStatus::kSurfaceCreationFailed;
  }

  // Submit input buffers.
  if (!SubmitBuffers(parse_result))
    return VaapiImageDecodeStatus::kSubmitVABuffersFailed;

  return VaapiImageDecodeStatus::kSuccess;
}

gpu::ImageDecodeAcceleratorType VaapiJpegDecoder::GetType() const {
  return gpu::ImageDecodeAcceleratorType::kJpeg;
}

SkYUVColorSpace VaapiJpegDecoder::GetYUVColorSpace() const {
  return SkYUVColorSpace::kJPEG_SkYUVColorSpace;
}

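// Maps the decoded surface into a ScopedVAImage so the caller can read back
// the pixels. |preferred_image_fourcc| is only a hint; a different suitable
// FOURCC may be chosen. On failure, *|status| is set and nullptr is returned.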
std::unique_ptr<ScopedVAImage> VaapiJpegDecoder::GetImage(
    uint32_t preferred_image_fourcc,
    VaapiImageDecodeStatus* status) {
  if (!scoped_va_context_and_surface_) {
    VLOGF(1) << "No decoded JPEG available";
    *status = VaapiImageDecodeStatus::kInvalidState;
    return nullptr;
  }

  DCHECK(scoped_va_context_and_surface_->IsValid());
  DCHECK(vaapi_wrapper_);
  uint32_t image_fourcc;
  if (!VaapiWrapper::GetJpegDecodeSuitableImageFourCC(
          scoped_va_context_and_surface_->format(), preferred_image_fourcc,
          &image_fourcc)) {
    VLOGF(1) << "Cannot determine the output FOURCC";
    *status = VaapiImageDecodeStatus::kCannotGetImage;
    return nullptr;
  }
  VAImageFormat image_format{.fourcc = image_fourcc};
  // In at least one driver, the VPP seems to have problems if we request a
  // VAImage with odd dimensions. Rather than debugging the issue in depth, we
  // disable support for odd dimensions since the VAImage path is only expected
  // to be used in camera captures (and we don't expect JPEGs with odd
  // dimensions in that path).
  if ((scoped_va_context_and_surface_->size().width() & 1) ||
      (scoped_va_context_and_surface_->size().height() & 1)) {
    VLOGF(1) << "Getting images with odd dimensions is not supported";
    *status = VaapiImageDecodeStatus::kCannotGetImage;
    NOTREACHED();
    return nullptr;
  }
  auto scoped_image = vaapi_wrapper_->CreateVaImage(
      scoped_va_context_and_surface_->id(), &image_format,
      scoped_va_context_and_surface_->size());
  if (!scoped_image) {
    VLOGF(1) << "Cannot get VAImage, FOURCC = "
             << FourccToString(image_format.fourcc);
    *status = VaapiImageDecodeStatus::kCannotGetImage;
    return nullptr;
  }

  *status = VaapiImageDecodeStatus::kSuccess;
  return scoped_image;
}

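// Reuses the current surface and context when the existing visible size and
// RT format already match the new image; otherwise tears them down and
// allocates a fresh surface of |new_coded_size|.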
bool VaapiJpegDecoder::MaybeCreateSurface(unsigned int picture_va_rt_format,
                                          const gfx::Size& new_coded_size,
                                          const gfx::Size& new_visible_size) {
  DCHECK(!scoped_va_context_and_surface_ ||
         scoped_va_context_and_surface_->IsValid());
  if (scoped_va_context_and_surface_ &&
      new_visible_size == scoped_va_context_and_surface_->size() &&
      picture_va_rt_format == scoped_va_context_and_surface_->format()) {
    // No need to allocate a new surface. We can re-use the current one.
    return true;
  }

  scoped_va_context_and_surface_.reset();

  // We'll request a surface of |new_coded_size| from the VAAPI, but we will
  // keep track of the |new_visible_size| inside the ScopedVASurface so that
  // when we create a VAImage or export the surface as a NativePixmapDmaBuf, we
  // can report the size that clients should be using to read the contents.
  scoped_va_context_and_surface_.reset(
      vaapi_wrapper_
          ->CreateContextAndScopedVASurface(picture_va_rt_format,
                                            new_coded_size, new_visible_size)
          .release());
  if (!scoped_va_context_and_surface_) {
    VLOGF(1) << "CreateContextAndScopedVASurface() failed";
    return false;
  }

  DCHECK(scoped_va_context_and_surface_->IsValid());
  return true;
}

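// Builds and submits the five VA buffers a baseline JPEG decode needs:
// picture parameters, IQ matrix, Huffman tables, slice parameters and the raw
// slice data.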
bool VaapiJpegDecoder::SubmitBuffers(const JpegParseResult& parse_result) {
  // Set picture parameters.
  VAPictureParameterBufferJPEGBaseline pic_param{};
  FillPictureParameters(parse_result.frame_header, &pic_param);

  // Set quantization table.
  VAIQMatrixBufferJPEGBaseline iq_matrix{};
  FillIQMatrix(parse_result.q_table, &iq_matrix);

  // Set huffman table.
  VAHuffmanTableBufferJPEGBaseline huffman_table{};
  FillHuffmanTable(parse_result.dc_table, parse_result.ac_table,
                   &huffman_table);

  // Set slice parameters.
  VASliceParameterBufferJPEGBaseline slice_param{};
  FillSliceParameters(parse_result, &slice_param);

  return vaapi_wrapper_->SubmitBuffers(
      {{VAPictureParameterBufferType, sizeof(pic_param), &pic_param},
       {VAIQMatrixBufferType, sizeof(iq_matrix), &iq_matrix},
       {VAHuffmanTableBufferType, sizeof(huffman_table), &huffman_table},
       {VASliceParameterBufferType, sizeof(slice_param), &slice_param},
       {VASliceDataBufferType, parse_result.data_size,
        const_cast<char*>(parse_result.data)}});
}

}  // namespace media