/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

8 #include "tools/DDLPromiseImageHelper.h"
9
10 #include "include/core/SkDeferredDisplayListRecorder.h"
11 #include "include/core/SkPicture.h"
12 #include "include/core/SkSerialProcs.h"
13 #include "include/core/SkYUVAIndex.h"
14 #include "include/core/SkYUVASizeInfo.h"
15 #include "include/gpu/GrContext.h"
16 #include "src/core/SkCachedData.h"
17 #include "src/core/SkTaskGroup.h"
18 #include "src/gpu/GrContextPriv.h"
19 #include "src/image/SkImage_Base.h"
20 #include "src/image/SkImage_GpuYUVA.h"
21
~PromiseImageCallbackContext()22 DDLPromiseImageHelper::PromiseImageCallbackContext::~PromiseImageCallbackContext() {
23 SkASSERT(fDoneCnt == fNumImages);
24 SkASSERT(!fUnreleasedFulfills);
25 SkASSERT(fTotalReleases == fTotalFulfills);
26 SkASSERT(!fTotalFulfills || fDoneCnt);
27
28 if (fPromiseImageTexture) {
29 fContext->deleteBackendTexture(fPromiseImageTexture->backendTexture());
30 }
31 }
32
setBackendTexture(const GrBackendTexture & backendTexture)33 void DDLPromiseImageHelper::PromiseImageCallbackContext::setBackendTexture(
34 const GrBackendTexture& backendTexture) {
35 SkASSERT(!fPromiseImageTexture);
36 SkASSERT(fBackendFormat == backendTexture.getBackendFormat());
37 fPromiseImageTexture = SkPromiseImageTexture::Make(backendTexture);
38 }
39
40 ///////////////////////////////////////////////////////////////////////////////////////////////////
41
deflateSKP(const SkPicture * inputPicture)42 sk_sp<SkData> DDLPromiseImageHelper::deflateSKP(const SkPicture* inputPicture) {
43 SkSerialProcs procs;
44
45 procs.fImageCtx = this;
46 procs.fImageProc = [](SkImage* image, void* ctx) -> sk_sp<SkData> {
47 auto helper = static_cast<DDLPromiseImageHelper*>(ctx);
48
49 int id = helper->findOrDefineImage(image);
50
51 // Even if 'id' is invalid (i.e., -1) write it to the SKP
52 return SkData::MakeWithCopy(&id, sizeof(id));
53 };
54
55 return inputPicture->serialize(&procs);
56 }
57
create_yuva_texture(GrContext * context,const SkPixmap & pm,const SkYUVAIndex yuvaIndices[4],int texIndex)58 static GrBackendTexture create_yuva_texture(GrContext* context, const SkPixmap& pm,
59 const SkYUVAIndex yuvaIndices[4], int texIndex) {
60 SkASSERT(texIndex >= 0 && texIndex <= 3);
61
62 #ifdef SK_DEBUG
63 int channelCount = 0;
64 for (int i = 0; i < SkYUVAIndex::kIndexCount; ++i) {
65 if (yuvaIndices[i].fIndex == texIndex) {
66 ++channelCount;
67 }
68 }
69 if (2 == channelCount) {
70 SkASSERT(kR8G8_unorm_SkColorType == pm.colorType());
71 }
72 #endif
73
74 return context->createBackendTexture(&pm, 1, GrRenderable::kNo, GrProtected::kNo);
75 }
76
77 /*
78 * Create backend textures and upload data to them for all the textures required to satisfy
79 * a single promise image.
80 * For YUV textures this will result in up to 4 actual textures.
81 */
CreateBETexturesForPromiseImage(GrContext * context,PromiseImageInfo * info)82 void DDLPromiseImageHelper::CreateBETexturesForPromiseImage(GrContext* context,
83 PromiseImageInfo* info) {
84 SkASSERT(context->priv().asDirectContext());
85
86 // DDL TODO: how can we tell if we need mipmapping!
87 if (info->isYUV()) {
88 int numPixmaps;
89 SkAssertResult(SkYUVAIndex::AreValidIndices(info->yuvaIndices(), &numPixmaps));
90 for (int j = 0; j < numPixmaps; ++j) {
91 const SkPixmap& yuvPixmap = info->yuvPixmap(j);
92
93 PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
94 SkASSERT(callbackContext);
95
96 callbackContext->setBackendTexture(create_yuva_texture(context, yuvPixmap,
97 info->yuvaIndices(), j));
98 SkASSERT(callbackContext->promiseImageTexture());
99 }
100 } else {
101 PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
102 if (!callbackContext) {
103 // This texture would've been too large to fit on the GPU
104 return;
105 }
106
107 const SkBitmap& bm = info->normalBitmap();
108
109 GrBackendTexture backendTex = context->createBackendTexture(
110 &bm.pixmap(), 1, GrRenderable::kNo,
111 GrProtected::kNo);
112 SkASSERT(backendTex.isValid());
113
114 callbackContext->setBackendTexture(backendTex);
115 }
116 }
117
DeleteBETexturesForPromiseImage(GrContext * context,PromiseImageInfo * info)118 void DDLPromiseImageHelper::DeleteBETexturesForPromiseImage(GrContext* context,
119 PromiseImageInfo* info) {
120 SkASSERT(context->priv().asDirectContext());
121
122 if (info->isYUV()) {
123 int numPixmaps;
124 SkAssertResult(SkYUVAIndex::AreValidIndices(info->yuvaIndices(), &numPixmaps));
125 for (int j = 0; j < numPixmaps; ++j) {
126 PromiseImageCallbackContext* callbackContext = info->callbackContext(j);
127 SkASSERT(callbackContext);
128
129 callbackContext->destroyBackendTexture();
130 SkASSERT(!callbackContext->promiseImageTexture());
131 }
132 } else {
133 PromiseImageCallbackContext* callbackContext = info->callbackContext(0);
134 if (!callbackContext) {
135 // This texture would've been too large to fit on the GPU
136 return;
137 }
138
139 callbackContext->destroyBackendTexture();
140 SkASSERT(!callbackContext->promiseImageTexture());
141 }
142 }
143
createCallbackContexts(GrContext * context)144 void DDLPromiseImageHelper::createCallbackContexts(GrContext* context) {
145 const GrCaps* caps = context->priv().caps();
146 const int maxDimension = caps->maxTextureSize();
147
148 for (int i = 0; i < fImageInfo.count(); ++i) {
149 PromiseImageInfo& info = fImageInfo[i];
150
151 if (info.isYUV()) {
152 int numPixmaps;
153 SkAssertResult(SkYUVAIndex::AreValidIndices(info.yuvaIndices(), &numPixmaps));
154
155 for (int j = 0; j < numPixmaps; ++j) {
156 const SkPixmap& yuvPixmap = info.yuvPixmap(j);
157
158 GrBackendFormat backendFormat = context->defaultBackendFormat(yuvPixmap.colorType(),
159 GrRenderable::kNo);
160
161 sk_sp<PromiseImageCallbackContext> callbackContext(
162 new PromiseImageCallbackContext(context, backendFormat));
163
164 info.setCallbackContext(j, std::move(callbackContext));
165 }
166 } else {
167 const SkBitmap& bm = info.normalBitmap();
168
169 // TODO: explicitly mark the PromiseImageInfo as too big and check in uploadAllToGPU
170 if (maxDimension < std::max(bm.width(), bm.height())) {
171 // This won't fit on the GPU. Fallback to a raster-backed image per tile.
172 continue;
173 }
174
175 GrBackendFormat backendFormat = context->defaultBackendFormat(bm.pixmap().colorType(),
176 GrRenderable::kNo);
177 if (!caps->isFormatTexturable(backendFormat)) {
178 continue;
179 }
180
181
182 sk_sp<PromiseImageCallbackContext> callbackContext(
183 new PromiseImageCallbackContext(context, backendFormat));
184
185 info.setCallbackContext(0, std::move(callbackContext));
186 }
187 }
188 }
189
uploadAllToGPU(SkTaskGroup * taskGroup,GrContext * context)190 void DDLPromiseImageHelper::uploadAllToGPU(SkTaskGroup* taskGroup, GrContext* context) {
191 SkASSERT(context->priv().asDirectContext());
192
193 if (taskGroup) {
194 for (int i = 0; i < fImageInfo.count(); ++i) {
195 PromiseImageInfo* info = &fImageInfo[i];
196
197 taskGroup->add([context, info]() { CreateBETexturesForPromiseImage(context, info); });
198 }
199 } else {
200 for (int i = 0; i < fImageInfo.count(); ++i) {
201 CreateBETexturesForPromiseImage(context, &fImageInfo[i]);
202 }
203 }
204 }
205
deleteAllFromGPU(SkTaskGroup * taskGroup,GrContext * context)206 void DDLPromiseImageHelper::deleteAllFromGPU(SkTaskGroup* taskGroup, GrContext* context) {
207 SkASSERT(context->priv().asDirectContext());
208
209 if (taskGroup) {
210 for (int i = 0; i < fImageInfo.count(); ++i) {
211 PromiseImageInfo* info = &fImageInfo[i];
212
213 taskGroup->add([context, info]() { DeleteBETexturesForPromiseImage(context, info); });
214 }
215 } else {
216 for (int i = 0; i < fImageInfo.count(); ++i) {
217 DeleteBETexturesForPromiseImage(context, &fImageInfo[i]);
218 }
219 }
220 }
221
reinflateSKP(SkDeferredDisplayListRecorder * recorder,SkData * compressedPictureData,SkTArray<sk_sp<SkImage>> * promiseImages) const222 sk_sp<SkPicture> DDLPromiseImageHelper::reinflateSKP(
223 SkDeferredDisplayListRecorder* recorder,
224 SkData* compressedPictureData,
225 SkTArray<sk_sp<SkImage>>* promiseImages) const {
226 PerRecorderContext perRecorderContext { recorder, this, promiseImages };
227
228 SkDeserialProcs procs;
229 procs.fImageCtx = (void*) &perRecorderContext;
230 procs.fImageProc = CreatePromiseImages;
231
232 return SkPicture::MakeFromData(compressedPictureData, &procs);
233 }
234
235 // This generates promise images to replace the indices in the compressed picture. This
236 // reconstitution is performed separately in each thread so we end up with multiple
237 // promise images referring to the same GrBackendTexture.
CreatePromiseImages(const void * rawData,size_t length,void * ctxIn)238 sk_sp<SkImage> DDLPromiseImageHelper::CreatePromiseImages(const void* rawData,
239 size_t length, void* ctxIn) {
240 PerRecorderContext* perRecorderContext = static_cast<PerRecorderContext*>(ctxIn);
241 const DDLPromiseImageHelper* helper = perRecorderContext->fHelper;
242 SkDeferredDisplayListRecorder* recorder = perRecorderContext->fRecorder;
243
244 SkASSERT(length == sizeof(int));
245
246 const int* indexPtr = static_cast<const int*>(rawData);
247 if (!helper->isValidID(*indexPtr)) {
248 return nullptr;
249 }
250
251 const DDLPromiseImageHelper::PromiseImageInfo& curImage = helper->getInfo(*indexPtr);
252
253 // If there is no callback context that means 'createCallbackContexts' determined the
254 // texture wouldn't fit on the GPU. Create a separate bitmap-backed image for each thread.
255 if (!curImage.isYUV() && !curImage.callbackContext(0)) {
256 SkASSERT(curImage.normalBitmap().isImmutable());
257 return SkImage::MakeFromBitmap(curImage.normalBitmap());
258 }
259
260 SkASSERT(curImage.index() == *indexPtr);
261
262 sk_sp<SkImage> image;
263 if (curImage.isYUV()) {
264 GrBackendFormat backendFormats[SkYUVASizeInfo::kMaxCount];
265 void* contexts[SkYUVASizeInfo::kMaxCount] = { nullptr, nullptr, nullptr, nullptr };
266 SkISize sizes[SkYUVASizeInfo::kMaxCount];
267 // TODO: store this value somewhere?
268 int textureCount;
269 SkAssertResult(SkYUVAIndex::AreValidIndices(curImage.yuvaIndices(), &textureCount));
270 for (int i = 0; i < textureCount; ++i) {
271 backendFormats[i] = curImage.backendFormat(i);
272 SkASSERT(backendFormats[i].isValid());
273 contexts[i] = curImage.refCallbackContext(i).release();
274 sizes[i].set(curImage.yuvPixmap(i).width(), curImage.yuvPixmap(i).height());
275 }
276 for (int i = textureCount; i < SkYUVASizeInfo::kMaxCount; ++i) {
277 sizes[i] = SkISize::MakeEmpty();
278 }
279
280 image = recorder->makeYUVAPromiseTexture(
281 curImage.yuvColorSpace(),
282 backendFormats,
283 sizes,
284 curImage.yuvaIndices(),
285 curImage.overallWidth(),
286 curImage.overallHeight(),
287 GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
288 curImage.refOverallColorSpace(),
289 DDLPromiseImageHelper::PromiseImageFulfillProc,
290 DDLPromiseImageHelper::PromiseImageReleaseProc,
291 DDLPromiseImageHelper::PromiseImageDoneProc,
292 contexts,
293 SkDeferredDisplayListRecorder::PromiseImageApiVersion::kNew);
294 for (int i = 0; i < textureCount; ++i) {
295 curImage.callbackContext(i)->wasAddedToImage();
296 }
297
298 #ifdef SK_DEBUG
299 {
300 // By the peekProxy contract this image should not have a single backing proxy so
301 // should return null. The call should also not trigger the conversion to RGBA.
302 SkImage_GpuYUVA* yuva = reinterpret_cast<SkImage_GpuYUVA*>(image.get());
303 SkASSERT(!yuva->peekProxy());
304 SkASSERT(!yuva->peekProxy()); // the first call didn't force a conversion to RGBA
305 }
306 #endif
307 } else {
308 GrBackendFormat backendFormat = curImage.backendFormat(0);
309 SkASSERT(backendFormat.isValid());
310
311 // Each DDL recorder gets its own ref on the promise callback context for the
312 // promise images it creates.
313 // DDL TODO: sort out mipmapping
314 image = recorder->makePromiseTexture(
315 backendFormat,
316 curImage.overallWidth(),
317 curImage.overallHeight(),
318 GrMipMapped::kNo,
319 GrSurfaceOrigin::kTopLeft_GrSurfaceOrigin,
320 curImage.overallColorType(),
321 curImage.overallAlphaType(),
322 curImage.refOverallColorSpace(),
323 DDLPromiseImageHelper::PromiseImageFulfillProc,
324 DDLPromiseImageHelper::PromiseImageReleaseProc,
325 DDLPromiseImageHelper::PromiseImageDoneProc,
326 (void*)curImage.refCallbackContext(0).release(),
327 SkDeferredDisplayListRecorder::PromiseImageApiVersion::kNew);
328 curImage.callbackContext(0)->wasAddedToImage();
329 }
330 perRecorderContext->fPromiseImages->push_back(image);
331 SkASSERT(image);
332 return image;
333 }
334
findImage(SkImage * image) const335 int DDLPromiseImageHelper::findImage(SkImage* image) const {
336 for (int i = 0; i < fImageInfo.count(); ++i) {
337 if (fImageInfo[i].originalUniqueID() == image->uniqueID()) { // trying to dedup here
338 SkASSERT(fImageInfo[i].index() == i);
339 SkASSERT(this->isValidID(i) && this->isValidID(fImageInfo[i].index()));
340 return i;
341 }
342 }
343 return -1;
344 }
345
addImage(SkImage * image)346 int DDLPromiseImageHelper::addImage(SkImage* image) {
347 SkImage_Base* ib = as_IB(image);
348
349 SkImageInfo overallII = SkImageInfo::Make(image->width(), image->height(),
350 image->colorType() == kBGRA_8888_SkColorType
351 ? kRGBA_8888_SkColorType
352 : image->colorType(),
353 image->alphaType(),
354 image->refColorSpace());
355
356 PromiseImageInfo& newImageInfo = fImageInfo.emplace_back(fImageInfo.count(),
357 image->uniqueID(),
358 overallII);
359
360 SkYUVASizeInfo yuvaSizeInfo;
361 SkYUVAIndex yuvaIndices[SkYUVAIndex::kIndexCount];
362 SkYUVColorSpace yuvColorSpace;
363 const void* planes[SkYUVASizeInfo::kMaxCount];
364 sk_sp<SkCachedData> yuvData = ib->getPlanes(&yuvaSizeInfo, yuvaIndices, &yuvColorSpace, planes);
365 if (yuvData) {
366 newImageInfo.setYUVData(std::move(yuvData), yuvaIndices, yuvColorSpace);
367
368 // determine colortypes from index data
369 // for testing we only ever use A8, RG_88
370 SkColorType colorTypes[SkYUVASizeInfo::kMaxCount] = {
371 kUnknown_SkColorType, kUnknown_SkColorType,
372 kUnknown_SkColorType, kUnknown_SkColorType
373 };
374 for (int yuvIndex = 0; yuvIndex < SkYUVAIndex::kIndexCount; ++yuvIndex) {
375 int texIdx = yuvaIndices[yuvIndex].fIndex;
376 if (texIdx < 0) {
377 SkASSERT(SkYUVAIndex::kA_Index == yuvIndex);
378 continue;
379 }
380 if (kUnknown_SkColorType == colorTypes[texIdx]) {
381 colorTypes[texIdx] = kAlpha_8_SkColorType;
382 } else {
383 colorTypes[texIdx] = kR8G8_unorm_SkColorType;
384 }
385 }
386
387 for (int i = 0; i < SkYUVASizeInfo::kMaxCount; ++i) {
388 if (yuvaSizeInfo.fSizes[i].isEmpty()) {
389 SkASSERT(!yuvaSizeInfo.fWidthBytes[i] && kUnknown_SkColorType == colorTypes[i]);
390 continue;
391 }
392
393 SkImageInfo planeII = SkImageInfo::Make(yuvaSizeInfo.fSizes[i].fWidth,
394 yuvaSizeInfo.fSizes[i].fHeight,
395 colorTypes[i],
396 kUnpremul_SkAlphaType);
397 newImageInfo.addYUVPlane(i, planeII, planes[i], yuvaSizeInfo.fWidthBytes[i]);
398 }
399 } else {
400 sk_sp<SkImage> rasterImage = image->makeRasterImage(); // force decoding of lazy images
401 if (!rasterImage) {
402 return -1;
403 }
404
405 SkBitmap tmp;
406 tmp.allocPixels(overallII);
407
408 if (!rasterImage->readPixels(tmp.pixmap(), 0, 0)) {
409 return -1;
410 }
411
412 tmp.setImmutable();
413 newImageInfo.setNormalBitmap(tmp);
414 }
415 // In either case newImageInfo's PromiseImageCallbackContext is filled in by uploadAllToGPU
416
417 return fImageInfo.count()-1;
418 }
419
findOrDefineImage(SkImage * image)420 int DDLPromiseImageHelper::findOrDefineImage(SkImage* image) {
421 int preExistingID = this->findImage(image);
422 if (preExistingID >= 0) {
423 SkASSERT(this->isValidID(preExistingID));
424 return preExistingID;
425 }
426
427 int newID = this->addImage(image);
428 return newID;
429 }
430