/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkAlphaType.h"
#include "include/core/SkCanvas.h"
#include "include/core/SkColor.h"
#include "include/core/SkColorFilter.h"
#include "include/core/SkColorSpace.h"
#include "include/core/SkColorType.h"
#include "include/core/SkImage.h"
#include "include/core/SkImageInfo.h"
#include "include/core/SkPaint.h"
#include "include/core/SkRect.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkSamplingOptions.h"
#include "include/core/SkShader.h"
#include "include/core/SkSurface.h"
#include "include/core/SkTypes.h"
#include "include/gpu/GpuTypes.h"
#include "include/gpu/ganesh/GrBackendSurface.h"
#include "include/gpu/ganesh/GrDirectContext.h"
#include "include/gpu/ganesh/GrTypes.h"
#include "include/gpu/ganesh/SkSurfaceGanesh.h"
#include "include/private/base/SkTArray.h"
#include "include/private/chromium/GrDeferredDisplayListRecorder.h"
#include "include/private/chromium/GrPromiseImageTexture.h"
#include "include/private/chromium/SkImageChromium.h"
#include "include/private/gpu/ganesh/GrTypesPriv.h"
#include "src/gpu/ganesh/GrCaps.h"
#include "src/gpu/ganesh/GrDirectContextPriv.h"
#include "src/gpu/ganesh/GrResourceProvider.h"
#include "src/gpu/ganesh/GrTexture.h"
#include "tests/CtsEnforcement.h"
#include "tests/Test.h"
#include "tools/gpu/ContextType.h"
#include "tools/gpu/FenceSync.h"
#include "tools/gpu/ManagedBackendTexture.h"

#include <cstddef>
#include <functional>
#include <utility>

using namespace skia_private;

struct GrContextOptions;

using namespace sk_gpu_test;

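// Provides the fulfill/release callbacks for the promise images created in the tests below and
// counts how many times each callback runs so the tests can check that they stay balanced.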
struct PromiseTextureChecker {
    // shared indicates whether the backend texture is used to fulfill more than one promise
    // image.
    explicit PromiseTextureChecker(const GrBackendTexture& tex,
                                   skiatest::Reporter* reporter,
                                   bool shared)
            : fTexture(GrPromiseImageTexture::Make(tex)), fReporter(reporter), fShared(shared) {}
    sk_sp<GrPromiseImageTexture> fTexture;
    skiatest::Reporter* fReporter;
    bool fShared;
    int fFulfillCount = 0;
    int fReleaseCount = 0;

    static sk_sp<GrPromiseImageTexture> Fulfill(void* self) {
        auto checker = static_cast<PromiseTextureChecker*>(self);
        checker->fFulfillCount++;
        return checker->fTexture;
    }
    static void Release(void* self) { static_cast<PromiseTextureChecker*>(self)->fReleaseCount++; }
};

enum class ReleaseBalanceExpectation {
    kBalanced,            // Release has been called once for every Fulfill.
    kAllUnbalanced,       // No Fulfill has been released yet.
    kUnknown,             // Anywhere from none to all of the Fulfills have been released.
    kUnbalancedByOne,     // Exactly one Fulfill has not been released.
    kBalancedOrOffByOne,  // At most one Fulfill has not been released.
};

static void check_fulfill_and_release_cnts(skiatest::Reporter* reporter,
                                           const PromiseTextureChecker& promiseChecker,
                                           int expectedFulfillCnt,
                                           ReleaseBalanceExpectation releaseBalanceExpectation) {
    REPORTER_ASSERT(reporter, promiseChecker.fFulfillCount == expectedFulfillCnt);
    if (!expectedFulfillCnt) {
        // Release should only ever be called after Fulfill.
        REPORTER_ASSERT(reporter, !promiseChecker.fReleaseCount);
        return;
    }
    int releaseDiff = promiseChecker.fFulfillCount - promiseChecker.fReleaseCount;
    switch (releaseBalanceExpectation) {
        case ReleaseBalanceExpectation::kBalanced:
            REPORTER_ASSERT(reporter, !releaseDiff);
            break;
        case ReleaseBalanceExpectation::kAllUnbalanced:
            REPORTER_ASSERT(reporter, releaseDiff == promiseChecker.fFulfillCount);
            break;
        case ReleaseBalanceExpectation::kUnknown:
            REPORTER_ASSERT(reporter,
                            releaseDiff >= 0 && releaseDiff <= promiseChecker.fFulfillCount);
            break;
        case ReleaseBalanceExpectation::kUnbalancedByOne:
            REPORTER_ASSERT(reporter, releaseDiff == 1);
            break;
        case ReleaseBalanceExpectation::kBalancedOrOffByOne:
            REPORTER_ASSERT(reporter, releaseDiff == 0 || releaseDiff == 1);
            break;
    }
}

static void check_unfulfilled(const PromiseTextureChecker& promiseChecker,
                              skiatest::Reporter* reporter) {
    check_fulfill_and_release_cnts(reporter, promiseChecker, 0,
                                   ReleaseBalanceExpectation::kBalanced);
}

static void check_only_fulfilled(skiatest::Reporter* reporter,
                                 const PromiseTextureChecker& promiseChecker,
                                 int expectedFulfillCnt = 1) {
    check_fulfill_and_release_cnts(reporter, promiseChecker, expectedFulfillCnt,
                                   ReleaseBalanceExpectation::kAllUnbalanced);
}

static void check_all_flushed_but_not_synced(skiatest::Reporter* reporter,
                                             const PromiseTextureChecker& promiseChecker,
                                             GrBackendApi api,
                                             int expectedFulfillCnt = 1) {
    ReleaseBalanceExpectation releaseBalanceExpectation = ReleaseBalanceExpectation::kBalanced;
    // On Vulkan and D3D, Release isn't guaranteed to be called until a sync has occurred.
    if (api == GrBackendApi::kVulkan || api == GrBackendApi::kDirect3D) {
        releaseBalanceExpectation = expectedFulfillCnt == 1
                                            ? ReleaseBalanceExpectation::kBalancedOrOffByOne
                                            : ReleaseBalanceExpectation::kUnknown;
    }
    check_fulfill_and_release_cnts(reporter, promiseChecker, expectedFulfillCnt,
                                   releaseBalanceExpectation);
}

static void check_all_done(skiatest::Reporter* reporter,
                           const PromiseTextureChecker& promiseChecker,
                           int expectedFulfillCnt = 1) {
    check_fulfill_and_release_cnts(reporter, promiseChecker, expectedFulfillCnt,
                                   ReleaseBalanceExpectation::kBalanced);
}

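// Exercises the basic fulfill/release lifecycle of a promise image drawn to a surface on a live
// context.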
DEF_GANESH_TEST_FOR_RENDERING_CONTEXTS(PromiseImageTest,
                                       reporter,
                                       ctxInfo,
                                       CtsEnforcement::kNever) {
    using namespace skgpu;
    const int kWidth = 10;
    const int kHeight = 10;

    auto ctx = ctxInfo.directContext();

    Protected isProtected = Protected(ctx->priv().caps()->supportsProtectedContent());

    GrBackendTexture backendTex = ctx->createBackendTexture(kWidth,
                                                            kHeight,
                                                            kRGBA_8888_SkColorType,
                                                            SkColors::kTransparent,
                                                            skgpu::Mipmapped::kNo,
                                                            GrRenderable::kYes,
                                                            isProtected);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    GrBackendFormat backendFormat = backendTex.getBackendFormat();
    REPORTER_ASSERT(reporter, backendFormat.isValid());

    PromiseTextureChecker promiseChecker(backendTex, reporter, false);
    GrSurfaceOrigin texOrigin = kTopLeft_GrSurfaceOrigin;
    sk_sp<SkImage> refImg(SkImages::PromiseTextureFrom(ctx->threadSafeProxy(),
                                                       backendFormat,
                                                       {kWidth, kHeight},
                                                       skgpu::Mipmapped::kNo,
                                                       texOrigin,
                                                       kRGBA_8888_SkColorType,
                                                       kPremul_SkAlphaType,
                                                       nullptr,
                                                       PromiseTextureChecker::Fulfill,
                                                       PromiseTextureChecker::Release,
                                                       &promiseChecker));
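    // Creating the promise image does not invoke Fulfill; fulfillment is deferred until work
    // that reads the image is flushed (verified by the checks below).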

    SkImageInfo info = SkImageInfo::MakeN32Premul(kWidth, kHeight);
    sk_sp<SkSurface> surface = SkSurfaces::RenderTarget(ctx, skgpu::Budgeted::kNo, info);
    SkCanvas* canvas = surface->getCanvas();

    canvas->drawImage(refImg, 0, 0);
    check_unfulfilled(promiseChecker, reporter);

    ctx->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    // We still own the image so we should not have called Release.
    check_only_fulfilled(reporter, promiseChecker);

    ctx->submit(GrSyncCpu::kYes);
    check_only_fulfilled(reporter, promiseChecker);

    canvas->drawImage(refImg, 0, 0);
    canvas->drawImage(refImg, 0, 0);

    ctx->flushAndSubmit(surface.get(), GrSyncCpu::kYes);

    // Image should still be fulfilled from the first time we drew/flushed it.
    check_only_fulfilled(reporter, promiseChecker);

    canvas->drawImage(refImg, 0, 0);
    ctx->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    check_only_fulfilled(reporter, promiseChecker);

    canvas->drawImage(refImg, 0, 0);
    refImg.reset();
    // We no longer own the image but the last draw is still unflushed.
    check_only_fulfilled(reporter, promiseChecker);

    ctx->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    // Flushing should have called Release on most backends; on Vulkan and D3D it may not be
    // called until we sync with the GPU.
    check_all_flushed_but_not_synced(reporter, promiseChecker, ctx->backend());
    ctx->submit(GrSyncCpu::kYes);
    // Now Release should definitely have been called.
    check_all_done(reporter, promiseChecker);

    ctx->deleteBackendTexture(backendTex);
}

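// Verifies that the promise image release callback still runs when the context is shut down in
// various ways (destroyed, abandoned, or released-and-abandoned) after a flush.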
DEF_GANESH_TEST(PromiseImageTextureShutdown, reporter, ctxInfo, CtsEnforcement::kNever) {
    const int kWidth = 10;
    const int kHeight = 10;

    // Different ways of killing contexts.
    using DeathFn = std::function<void(sk_gpu_test::GrContextFactory*, GrDirectContext*)>;
    DeathFn destroy = [](sk_gpu_test::GrContextFactory* factory, GrDirectContext*) {
        factory->destroyContexts();
    };
    DeathFn abandon = [](sk_gpu_test::GrContextFactory* factory, GrDirectContext* dContext) {
        dContext->abandonContext();
    };
    DeathFn releaseResourcesAndAbandon = [](sk_gpu_test::GrContextFactory* factory,
                                            GrDirectContext* dContext) {
        dContext->releaseResourcesAndAbandonContext();
    };

    for (int type = 0; type < skgpu::kContextTypeCount; ++type) {
        auto contextType = static_cast<skgpu::ContextType>(type);
        // These tests are difficult to get working with Vulkan. See http://skbug.com/8705
        // and http://skbug.com/8275
        // And Direct3D, for similar reasons.
        GrBackendApi api = skgpu::ganesh::ContextTypeBackend(contextType);
        if (api == GrBackendApi::kUnsupported || api == GrBackendApi::kVulkan ||
            api == GrBackendApi::kDirect3D) {
            continue;
        }
        DeathFn contextKillers[] = {destroy, abandon, releaseResourcesAndAbandon};
        for (const DeathFn& contextDeath : contextKillers) {
            sk_gpu_test::GrContextFactory factory;
            auto ctx = factory.get(contextType);
            if (!ctx) {
                continue;
            }

            auto mbet = sk_gpu_test::ManagedBackendTexture::MakeWithoutData(ctx,
                                                                            kWidth,
                                                                            kHeight,
                                                                            kAlpha_8_SkColorType,
                                                                            skgpu::Mipmapped::kNo,
                                                                            GrRenderable::kNo);
            if (!mbet) {
                ERRORF(reporter, "Could not create alpha texture.");
                continue;
            }

            SkImageInfo info = SkImageInfo::Make(kWidth, kHeight, kRGBA_8888_SkColorType,
                                                 kPremul_SkAlphaType);
            sk_sp<SkSurface> surface = SkSurfaces::RenderTarget(ctx, skgpu::Budgeted::kNo, info);
            SkCanvas* canvas = surface->getCanvas();

            PromiseTextureChecker promiseChecker(mbet->texture(), reporter, false);
            sk_sp<SkImage> image(SkImages::PromiseTextureFrom(ctx->threadSafeProxy(),
                                                              mbet->texture().getBackendFormat(),
                                                              {kWidth, kHeight},
                                                              skgpu::Mipmapped::kNo,
                                                              kTopLeft_GrSurfaceOrigin,
                                                              kAlpha_8_SkColorType,
                                                              kPremul_SkAlphaType,
                                                              /*color space*/ nullptr,
                                                              PromiseTextureChecker::Fulfill,
                                                              PromiseTextureChecker::Release,
                                                              &promiseChecker));
            REPORTER_ASSERT(reporter, image);

            canvas->drawImage(image, 0, 0);
            image.reset();
            // If the surface still holds a ref to the context then the factory will not be able
            // to destroy the context (and instead will release-all-and-abandon).
            surface.reset();

            ctx->flushAndSubmit();
            contextDeath(&factory, ctx);

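            // Regardless of how the context was shut down, the outstanding fulfillment must
            // have been released.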
            check_all_done(reporter, promiseChecker);
        }
    }
}

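// Verifies that promise textures are not preemptively purged for re-fulfillment when the
// resource cache is over budget.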
DEF_GANESH_TEST_FOR_RENDERING_CONTEXTS(PromiseImageTextureFullCache,
                                       reporter,
                                       ctxInfo,
                                       CtsEnforcement::kNever) {
    using namespace skgpu;

    const int kWidth = 10;
    const int kHeight = 10;

    auto dContext = ctxInfo.directContext();

    Protected isProtected = Protected(dContext->priv().caps()->supportsProtectedContent());

    GrBackendTexture backendTex = dContext->createBackendTexture(kWidth,
                                                                 kHeight,
                                                                 kAlpha_8_SkColorType,
                                                                 SkColors::kTransparent,
                                                                 skgpu::Mipmapped::kNo,
                                                                 GrRenderable::kNo,
                                                                 isProtected);
    REPORTER_ASSERT(reporter, backendTex.isValid());

    SkImageInfo info =
            SkImageInfo::Make(kWidth, kHeight, kRGBA_8888_SkColorType, kPremul_SkAlphaType);
    sk_sp<SkSurface> surface = SkSurfaces::RenderTarget(dContext, skgpu::Budgeted::kNo, info);
    SkCanvas* canvas = surface->getCanvas();

    PromiseTextureChecker promiseChecker(backendTex, reporter, false);
    sk_sp<SkImage> image(SkImages::PromiseTextureFrom(dContext->threadSafeProxy(),
                                                      backendTex.getBackendFormat(),
                                                      {kWidth, kHeight},
                                                      skgpu::Mipmapped::kNo,
                                                      kTopLeft_GrSurfaceOrigin,
                                                      kAlpha_8_SkColorType,
                                                      kPremul_SkAlphaType,
                                                      nullptr,
                                                      PromiseTextureChecker::Fulfill,
                                                      PromiseTextureChecker::Release,
                                                      &promiseChecker));
    REPORTER_ASSERT(reporter, image);

    // Make the cache full. This tests that we don't preemptively purge cached textures for
    // fulfillment due to cache pressure.
    static constexpr int kMaxBytes = 1;
    dContext->setResourceCacheLimit(kMaxBytes);
    TArray<sk_sp<GrTexture>> textures;
    for (int i = 0; i < 5; ++i) {
        auto format = dContext->priv().caps()->getDefaultBackendFormat(GrColorType::kRGBA_8888,
                                                                       GrRenderable::kNo);
        textures.emplace_back(dContext->priv().resourceProvider()->createTexture(
                {100, 100},
                format,
                GrTextureType::k2D,
                GrRenderable::kNo,
                1,
                skgpu::Mipmapped::kNo,
                skgpu::Budgeted::kYes,
                isProtected,
                /*label=*/"PromiseImageTextureFullCacheTest"));
        REPORTER_ASSERT(reporter, textures[i]);
    }

    size_t bytesUsed;

    dContext->getResourceCacheUsage(nullptr, &bytesUsed);
    REPORTER_ASSERT(reporter, bytesUsed > kMaxBytes);

    // Relying on the asserts in promiseChecker to ensure that fulfills and releases are
    // properly ordered.
    canvas->drawImage(image, 0, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    canvas->drawImage(image, 1, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    canvas->drawImage(image, 2, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    canvas->drawImage(image, 3, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    canvas->drawImage(image, 4, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    canvas->drawImage(image, 5, 0);
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    // Must call these to ensure that all callbacks are performed before the checker is destroyed.
    image.reset();
    dContext->flushAndSubmit(GrSyncCpu::kYes);

    dContext->deleteBackendTexture(backendTex);
}

// Test case where promise image fulfill returns nullptr.
DEF_GANESH_TEST_FOR_RENDERING_CONTEXTS(PromiseImageNullFulfill,
                                       reporter,
                                       ctxInfo,
                                       CtsEnforcement::kNever) {
    const int kWidth = 10;
    const int kHeight = 10;

    auto dContext = ctxInfo.directContext();

    GrBackendFormat backendFormat =
            dContext->defaultBackendFormat(kRGBA_8888_SkColorType, GrRenderable::kYes);
    if (!backendFormat.isValid()) {
        ERRORF(reporter, "No valid default kRGBA_8888 texture format.");
        return;
    }

    struct Counts {
        int fFulfillCount = 0;
        int fReleaseCount = 0;
    } counts;
    auto fulfill = [](SkImages::PromiseImageTextureContext ctx) {
        ++static_cast<Counts*>(ctx)->fFulfillCount;
        return sk_sp<GrPromiseImageTexture>();
    };
    auto release = [](SkImages::PromiseImageTextureContext ctx) {
        ++static_cast<Counts*>(ctx)->fReleaseCount;
    };
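    // The fulfill proc above deliberately returns a null texture to simulate a failed
    // fulfillment; the draws below should then produce nothing, but each callback must still
    // fire exactly once (checked at the end of the test).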
    GrSurfaceOrigin texOrigin = kTopLeft_GrSurfaceOrigin;
    sk_sp<SkImage> refImg(SkImages::PromiseTextureFrom(dContext->threadSafeProxy(),
                                                       backendFormat,
                                                       {kWidth, kHeight},
                                                       skgpu::Mipmapped::kNo,
                                                       texOrigin,
                                                       kRGBA_8888_SkColorType,
                                                       kPremul_SkAlphaType,
                                                       nullptr,
                                                       fulfill,
                                                       release,
                                                       &counts));

    SkImageInfo info = SkImageInfo::MakeN32Premul(kWidth, kHeight);
    sk_sp<SkSurface> surface = SkSurfaces::RenderTarget(dContext, skgpu::Budgeted::kNo, info);
    SkCanvas* canvas = surface->getCanvas();
    // Draw the image a few different ways.
    canvas->drawImage(refImg, 0, 0);
    SkPaint paint;
    paint.setColorFilter(SkColorFilters::LinearToSRGBGamma());
    canvas->drawImage(refImg, 0, 0, SkSamplingOptions(), &paint);
    auto shader = refImg->makeShader(SkSamplingOptions());
    REPORTER_ASSERT(reporter, shader);
    paint.setShader(std::move(shader));
    canvas->drawRect(SkRect::MakeWH(1, 1), paint);
    paint.setShader(nullptr);
    refImg.reset();
    dContext->flushAndSubmit(surface.get(), GrSyncCpu::kNo);
    // We should only call each callback once and we should have made all the calls by this point.
    REPORTER_ASSERT(reporter, counts.fFulfillCount == 1);
    REPORTER_ASSERT(reporter, counts.fReleaseCount == 1);
}