/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

// This is a GPU-backend specific test. It relies on static initializers to work

10 #include "include/core/SkTypes.h"
11
12 #if defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26 && defined(SK_VULKAN)
13
14 #include "include/core/SkBitmap.h"
15 #include "include/core/SkCanvas.h"
16 #include "include/core/SkColorSpace.h"
17 #include "include/core/SkImage.h"
18 #include "include/core/SkSurface.h"
19 #include "include/gpu/ganesh/GrBackendSemaphore.h"
20 #include "include/gpu/ganesh/GrDirectContext.h"
21 #include "include/gpu/ganesh/GrTypes.h"
22 #include "include/gpu/MutableTextureState.h"
23 #include "include/gpu/ganesh/SkImageGanesh.h"
24 #include "include/gpu/ganesh/SkSurfaceGanesh.h"
25 #include "include/gpu/ganesh/gl/GrGLBackendSurface.h"
26 #include "include/gpu/ganesh/vk/GrVkBackendSemaphore.h"
27 #include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
28 #include "include/gpu/ganesh/vk/GrVkDirectContext.h"
29 #include "include/gpu/ganesh/vk/GrVkTypes.h"
30 #include "include/gpu/vk/VulkanBackendContext.h"
31 #include "include/gpu/vk/VulkanExtensions.h"
32 #include "include/gpu/vk/VulkanMemoryAllocator.h"
33 #include "include/gpu/vk/VulkanMutableTextureState.h"
34 #include "src/base/SkAutoMalloc.h"
35 #include "src/gpu/ganesh/GrDirectContextPriv.h"
36 #include "src/gpu/ganesh/GrGpu.h"
37 #include "src/gpu/ganesh/GrProxyProvider.h"
38 #include "src/gpu/ganesh/SkGr.h"
39 #include "src/gpu/ganesh/gl/GrGLDefines.h"
40 #include "src/gpu/ganesh/gl/GrGLUtil.h"
41 #include "tests/Test.h"
42 #include "tools/gpu/GrContextFactory.h"
43 #include "tools/gpu/vk/VkTestUtils.h"
44
45 #include <android/hardware_buffer.h>
46 #include <cinttypes>
47
48 #include <EGL/egl.h>
49 #include <EGL/eglext.h>
50 #include <GLES/gl.h>
51 #include <GLES/glext.h>
52
53 static const int DEV_W = 16, DEV_H = 16;
54
55 class BaseTestHelper {
56 public:
~BaseTestHelper()57 virtual ~BaseTestHelper() {}
58
59 virtual bool init(skiatest::Reporter* reporter) = 0;
60
61 virtual void cleanup() = 0;
62 // This is used to release a surface back to the external queue in vulkan
63 virtual void releaseSurfaceToExternal(SkSurface*) = 0;
64 virtual void releaseImage() = 0;
65
66 virtual sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
67 AHardwareBuffer* buffer) = 0;
68 virtual sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
69 AHardwareBuffer* buffer) = 0;
70
71 virtual void doClientSync() = 0;
72 virtual bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) = 0;
73 virtual bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
74 sk_sp<SkSurface>) = 0;
75
76 virtual void makeCurrent() = 0;
77
78 virtual GrDirectContext* directContext() = 0;
79
getFdHandle()80 int getFdHandle() { return fFdHandle; }
81
82 protected:
BaseTestHelper()83 BaseTestHelper() {}
84
85 int fFdHandle = 0;
86 };
87
88 #ifdef SK_GL
89 class EGLTestHelper : public BaseTestHelper {
90 public:
EGLTestHelper(const GrContextOptions & options)91 EGLTestHelper(const GrContextOptions& options) : fFactory(options) {}
92
~EGLTestHelper()93 ~EGLTestHelper() override {}
94
releaseImage()95 void releaseImage() override {
96 this->makeCurrent();
97 if (!fGLCtx) {
98 return;
99 }
100 if (EGL_NO_IMAGE_KHR != fImage) {
101 fGLCtx->destroyEGLImage(fImage);
102 fImage = EGL_NO_IMAGE_KHR;
103 }
104 if (fTexID) {
105 GR_GL_CALL(fGLCtx->gl(), DeleteTextures(1, &fTexID));
106 fTexID = 0;
107 }
108 }
109
releaseSurfaceToExternal(SkSurface *)110 void releaseSurfaceToExternal(SkSurface*) override {}
111
cleanup()112 void cleanup() override {
113 this->releaseImage();
114 }
115
116 bool init(skiatest::Reporter* reporter) override;
117
118 sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
119 AHardwareBuffer* buffer) override;
120 sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
121 AHardwareBuffer* buffer) override;
122
123 void doClientSync() override;
124 bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
125 bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
126 sk_sp<SkSurface>) override;
127
makeCurrent()128 void makeCurrent() override { fGLCtx->makeCurrent(); }
129
directContext()130 GrDirectContext* directContext() override { return fDirectContext; }
131
132 private:
133 bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer);
134
135 typedef EGLClientBuffer (*EGLGetNativeClientBufferANDROIDProc)(const struct AHardwareBuffer*);
136 typedef EGLImageKHR (*EGLCreateImageKHRProc)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer,
137 const EGLint*);
138 typedef void (*EGLImageTargetTexture2DOESProc)(EGLenum, void*);
139 EGLGetNativeClientBufferANDROIDProc fEGLGetNativeClientBufferANDROID;
140 EGLCreateImageKHRProc fEGLCreateImageKHR;
141 EGLImageTargetTexture2DOESProc fEGLImageTargetTexture2DOES;
142
143 PFNEGLCREATESYNCKHRPROC fEGLCreateSyncKHR;
144 PFNEGLWAITSYNCKHRPROC fEGLWaitSyncKHR;
145 PFNEGLGETSYNCATTRIBKHRPROC fEGLGetSyncAttribKHR;
146 PFNEGLDUPNATIVEFENCEFDANDROIDPROC fEGLDupNativeFenceFDANDROID;
147 PFNEGLDESTROYSYNCKHRPROC fEGLDestroySyncKHR;
148
149 EGLImageKHR fImage = EGL_NO_IMAGE_KHR;
150 GrGLuint fTexID = 0;
151
152 sk_gpu_test::GrContextFactory fFactory;
153 sk_gpu_test::ContextInfo fGLESContextInfo;
154
155 sk_gpu_test::GLTestContext* fGLCtx = nullptr;
156 GrDirectContext* fDirectContext = nullptr;
157 };
158
init(skiatest::Reporter * reporter)159 bool EGLTestHelper::init(skiatest::Reporter* reporter) {
160 fGLESContextInfo = fFactory.getContextInfo(skgpu::ContextType::kGLES);
161 fDirectContext = fGLESContextInfo.directContext();
162 fGLCtx = fGLESContextInfo.glContext();
163 if (!fDirectContext || !fGLCtx) {
164 return false;
165 }
166
167 if (kGLES_GrGLStandard != fGLCtx->gl()->fStandard) {
168 return false;
169 }
170
171 // Confirm we have egl and the needed extensions
172 if (!fGLCtx->gl()->hasExtension("EGL_KHR_image") ||
173 !fGLCtx->gl()->hasExtension("EGL_ANDROID_get_native_client_buffer") ||
174 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image_external") ||
175 !fGLCtx->gl()->hasExtension("GL_OES_EGL_image") ||
176 !fGLCtx->gl()->hasExtension("EGL_KHR_fence_sync") ||
177 !fGLCtx->gl()->hasExtension("EGL_ANDROID_native_fence_sync")) {
178 return false;
179 }
180
181 fEGLGetNativeClientBufferANDROID =
182 (EGLGetNativeClientBufferANDROIDProc) eglGetProcAddress("eglGetNativeClientBufferANDROID");
183 if (!fEGLGetNativeClientBufferANDROID) {
184 ERRORF(reporter, "Failed to get the eglGetNativeClientBufferAndroid proc");
185 return false;
186 }
187
188 fEGLCreateImageKHR = (EGLCreateImageKHRProc) eglGetProcAddress("eglCreateImageKHR");
189 if (!fEGLCreateImageKHR) {
190 ERRORF(reporter, "Failed to get the proc eglCreateImageKHR");
191 return false;
192 }
193
194 fEGLImageTargetTexture2DOES =
195 (EGLImageTargetTexture2DOESProc) eglGetProcAddress("glEGLImageTargetTexture2DOES");
196 if (!fEGLImageTargetTexture2DOES) {
197 ERRORF(reporter, "Failed to get the proc EGLImageTargetTexture2DOES");
198 return false;
199 }
200
201 fEGLCreateSyncKHR = (PFNEGLCREATESYNCKHRPROC) eglGetProcAddress("eglCreateSyncKHR");
202 if (!fEGLCreateSyncKHR) {
203 ERRORF(reporter, "Failed to get the proc eglCreateSyncKHR");
204 return false;
205
206 }
207 fEGLWaitSyncKHR = (PFNEGLWAITSYNCKHRPROC) eglGetProcAddress("eglWaitSyncKHR");
208 if (!fEGLWaitSyncKHR) {
209 ERRORF(reporter, "Failed to get the proc eglWaitSyncKHR");
210 return false;
211
212 }
213 fEGLGetSyncAttribKHR = (PFNEGLGETSYNCATTRIBKHRPROC) eglGetProcAddress("eglGetSyncAttribKHR");
214 if (!fEGLGetSyncAttribKHR) {
215 ERRORF(reporter, "Failed to get the proc eglGetSyncAttribKHR");
216 return false;
217
218 }
219 fEGLDupNativeFenceFDANDROID =
220 (PFNEGLDUPNATIVEFENCEFDANDROIDPROC) eglGetProcAddress("eglDupNativeFenceFDANDROID");
221 if (!fEGLDupNativeFenceFDANDROID) {
222 ERRORF(reporter, "Failed to get the proc eglDupNativeFenceFDANDROID");
223 return false;
224
225 }
226 fEGLDestroySyncKHR = (PFNEGLDESTROYSYNCKHRPROC) eglGetProcAddress("eglDestroySyncKHR");
227 if (!fEGLDestroySyncKHR) {
228 ERRORF(reporter, "Failed to get the proc eglDestroySyncKHR");
229 return false;
230
231 }
232
233 return true;
234 }
235
importHardwareBuffer(skiatest::Reporter * reporter,AHardwareBuffer * buffer)236 bool EGLTestHelper::importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer) {
237 while (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {}
238
239 EGLClientBuffer eglClientBuffer = fEGLGetNativeClientBufferANDROID(buffer);
240 EGLint eglAttribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
241 EGL_NONE };
242 EGLDisplay eglDisplay = eglGetCurrentDisplay();
243 fImage = fEGLCreateImageKHR(eglDisplay, EGL_NO_CONTEXT,
244 EGL_NATIVE_BUFFER_ANDROID,
245 eglClientBuffer, eglAttribs);
246 if (EGL_NO_IMAGE_KHR == fImage) {
247 SkDebugf("Could not create EGL image, err = (%#x)\n", (int) eglGetError() );
248 return false;
249 }
250
251 GR_GL_CALL(fGLCtx->gl(), GenTextures(1, &fTexID));
252 if (!fTexID) {
253 ERRORF(reporter, "Failed to create GL Texture");
254 return false;
255 }
256 GR_GL_CALL_NOERRCHECK(fGLCtx->gl(), BindTexture(GR_GL_TEXTURE_2D, fTexID));
257 if (fGLCtx->gl()->fFunctions.fGetError() != GR_GL_NO_ERROR) {
258 ERRORF(reporter, "Failed to bind GL Texture");
259 return false;
260 }
261
262 fEGLImageTargetTexture2DOES(GL_TEXTURE_2D, fImage);
263 if (GrGLenum error = fGLCtx->gl()->fFunctions.fGetError(); error != GR_GL_NO_ERROR) {
264 ERRORF(reporter, "EGLImageTargetTexture2DOES failed (%#x)", (int) error);
265 return false;
266 }
267
268 fDirectContext->resetContext(kTextureBinding_GrGLBackendState);
269 return true;
270 }
271
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)272 sk_sp<SkImage> EGLTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
273 AHardwareBuffer* buffer) {
274 if (!this->importHardwareBuffer(reporter, buffer)) {
275 return nullptr;
276 }
277 GrGLTextureInfo textureInfo;
278 textureInfo.fTarget = GR_GL_TEXTURE_2D;
279 textureInfo.fID = fTexID;
280 textureInfo.fFormat = GR_GL_RGBA8;
281
282 auto backendTex = GrBackendTextures::MakeGL(DEV_W, DEV_H, skgpu::Mipmapped::kNo, textureInfo);
283 REPORTER_ASSERT(reporter, backendTex.isValid());
284
285 sk_sp<SkImage> image = SkImages::BorrowTextureFrom(fDirectContext,
286 backendTex,
287 kTopLeft_GrSurfaceOrigin,
288 kRGBA_8888_SkColorType,
289 kPremul_SkAlphaType,
290 nullptr);
291
292 if (!image) {
293 ERRORF(reporter, "Failed to make wrapped GL SkImage");
294 return nullptr;
295 }
296
297 return image;
298 }
299
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)300 sk_sp<SkSurface> EGLTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
301 AHardwareBuffer* buffer) {
302 if (!this->importHardwareBuffer(reporter, buffer)) {
303 return nullptr;
304 }
305 GrGLTextureInfo textureInfo;
306 textureInfo.fTarget = GR_GL_TEXTURE_2D;
307 textureInfo.fID = fTexID;
308 textureInfo.fFormat = GR_GL_RGBA8;
309
310 auto backendTex = GrBackendTextures::MakeGL(DEV_W, DEV_H, skgpu::Mipmapped::kNo, textureInfo);
311 REPORTER_ASSERT(reporter, backendTex.isValid());
312
313 sk_sp<SkSurface> surface = SkSurfaces::WrapBackendTexture(fDirectContext,
314 backendTex,
315 kTopLeft_GrSurfaceOrigin,
316 0,
317 kRGBA_8888_SkColorType,
318 nullptr,
319 nullptr);
320
321 if (!surface) {
322 ERRORF(reporter, "Failed to make wrapped GL SkSurface");
323 return nullptr;
324 }
325
326 return surface;
327 }
328
flushSurfaceAndSignalSemaphore(skiatest::Reporter * reporter,sk_sp<SkSurface> surface)329 bool EGLTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
330 sk_sp<SkSurface> surface) {
331 skgpu::ganesh::FlushAndSubmit(surface);
332
333 EGLDisplay eglDisplay = eglGetCurrentDisplay();
334 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, nullptr);
335 if (EGL_NO_SYNC_KHR == eglsync) {
336 ERRORF(reporter, "Failed to create EGLSync for EGL_SYNC_NATIVE_FENCE_ANDROID\n");
337 return false;
338 }
339
340 GR_GL_CALL(fGLCtx->gl(), Flush());
341 fFdHandle = fEGLDupNativeFenceFDANDROID(eglDisplay, eglsync);
342
343 EGLint result = fEGLDestroySyncKHR(eglDisplay, eglsync);
344 if (EGL_TRUE != result) {
345 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
346 return false;
347 }
348
349 return true;
350 }
351
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)352 bool EGLTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
353 sk_sp<SkSurface> surface) {
354 EGLDisplay eglDisplay = eglGetCurrentDisplay();
355 EGLint attr[] = {
356 EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fdHandle,
357 EGL_NONE
358 };
359 EGLSyncKHR eglsync = fEGLCreateSyncKHR(eglDisplay, EGL_SYNC_NATIVE_FENCE_ANDROID, attr);
360 if (EGL_NO_SYNC_KHR == eglsync) {
361 ERRORF(reporter,
362 "Failed to create EGLSync when importing EGL_SYNC_NATIVE_FENCE_FD_ANDROID\n");
363 return false;
364 }
365 EGLint result = fEGLWaitSyncKHR(eglDisplay, eglsync, 0);
366 if (EGL_TRUE != result) {
367 ERRORF(reporter, "Failed called to eglWaitSyncKHR, error: %d\n", result);
368 // Don't return false yet, try to delete the sync first
369 }
370 result = fEGLDestroySyncKHR(eglDisplay, eglsync);
371 if (EGL_TRUE != result) {
372 ERRORF(reporter, "Failed to delete EGLSync, error: %d\n", result);
373 return false;
374 }
375 return true;
376 }
377
doClientSync()378 void EGLTestHelper::doClientSync() {
379 this->directContext()->flush();
380 this->directContext()->submit(GrSyncCpu::kYes);
381 }
382 #endif // SK_GL
383
384 #define DECLARE_VK_PROC(name) PFN_vk##name fVk##name
385
386 #define ACQUIRE_INST_VK_PROC(name) \
387 do { \
388 fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, fBackendContext.fInstance,\
389 VK_NULL_HANDLE)); \
390 if (fVk##name == nullptr) { \
391 ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
392 return false; \
393 } \
394 } while(false)
395
396 #define ACQUIRE_DEVICE_VK_PROC(name) \
397 do { \
398 fVk##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, VK_NULL_HANDLE, fDevice)); \
399 if (fVk##name == nullptr) { \
400 ERRORF(reporter, "Function ptr for vk%s could not be acquired\n", #name); \
401 return false; \
402 } \
403 } while(false)
404
405 class VulkanTestHelper : public BaseTestHelper {
406 public:
VulkanTestHelper()407 VulkanTestHelper() {}
408
~VulkanTestHelper()409 ~VulkanTestHelper() override {}
410
releaseImage()411 void releaseImage() override {
412 if (VK_NULL_HANDLE == fDevice) {
413 return;
414 }
415 if (fImage != VK_NULL_HANDLE) {
416 fVkDestroyImage(fDevice, fImage, nullptr);
417 fImage = VK_NULL_HANDLE;
418 }
419
420 if (fMemory != VK_NULL_HANDLE) {
421 fVkFreeMemory(fDevice, fMemory, nullptr);
422 fMemory = VK_NULL_HANDLE;
423 }
424 }
425
releaseSurfaceToExternal(SkSurface * surface)426 void releaseSurfaceToExternal(SkSurface* surface) override {
427 skgpu::MutableTextureState newState = skgpu::MutableTextureStates::MakeVulkan(
428 VK_IMAGE_LAYOUT_UNDEFINED, VK_QUEUE_FAMILY_EXTERNAL);
429 fDirectContext->flush(surface, {}, &newState);
430 }
431
cleanup()432 void cleanup() override {
433 fDirectContext.reset();
434 this->releaseImage();
435 if (fSignalSemaphore != VK_NULL_HANDLE) {
436 fVkDestroySemaphore(fDevice, fSignalSemaphore, nullptr);
437 fSignalSemaphore = VK_NULL_HANDLE;
438 }
439 fBackendContext.fMemoryAllocator.reset();
440 if (fDevice != VK_NULL_HANDLE) {
441 fVkDeviceWaitIdle(fDevice);
442 fVkDestroyDevice(fDevice, nullptr);
443 fDevice = VK_NULL_HANDLE;
444 }
445 #ifdef SK_ENABLE_VK_LAYERS
446 if (fDebugCallback != VK_NULL_HANDLE) {
447 fDestroyDebugCallback(fBackendContext.fInstance, fDebugCallback, nullptr);
448 }
449 #endif
450 if (fBackendContext.fInstance != VK_NULL_HANDLE) {
451 fVkDestroyInstance(fBackendContext.fInstance, nullptr);
452 fBackendContext.fInstance = VK_NULL_HANDLE;
453 }
454
455 delete fExtensions;
456
457 sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
458 delete fFeatures;
459 }
460
461 bool init(skiatest::Reporter* reporter) override;
462
doClientSync()463 void doClientSync() override {
464 if (!fDirectContext) {
465 return;
466 }
467
468 fDirectContext->submit(GrSyncCpu::kYes);
469 }
470
471 bool flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter, sk_sp<SkSurface>) override;
472 bool importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
473 sk_sp<SkSurface>) override;
474
475 sk_sp<SkImage> importHardwareBufferForRead(skiatest::Reporter* reporter,
476 AHardwareBuffer* buffer) override;
477
478 sk_sp<SkSurface> importHardwareBufferForWrite(skiatest::Reporter* reporter,
479 AHardwareBuffer* buffer) override;
480
makeCurrent()481 void makeCurrent() override {}
482
directContext()483 GrDirectContext* directContext() override { return fDirectContext.get(); }
484
485 private:
486 bool checkOptimalHardwareBuffer(skiatest::Reporter* reporter);
487
488 bool importHardwareBuffer(skiatest::Reporter* reporter, AHardwareBuffer* buffer, bool forWrite,
489 GrVkImageInfo* outImageInfo);
490
491 bool setupSemaphoreForSignaling(skiatest::Reporter* reporter, GrBackendSemaphore*);
492 bool exportSemaphore(skiatest::Reporter* reporter, const GrBackendSemaphore&);
493
494 DECLARE_VK_PROC(DestroyInstance);
495 DECLARE_VK_PROC(DeviceWaitIdle);
496 DECLARE_VK_PROC(DestroyDevice);
497
498 DECLARE_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
499 DECLARE_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
500 DECLARE_VK_PROC(GetPhysicalDeviceMemoryProperties2);
501
502 DECLARE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);
503
504 DECLARE_VK_PROC(CreateImage);
505 DECLARE_VK_PROC(GetImageMemoryRequirements2);
506 DECLARE_VK_PROC(DestroyImage);
507
508 DECLARE_VK_PROC(AllocateMemory);
509 DECLARE_VK_PROC(BindImageMemory2);
510 DECLARE_VK_PROC(FreeMemory);
511
512 DECLARE_VK_PROC(CreateSemaphore);
513 DECLARE_VK_PROC(GetSemaphoreFdKHR);
514 DECLARE_VK_PROC(ImportSemaphoreFdKHR);
515 DECLARE_VK_PROC(DestroySemaphore);
516
517 VkImage fImage = VK_NULL_HANDLE;
518 VkDeviceMemory fMemory = VK_NULL_HANDLE;
519
520 skgpu::VulkanExtensions* fExtensions = nullptr;
521 VkPhysicalDeviceFeatures2* fFeatures = nullptr;
522 VkDebugReportCallbackEXT fDebugCallback = VK_NULL_HANDLE;
523 PFN_vkDestroyDebugReportCallbackEXT fDestroyDebugCallback = nullptr;
524
525 // We hold on to the semaphore so we can delete once the GPU is done.
526 VkSemaphore fSignalSemaphore = VK_NULL_HANDLE;
527
528 VkDevice fDevice = VK_NULL_HANDLE;
529
530 skgpu::VulkanBackendContext fBackendContext;
531 sk_sp<GrDirectContext> fDirectContext;
532 };
533
init(skiatest::Reporter * reporter)534 bool VulkanTestHelper::init(skiatest::Reporter* reporter) {
535 PFN_vkGetInstanceProcAddr instProc;
536 if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc)) {
537 return false;
538 }
539
540 fExtensions = new skgpu::VulkanExtensions();
541 fFeatures = new VkPhysicalDeviceFeatures2;
542 memset(fFeatures, 0, sizeof(VkPhysicalDeviceFeatures2));
543 fFeatures->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
544 fFeatures->pNext = nullptr;
545
546 fBackendContext.fInstance = VK_NULL_HANDLE;
547 fBackendContext.fDevice = VK_NULL_HANDLE;
548
549 if (!sk_gpu_test::CreateVkBackendContext(instProc, &fBackendContext, fExtensions,
550 fFeatures, &fDebugCallback)) {
551 return false;
552 }
553 fDevice = fBackendContext.fDevice;
554 auto getProc = fBackendContext.fGetProc;
555
556 if (fDebugCallback != VK_NULL_HANDLE) {
557 fDestroyDebugCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
558 fBackendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
559 }
560
561 ACQUIRE_INST_VK_PROC(DestroyInstance);
562 ACQUIRE_INST_VK_PROC(DeviceWaitIdle);
563 ACQUIRE_INST_VK_PROC(DestroyDevice);
564
565 if (!fExtensions->hasExtension(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
566 2)) {
567 return false;
568 }
569 if (!fExtensions->hasExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, 1)) {
570 return false;
571 }
572 if (!fExtensions->hasExtension(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, 1)) {
573 return false;
574 }
575 if (!fExtensions->hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1)) {
576 // return false;
577 }
578
579 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceMemoryProperties2);
580 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceImageFormatProperties2);
581 ACQUIRE_INST_VK_PROC(GetPhysicalDeviceExternalSemaphoreProperties);
582
583 ACQUIRE_DEVICE_VK_PROC(GetAndroidHardwareBufferPropertiesANDROID);
584
585 ACQUIRE_DEVICE_VK_PROC(CreateImage);
586 ACQUIRE_DEVICE_VK_PROC(GetImageMemoryRequirements2);
587 ACQUIRE_DEVICE_VK_PROC(DestroyImage);
588
589 ACQUIRE_DEVICE_VK_PROC(AllocateMemory);
590 ACQUIRE_DEVICE_VK_PROC(BindImageMemory2);
591 ACQUIRE_DEVICE_VK_PROC(FreeMemory);
592
593 ACQUIRE_DEVICE_VK_PROC(CreateSemaphore);
594 ACQUIRE_DEVICE_VK_PROC(GetSemaphoreFdKHR);
595 ACQUIRE_DEVICE_VK_PROC(ImportSemaphoreFdKHR);
596 ACQUIRE_DEVICE_VK_PROC(DestroySemaphore);
597
598 fDirectContext = GrDirectContexts::MakeVulkan(fBackendContext);
599 REPORTER_ASSERT(reporter, fDirectContext.get());
600 if (!fDirectContext) {
601 return false;
602 }
603
604 return this->checkOptimalHardwareBuffer(reporter);
605 }
606
checkOptimalHardwareBuffer(skiatest::Reporter * reporter)607 bool VulkanTestHelper::checkOptimalHardwareBuffer(skiatest::Reporter* reporter) {
608 VkResult err;
609
610 VkPhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
611 externalImageFormatInfo.sType =
612 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
613 externalImageFormatInfo.pNext = nullptr;
614 externalImageFormatInfo.handleType =
615 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
616 //externalImageFormatInfo.handType = 0x80;
617
618 // We will create the hardware buffer with gpu sampled so these usages should all be valid
619 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
620 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
621 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
622 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
623 imageFormatInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
624 imageFormatInfo.pNext = &externalImageFormatInfo;
625 imageFormatInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
626 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
627 imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
628 imageFormatInfo.usage = usageFlags;
629 imageFormatInfo.flags = 0;
630
631 VkAndroidHardwareBufferUsageANDROID hwbUsage;
632 hwbUsage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
633 hwbUsage.pNext = nullptr;
634
635 VkExternalImageFormatProperties externalImgFormatProps;
636 externalImgFormatProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
637 externalImgFormatProps.pNext = &hwbUsage;
638
639 VkImageFormatProperties2 imgFormProps;
640 imgFormProps.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
641 imgFormProps.pNext = &externalImgFormatProps;
642
643 err = fVkGetPhysicalDeviceImageFormatProperties2(fBackendContext.fPhysicalDevice,
644 &imageFormatInfo, &imgFormProps);
645 if (VK_SUCCESS != err) {
646 ERRORF(reporter, "vkGetPhysicalDeviceImageFormatProperites failed, err: %d", err);
647 return false;
648 }
649
650 const VkImageFormatProperties& imageFormatProperties = imgFormProps.imageFormatProperties;
651 REPORTER_ASSERT(reporter, DEV_W <= imageFormatProperties.maxExtent.width);
652 REPORTER_ASSERT(reporter, DEV_H <= imageFormatProperties.maxExtent.height);
653
654 const VkExternalMemoryProperties& externalImageFormatProps =
655 externalImgFormatProps.externalMemoryProperties;
656 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT &
657 externalImageFormatProps.externalMemoryFeatures));
658 REPORTER_ASSERT(reporter, SkToBool(VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT &
659 externalImageFormatProps.externalMemoryFeatures));
660
661 REPORTER_ASSERT(reporter, SkToBool(AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE &
662 hwbUsage.androidHardwareBufferUsage));
663
664 return true;
665 }
666
importHardwareBuffer(skiatest::Reporter * reporter,AHardwareBuffer * buffer,bool forWrite,GrVkImageInfo * outImageInfo)667 bool VulkanTestHelper::importHardwareBuffer(skiatest::Reporter* reporter,
668 AHardwareBuffer* buffer,
669 bool forWrite,
670 GrVkImageInfo* outImageInfo) {
671 VkResult err;
672
673 VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
674 hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
675 hwbFormatProps.pNext = nullptr;
676
677 VkAndroidHardwareBufferPropertiesANDROID hwbProps;
678 hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
679 hwbProps.pNext = &hwbFormatProps;
680
681 err = fVkGetAndroidHardwareBufferPropertiesANDROID(fDevice, buffer, &hwbProps);
682 if (VK_SUCCESS != err) {
683 ERRORF(reporter, "GetAndroidHardwareBufferPropertiesAndroid failed, err: %d", err);
684 return false;
685 }
686
687 REPORTER_ASSERT(reporter, VK_FORMAT_R8G8B8A8_UNORM == hwbFormatProps.format);
688 REPORTER_ASSERT(reporter,
689 SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
690 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
691 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
692 if (forWrite) {
693 REPORTER_ASSERT(reporter,
694 SkToBool(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT & hwbFormatProps.formatFeatures));
695
696 }
697
698 bool useExternalFormat = VK_FORMAT_UNDEFINED == hwbFormatProps.format;
699 const VkExternalFormatANDROID externalFormatInfo {
700 VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, // sType
701 nullptr, // pNext
702 useExternalFormat ? hwbFormatProps.externalFormat : 0, // externalFormat
703 };
704
705 const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
706 VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
707 &externalFormatInfo, // pNext
708 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
709 };
710
711 VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
712 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
713 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
714 if (forWrite) {
715 usageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
716 }
717
718 const VkImageCreateInfo imageCreateInfo = {
719 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
720 &externalMemoryImageInfo, // pNext
721 0, // VkImageCreateFlags
722 VK_IMAGE_TYPE_2D, // VkImageType
723 hwbFormatProps.format, // VkFormat
724 { DEV_W, DEV_H, 1 }, // VkExtent3D
725 1, // mipLevels
726 1, // arrayLayers
727 VK_SAMPLE_COUNT_1_BIT, // samples
728 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling
729 usageFlags, // VkImageUsageFlags
730 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
731 0, // queueFamilyCount
732 0, // pQueueFamilyIndices
733 VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
734 };
735
736 err = fVkCreateImage(fDevice, &imageCreateInfo, nullptr, &fImage);
737 if (VK_SUCCESS != err) {
738 ERRORF(reporter, "Create Image failed, err: %d", err);
739 return false;
740 }
741
742 VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
743 phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
744 phyDevMemProps.pNext = nullptr;
745
746 uint32_t typeIndex = 0;
747 uint32_t heapIndex = 0;
748 bool foundHeap = false;
749 fVkGetPhysicalDeviceMemoryProperties2(fBackendContext.fPhysicalDevice, &phyDevMemProps);
750 uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
751 for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
752 if (hwbProps.memoryTypeBits & (1 << i)) {
753 const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
754 uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
755 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
756 if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
757 typeIndex = i;
758 heapIndex = pdmp.memoryTypes[i].heapIndex;
759 REPORTER_ASSERT(reporter, heapIndex < pdmp.memoryHeapCount);
760 foundHeap = true;
761 }
762 }
763 }
764
765 // Fallback to align with GrAHardwareBufferUtils
766 if (!foundHeap && hwbProps.memoryTypeBits) {
767 typeIndex = ffs(hwbProps.memoryTypeBits) - 1;
768 foundHeap = true;
769 }
770
771 if (!foundHeap) {
772 ERRORF(reporter, "Failed to find valid heap for imported memory");
773 return false;
774 }
775
776 VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
777 hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
778 hwbImportInfo.pNext = nullptr;
779 hwbImportInfo.buffer = buffer;
780
781 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
782 dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
783 dedicatedAllocInfo.pNext = &hwbImportInfo;
784 dedicatedAllocInfo.image = fImage;
785 dedicatedAllocInfo.buffer = VK_NULL_HANDLE;
786
787 VkMemoryAllocateInfo allocInfo = {
788 VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
789 &dedicatedAllocInfo, // pNext
790 hwbProps.allocationSize, // allocationSize
791 typeIndex, // memoryTypeIndex
792 };
793
794 err = fVkAllocateMemory(fDevice, &allocInfo, nullptr, &fMemory);
795 if (VK_SUCCESS != err) {
796 ERRORF(reporter, "AllocateMemory failed for imported buffer, err: %d", err);
797 return false;
798 }
799
800 VkBindImageMemoryInfo bindImageInfo;
801 bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
802 bindImageInfo.pNext = nullptr;
803 bindImageInfo.image = fImage;
804 bindImageInfo.memory = fMemory;
805 bindImageInfo.memoryOffset = 0;
806
807 err = fVkBindImageMemory2(fDevice, 1, &bindImageInfo);
808 if (VK_SUCCESS != err) {
809 ERRORF(reporter, "BindImageMemory failed for imported buffer, err: %d", err);
810 return false;
811 }
812
813 skgpu::VulkanAlloc alloc;
814 alloc.fMemory = fMemory;
815 alloc.fOffset = 0;
816 alloc.fSize = hwbProps.allocationSize;
817 alloc.fFlags = 0;
818
819 outImageInfo->fImage = fImage;
820 outImageInfo->fAlloc = alloc;
821 outImageInfo->fImageTiling = VK_IMAGE_TILING_OPTIMAL;
822 outImageInfo->fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
823 outImageInfo->fFormat = VK_FORMAT_R8G8B8A8_UNORM;
824 outImageInfo->fImageUsageFlags = usageFlags;
825 outImageInfo->fLevelCount = 1;
826 outImageInfo->fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
827 return true;
828 }
829
importHardwareBufferForRead(skiatest::Reporter * reporter,AHardwareBuffer * buffer)830 sk_sp<SkImage> VulkanTestHelper::importHardwareBufferForRead(skiatest::Reporter* reporter,
831 AHardwareBuffer* buffer) {
832 GrVkImageInfo imageInfo;
833 if (!this->importHardwareBuffer(reporter, buffer, false, &imageInfo)) {
834 return nullptr;
835 }
836
837 auto backendTex = GrBackendTextures::MakeVk(DEV_W, DEV_H, imageInfo);
838
839 sk_sp<SkImage> wrappedImage = SkImages::BorrowTextureFrom(fDirectContext.get(),
840 backendTex,
841 kTopLeft_GrSurfaceOrigin,
842 kRGBA_8888_SkColorType,
843 kPremul_SkAlphaType,
844 nullptr);
845
846 if (!wrappedImage.get()) {
847 ERRORF(reporter, "Failed to create wrapped Vulkan SkImage");
848 return nullptr;
849 }
850
851 return wrappedImage;
852 }
853
flushSurfaceAndSignalSemaphore(skiatest::Reporter * reporter,sk_sp<SkSurface> surface)854 bool VulkanTestHelper::flushSurfaceAndSignalSemaphore(skiatest::Reporter* reporter,
855 sk_sp<SkSurface> surface) {
856 this->releaseSurfaceToExternal(surface.get());
857 surface.reset();
858 GrBackendSemaphore semaphore;
859 if (!this->setupSemaphoreForSignaling(reporter, &semaphore)) {
860 return false;
861 }
862 GrFlushInfo info;
863 info.fNumSemaphores = 1;
864 info.fSignalSemaphores = &semaphore;
865 GrSemaphoresSubmitted submitted = fDirectContext->flush(info);
866 fDirectContext->submit();
867 if (GrSemaphoresSubmitted::kNo == submitted) {
868 ERRORF(reporter, "Failing call to flush on GrDirectContext");
869 return false;
870 }
871 SkASSERT(semaphore.isInitialized());
872 if (!this->exportSemaphore(reporter, semaphore)) {
873 return false;
874 }
875 return true;
876 }
877
setupSemaphoreForSignaling(skiatest::Reporter * reporter,GrBackendSemaphore * beSemaphore)878 bool VulkanTestHelper::setupSemaphoreForSignaling(skiatest::Reporter* reporter,
879 GrBackendSemaphore* beSemaphore) {
880 // Query supported info
881 VkPhysicalDeviceExternalSemaphoreInfo exSemInfo;
882 exSemInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
883 exSemInfo.pNext = nullptr;
884 exSemInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
885
886 VkExternalSemaphoreProperties exSemProps;
887 exSemProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
888 exSemProps.pNext = nullptr;
889
890 fVkGetPhysicalDeviceExternalSemaphoreProperties(fBackendContext.fPhysicalDevice, &exSemInfo,
891 &exSemProps);
892
893 if (!SkToBool(exSemProps.exportFromImportedHandleTypes &
894 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
895 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as exportFromImportedHandleTypes");
896 return false;
897 }
898 if (!SkToBool(exSemProps.compatibleHandleTypes &
899 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
900 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD not listed as compatibleHandleTypes");
901 return false;
902 }
903 if (!SkToBool(exSemProps.externalSemaphoreFeatures &
904 VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) ||
905 !SkToBool(exSemProps.externalSemaphoreFeatures &
906 VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT)) {
907 ERRORF(reporter, "HANDLE_TYPE_SYNC_FD doesn't support export and import feature");
908 return false;
909 }
910
911 VkExportSemaphoreCreateInfo exportInfo;
912 exportInfo.sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
913 exportInfo.pNext = nullptr;
914 exportInfo.handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
915
916 VkSemaphoreCreateInfo semaphoreInfo;
917 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
918 semaphoreInfo.pNext = &exportInfo;
919 semaphoreInfo.flags = 0;
920
921 VkSemaphore semaphore;
922 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
923 if (VK_SUCCESS != err) {
924 ERRORF(reporter, "Failed to create signal semaphore, err: %d", err);
925 return false;
926 }
927 *beSemaphore = GrBackendSemaphores::MakeVk(semaphore);
928 return true;
929 }
930
exportSemaphore(skiatest::Reporter * reporter,const GrBackendSemaphore & beSemaphore)931 bool VulkanTestHelper::exportSemaphore(skiatest::Reporter* reporter,
932 const GrBackendSemaphore& beSemaphore) {
933 VkSemaphore semaphore = GrBackendSemaphores::GetVkSemaphore(beSemaphore);
934 if (VK_NULL_HANDLE == semaphore) {
935 ERRORF(reporter, "Invalid vulkan handle in export call");
936 return false;
937 }
938
939 VkSemaphoreGetFdInfoKHR getFdInfo;
940 getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
941 getFdInfo.pNext = nullptr;
942 getFdInfo.semaphore = semaphore;
943 getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
944
945 VkResult err = fVkGetSemaphoreFdKHR(fDevice, &getFdInfo, &fFdHandle);
946 if (VK_SUCCESS != err) {
947 ERRORF(reporter, "Failed to export signal semaphore, err: %d", err);
948 return false;
949 }
950 fSignalSemaphore = semaphore;
951 return true;
952 }
953
importAndWaitOnSemaphore(skiatest::Reporter * reporter,int fdHandle,sk_sp<SkSurface> surface)954 bool VulkanTestHelper::importAndWaitOnSemaphore(skiatest::Reporter* reporter, int fdHandle,
955 sk_sp<SkSurface> surface) {
956 VkSemaphoreCreateInfo semaphoreInfo;
957 semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
958 semaphoreInfo.pNext = nullptr;
959 semaphoreInfo.flags = 0;
960
961 VkSemaphore semaphore;
962 VkResult err = fVkCreateSemaphore(fDevice, &semaphoreInfo, nullptr, &semaphore);
963 if (VK_SUCCESS != err) {
964 ERRORF(reporter, "Failed to create import semaphore, err: %d", err);
965 return false;
966 }
967
968 VkImportSemaphoreFdInfoKHR importInfo;
969 importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
970 importInfo.pNext = nullptr;
971 importInfo.semaphore = semaphore;
972 importInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT;
973 importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
974 importInfo.fd = fdHandle;
975
976 err = fVkImportSemaphoreFdKHR(fDevice, &importInfo);
977 if (VK_SUCCESS != err) {
978 ERRORF(reporter, "Failed to import semaphore, err: %d", err);
979 return false;
980 }
981
982 GrBackendSemaphore beSemaphore = GrBackendSemaphores::MakeVk(semaphore);
983 if (!surface->wait(1, &beSemaphore)) {
984 ERRORF(reporter, "Failed to add wait semaphore to surface");
985 fVkDestroySemaphore(fDevice, semaphore, nullptr);
986 return false;
987 }
988 return true;
989 }
990
importHardwareBufferForWrite(skiatest::Reporter * reporter,AHardwareBuffer * buffer)991 sk_sp<SkSurface> VulkanTestHelper::importHardwareBufferForWrite(skiatest::Reporter* reporter,
992 AHardwareBuffer* buffer) {
993 GrVkImageInfo imageInfo;
994 if (!this->importHardwareBuffer(reporter, buffer, true, &imageInfo)) {
995 return nullptr;
996 }
997
998 auto backendTex = GrBackendTextures::MakeVk(DEV_W, DEV_H, imageInfo);
999
1000 sk_sp<SkSurface> surface = SkSurfaces::WrapBackendTexture(fDirectContext.get(),
1001 backendTex,
1002 kTopLeft_GrSurfaceOrigin,
1003 0,
1004 kRGBA_8888_SkColorType,
1005 nullptr,
1006 nullptr);
1007
1008 if (!surface.get()) {
1009 ERRORF(reporter, "Failed to create wrapped Vulkan SkSurface");
1010 return nullptr;
1011 }
1012
1013 return surface;
1014 }
1015
get_src_color(int x,int y)1016 static SkPMColor get_src_color(int x, int y) {
1017 SkASSERT(x >= 0 && x < DEV_W);
1018 SkASSERT(y >= 0 && y < DEV_H);
1019
1020 U8CPU r = x;
1021 U8CPU g = y;
1022 U8CPU b = 0xc;
1023
1024 U8CPU a = 0xff;
1025 switch ((x+y) % 5) {
1026 case 0:
1027 a = 0xff;
1028 break;
1029 case 1:
1030 a = 0x80;
1031 break;
1032 case 2:
1033 a = 0xCC;
1034 break;
1035 case 4:
1036 a = 0x01;
1037 break;
1038 case 3:
1039 a = 0x00;
1040 break;
1041 }
1042 a = 0xff;
1043 return SkPremultiplyARGBInline(a, r, g, b);
1044 }
1045
make_src_bitmap()1046 static SkBitmap make_src_bitmap() {
1047 static SkBitmap bmp;
1048 if (bmp.isNull()) {
1049 bmp.allocN32Pixels(DEV_W, DEV_H);
1050 intptr_t pixels = reinterpret_cast<intptr_t>(bmp.getPixels());
1051 for (int y = 0; y < DEV_H; ++y) {
1052 for (int x = 0; x < DEV_W; ++x) {
1053 SkPMColor* pixel = reinterpret_cast<SkPMColor*>(
1054 pixels + y * bmp.rowBytes() + x * bmp.bytesPerPixel());
1055 *pixel = get_src_color(x, y);
1056 }
1057 }
1058 }
1059 return bmp;
1060 }
1061
check_read(skiatest::Reporter * reporter,const SkBitmap & srcBitmap,const SkBitmap & dstBitmap)1062 static bool check_read(skiatest::Reporter* reporter, const SkBitmap& srcBitmap,
1063 const SkBitmap& dstBitmap) {
1064 bool result = true;
1065 for (int y = 0; y < DEV_H && result; ++y) {
1066 for (int x = 0; x < DEV_W && result; ++x) {
1067 const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
1068 const uint32_t dstPixel = *dstBitmap.getAddr32(x, y);
1069 if (srcPixel != dstPixel) {
1070 ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1071 x, y, srcPixel, dstPixel);
1072 result = false;
1073 } /*else {
1074 ERRORF(reporter, "Got good readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
1075 x, y, srcPixel, dstPixel);
1076
1077 }*/
1078 }
1079 }
1080 return result;
1081 }
1082
cleanup_resources(BaseTestHelper * srcHelper,BaseTestHelper * dstHelper,AHardwareBuffer * buffer)1083 static void cleanup_resources(BaseTestHelper* srcHelper, BaseTestHelper* dstHelper,
1084 AHardwareBuffer* buffer) {
1085 if (srcHelper) {
1086 srcHelper->cleanup();
1087 }
1088 if (dstHelper) {
1089 dstHelper->cleanup();
1090 }
1091 if (buffer) {
1092 AHardwareBuffer_release(buffer);
1093 }
1094 }
1095
// How the source pixels get into the AHardwareBuffer.
enum class SrcType {
    kCPU,     // write pixels directly via AHardwareBuffer_lock/unlock
    kEGL,     // render into the buffer through an EGL/GL context
    kVulkan,  // render into the buffer through a Vulkan context
};
1101
// Which backend imports the AHardwareBuffer for reading/drawing.
enum class DstType {
    kEGL,     // import and sample through EGL/GL
    kVulkan,  // import and sample through Vulkan
};
1106
// Core test body: writes the reference bitmap into an AHardwareBuffer via
// `srcType` (CPU upload, EGL render, or Vulkan render), imports the same
// buffer into `dstType`, draws it to a fresh surface, and verifies the
// readback matches the reference. When `shareSyncs` is true, cross-API
// synchronization uses an exported/imported sync-fd semaphore instead of a
// CPU-side client wait.
// NOTE(review): file-local; could be declared static — confirm no other TU
// references it before changing linkage.
void run_test(skiatest::Reporter* reporter, const GrContextOptions& options,
              SrcType srcType, DstType dstType, bool shareSyncs) {
    if (SrcType::kCPU == srcType && shareSyncs) {
        // We don't currently test this since we don't do any syncs in this case.
        return;
    }
    std::unique_ptr<BaseTestHelper> srcHelper;
    std::unique_ptr<BaseTestHelper> dstHelper;
    AHardwareBuffer* buffer = nullptr;
    // Pick the source-side helper. SrcType::kCPU leaves srcHelper null; the
    // CPU path writes the buffer directly further below.
    if (SrcType::kVulkan == srcType) {
        srcHelper.reset(new VulkanTestHelper());
    } else if (SrcType::kEGL == srcType) {
#ifdef SK_GL
        srcHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "SrcType::kEGL used without OpenGL support.");
#endif
    }
    if (srcHelper) {
        if (!srcHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    // Pick and initialize the destination-side helper (always present).
    if (DstType::kVulkan == dstType) {
        dstHelper.reset(new VulkanTestHelper());
    } else {
#ifdef SK_GL
        SkASSERT(DstType::kEGL == dstType);
        dstHelper.reset(new EGLTestHelper(options));
#else
        SkASSERTF(false, "DstType::kEGL used without OpenGL support.");
#endif
    }
    if (dstHelper) {
        if (!dstHelper->init(reporter)) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Setup SkBitmaps
    ///////////////////////////////////////////////////////////////////////////

    SkBitmap srcBitmap = make_src_bitmap();
    SkBitmap dstBitmapSurface;
    dstBitmapSurface.allocN32Pixels(DEV_W, DEV_H);
    SkBitmap dstBitmapFinal;
    dstBitmapFinal.allocN32Pixels(DEV_W, DEV_H);

    ///////////////////////////////////////////////////////////////////////////
    // Setup AHardwareBuffer
    ///////////////////////////////////////////////////////////////////////////

    AHardwareBuffer_Desc hwbDesc;
    hwbDesc.width = DEV_W;
    hwbDesc.height = DEV_H;
    hwbDesc.layers = 1;
    // CPU source needs CPU-write access; GPU sources need GPU color output.
    if (SrcType::kCPU == srcType) {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
    } else {
        hwbDesc.usage = AHARDWAREBUFFER_USAGE_CPU_READ_NEVER |
                        AHARDWAREBUFFER_USAGE_CPU_WRITE_NEVER |
                        AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                        AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
    }
    hwbDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    // The following three are not used in the allocate
    hwbDesc.stride = 0;
    hwbDesc.rfu0= 0;
    hwbDesc.rfu1= 0;

    if (int error = AHardwareBuffer_allocate(&hwbDesc, &buffer)) {
        ERRORF(reporter, "Failed to allocated hardware buffer, error: %d", error);
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (SrcType::kCPU == srcType) {
        // Get actual desc for allocated buffer so we know the stride for uploading cpu data.
        AHardwareBuffer_describe(buffer, &hwbDesc);

        uint32_t* bufferAddr;
        if (AHardwareBuffer_lock(buffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr,
                                 reinterpret_cast<void**>(&bufferAddr))) {
            ERRORF(reporter, "Failed to lock hardware buffer");
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        // Copy row by row; the HWB stride (in pixels) may exceed DEV_W.
        int bbp = srcBitmap.bytesPerPixel();
        uint32_t* src = (uint32_t*)srcBitmap.getPixels();
        uint32_t* dst = bufferAddr;
        for (int y = 0; y < DEV_H; ++y) {
            memcpy(dst, src, DEV_W * bbp);
            src += DEV_W;
            dst += hwbDesc.stride;
        }

        // Sanity-check the upload while the buffer is still mapped.
        for (int y = 0; y < DEV_H; ++y) {
            for (int x = 0; x < DEV_W; ++x) {
                const uint32_t srcPixel = *srcBitmap.getAddr32(x, y);
                uint32_t dstPixel = bufferAddr[y * hwbDesc.stride + x];
                if (srcPixel != dstPixel) {
                    ERRORF(reporter, "CPU HWB Expected readpix (%d, %d) value 0x%08x, got 0x%08x.",
                           x, y, srcPixel, dstPixel);
                }
            }
        }

        AHardwareBuffer_unlock(buffer, nullptr);

    } else {
        // GPU source: wrap the HWB as a surface and render the bitmap into it.
        srcHelper->makeCurrent();
        sk_sp<SkSurface> surface = srcHelper->importHardwareBufferForWrite(reporter, buffer);

        if (!surface) {
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }

        sk_sp<SkImage> srcBmpImage = SkImages::RasterFromBitmap(srcBitmap);
        surface->getCanvas()->drawImage(srcBmpImage, 0, 0);

        // If we are testing sharing of syncs, don't do a read here since it
        // forces synchronization to occur.
        if (!shareSyncs) {
            bool readResult = surface->readPixels(dstBitmapSurface, 0, 0);
            if (!readResult) {
                ERRORF(reporter, "Read Pixels on surface failed");
                surface.reset();
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
            REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapSurface));
        }

        ///////////////////////////////////////////////////////////////////////////
        // Cleanup GL/EGL and add syncs
        ///////////////////////////////////////////////////////////////////////////

        if (shareSyncs) {
            // Semaphore path: flush + export a sync fd for the dst to wait on.
            if (!srcHelper->flushSurfaceAndSignalSemaphore(reporter, std::move(surface))) {
                cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
                return;
            }
        } else {
            // Client-sync path: CPU-wait until the src GPU work is done.
            srcHelper->releaseSurfaceToExternal(surface.get());
            srcHelper->doClientSync();
            surface.reset();
            srcHelper->releaseImage();
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Import the HWB into backend and draw it to a surface
    ///////////////////////////////////////////////////////////////////////////

    dstHelper->makeCurrent();
    sk_sp<SkImage> wrappedImage = dstHelper->importHardwareBufferForRead(reporter, buffer);

    if (!wrappedImage) {
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    auto direct = dstHelper->directContext();

    // Make SkSurface to render wrapped HWB into.
    SkImageInfo imageInfo = SkImageInfo::Make(DEV_W, DEV_H, kRGBA_8888_SkColorType,
                                              kPremul_SkAlphaType, nullptr);

    sk_sp<SkSurface> dstSurf = SkSurfaces::RenderTarget(
            direct, skgpu::Budgeted::kNo, imageInfo, 0, kTopLeft_GrSurfaceOrigin, nullptr, false);
    if (!dstSurf.get()) {
        ERRORF(reporter, "Failed to create destination SkSurface");
        wrappedImage.reset();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    if (shareSyncs) {
        // Import the src's exported sync fd and make the dst surface wait on it.
        if (!dstHelper->importAndWaitOnSemaphore(reporter, srcHelper->getFdHandle(), dstSurf)) {
            wrappedImage.reset();
            cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
            return;
        }
    }
    dstSurf->getCanvas()->drawImage(wrappedImage, 0, 0);

    bool readResult = dstSurf->readPixels(dstBitmapFinal, 0, 0);
    if (!readResult) {
        ERRORF(reporter, "Read Pixels failed");
        wrappedImage.reset();
        dstSurf.reset();
        dstHelper->doClientSync();
        cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
        return;
    }

    // Final end-to-end check: what came out the dst must match what went in.
    REPORTER_ASSERT(reporter, check_read(reporter, srcBitmap, dstBitmapFinal));

    dstSurf.reset();
    wrappedImage.reset();
    dstHelper->doClientSync();
    cleanup_resources(srcHelper.get(), dstHelper.get(), buffer);
}
1318
// Test registrations that have no GL dependency: each runs run_test() with a
// particular (src, dst, shareSyncs) combination.
DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kVulkan, true);
}
1336
#if defined(SK_GL)
// Test registrations involving EGL on either the src or dst side; these only
// exist when the GL backend is compiled in.
DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_CPU_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kCPU, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL, reporter, options, CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, false);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kEGL, true);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_Vulkan_EGL_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kVulkan, DstType::kEGL, true);
}

DEF_GANESH_TEST(VulkanHardwareBuffer_EGL_Vulkan_Syncs,
                reporter,
                options,
                CtsEnforcement::kApiLevel_T) {
    run_test(reporter, options, SrcType::kEGL, DstType::kVulkan, true);
}
#endif
1375
1376 #endif // defined(SK_GANESH) && defined(SK_BUILD_FOR_ANDROID) &&
1377 // __ANDROID_API__ >= 26 && defined(SK_VULKAN)
1378
1379