1 /*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "vulkan/vulkan_core.h"
18 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
19
20 #include <aidl/android/hardware/graphics/common/Dataspace.h>
21 #include <aidl/android/hardware/graphics/common/PixelFormat.h>
22 #include <android/hardware/graphics/common/1.0/types.h>
23 #include <android/hardware_buffer.h>
24 #include <grallocusage/GrallocUsageConversion.h>
25 #include <graphicsenv/GraphicsEnv.h>
26 #include <hardware/gralloc.h>
27 #include <hardware/gralloc1.h>
28 #include <log/log.h>
29 #include <sync/sync.h>
30 #include <system/window.h>
31 #include <ui/BufferQueueDefs.h>
32 #include <utils/StrongPointer.h>
33 #include <utils/Timers.h>
34 #include <utils/Trace.h>
35
36 #include <algorithm>
37 #include <unordered_set>
38 #include <vector>
39
40 #include "driver.h"
41
42 using PixelFormat = aidl::android::hardware::graphics::common::PixelFormat;
43 using DataSpace = aidl::android::hardware::graphics::common::Dataspace;
44 using android::hardware::graphics::common::V1_0::BufferUsage;
45
46 namespace vulkan {
47 namespace driver {
48
49 namespace {
50
static uint64_t convertGralloc1ToBufferUsage(uint64_t producerUsage,
52 uint64_t consumerUsage) {
53 static_assert(uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) ==
54 uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN),
55 "expected ConsumerUsage and ProducerUsage CPU_READ_OFTEN "
56 "bits to match");
57 uint64_t merged = producerUsage | consumerUsage;
58 if ((merged & (GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN)) ==
59 GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) {
60 merged &= ~uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN);
61 merged |= BufferUsage::CPU_READ_OFTEN;
62 }
63 if ((merged & (GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN)) ==
64 GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN) {
65 merged &= ~uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN);
66 merged |= BufferUsage::CPU_WRITE_OFTEN;
67 }
68 return merged;
69 }
70
71 const VkSurfaceTransformFlagsKHR kSupportedTransforms =
72 VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
73 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
74 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
75 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
76 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
77 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
78 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
79 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
80 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
81
VkSurfaceTransformFlagBitsKHR TranslateNativeToVulkanTransform(int native) {
83 // Native and Vulkan transforms are isomorphic, but are represented
84 // differently. Vulkan transforms are built up of an optional horizontal
85 // mirror, followed by a clockwise 0/90/180/270-degree rotation. Native
86 // transforms are built up from a horizontal flip, vertical flip, and
87 // 90-degree rotation, all optional but always in that order.
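    // For example, a native FLIP_V is the same as a horizontal mirror
    // followed by a 180-degree rotation, which is why it maps to
    // VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR below.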
88
89 switch (native) {
90 case 0:
91 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
92 case NATIVE_WINDOW_TRANSFORM_FLIP_H:
93 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR;
94 case NATIVE_WINDOW_TRANSFORM_FLIP_V:
95 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR;
96 case NATIVE_WINDOW_TRANSFORM_ROT_180:
97 return VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR;
98 case NATIVE_WINDOW_TRANSFORM_ROT_90:
99 return VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR;
100 case NATIVE_WINDOW_TRANSFORM_FLIP_H | NATIVE_WINDOW_TRANSFORM_ROT_90:
101 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR;
102 case NATIVE_WINDOW_TRANSFORM_FLIP_V | NATIVE_WINDOW_TRANSFORM_ROT_90:
103 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR;
104 case NATIVE_WINDOW_TRANSFORM_ROT_270:
105 return VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
106 case NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY:
107 default:
108 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
109 }
110 }
111
int TranslateVulkanToNativeTransform(VkSurfaceTransformFlagBitsKHR transform) {
113 switch (transform) {
114 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
115 return NATIVE_WINDOW_TRANSFORM_ROT_90;
116 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
117 return NATIVE_WINDOW_TRANSFORM_ROT_180;
118 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
119 return NATIVE_WINDOW_TRANSFORM_ROT_270;
120 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
121 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
122 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
123 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
124 NATIVE_WINDOW_TRANSFORM_ROT_90;
125 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
126 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
127 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
128 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
129 NATIVE_WINDOW_TRANSFORM_ROT_90;
130 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
131 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
132 default:
133 return 0;
134 }
135 }
136
int InvertTransformToNative(VkSurfaceTransformFlagBitsKHR transform) {
138 switch (transform) {
139 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
140 return NATIVE_WINDOW_TRANSFORM_ROT_270;
141 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
142 return NATIVE_WINDOW_TRANSFORM_ROT_180;
143 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
144 return NATIVE_WINDOW_TRANSFORM_ROT_90;
145 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
146 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
147 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
148 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
149 NATIVE_WINDOW_TRANSFORM_ROT_90;
150 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
151 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
152 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
153 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
154 NATIVE_WINDOW_TRANSFORM_ROT_90;
155 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
156 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
157 default:
158 return 0;
159 }
160 }
161
162 const static VkColorSpaceKHR colorSpaceSupportedByVkEXTSwapchainColorspace[] = {
163 VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
164 VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
165 VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
166 VK_COLOR_SPACE_BT709_LINEAR_EXT,
167 VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
168 VK_COLOR_SPACE_BT2020_LINEAR_EXT,
169 VK_COLOR_SPACE_HDR10_ST2084_EXT,
170 VK_COLOR_SPACE_HDR10_HLG_EXT,
171 VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
172 VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
173 VK_COLOR_SPACE_PASS_THROUGH_EXT,
174 VK_COLOR_SPACE_DCI_P3_LINEAR_EXT};
175
176 const static VkColorSpaceKHR
177 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly[] = {
178 VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
179 VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT};
180
181 class TimingInfo {
182 public:
    TimingInfo(const VkPresentTimeGOOGLE* qp, uint64_t nativeFrameId)
184 : vals_{qp->presentID, qp->desiredPresentTime, 0, 0, 0},
185 native_frame_id_(nativeFrameId) {}
    bool ready() const {
187 return (timestamp_desired_present_time_ !=
188 NATIVE_WINDOW_TIMESTAMP_PENDING &&
189 timestamp_actual_present_time_ !=
190 NATIVE_WINDOW_TIMESTAMP_PENDING &&
191 timestamp_render_complete_time_ !=
192 NATIVE_WINDOW_TIMESTAMP_PENDING &&
193 timestamp_composition_latch_time_ !=
194 NATIVE_WINDOW_TIMESTAMP_PENDING);
195 }
    void calculate(int64_t rdur) {
197 bool anyTimestampInvalid =
198 (timestamp_actual_present_time_ ==
199 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
200 (timestamp_render_complete_time_ ==
201 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
202 (timestamp_composition_latch_time_ ==
203 NATIVE_WINDOW_TIMESTAMP_INVALID);
204 if (anyTimestampInvalid) {
205 ALOGE("Unexpectedly received invalid timestamp.");
206 vals_.actualPresentTime = 0;
207 vals_.earliestPresentTime = 0;
208 vals_.presentMargin = 0;
209 return;
210 }
211
212 vals_.actualPresentTime =
213 static_cast<uint64_t>(timestamp_actual_present_time_);
214 int64_t margin = (timestamp_composition_latch_time_ -
215 timestamp_render_complete_time_);
216 // Calculate vals_.earliestPresentTime, and potentially adjust
217 // vals_.presentMargin. The initial value of vals_.earliestPresentTime
218 // is vals_.actualPresentTime. If we can subtract rdur (the duration
219 // of a refresh cycle) from vals_.earliestPresentTime (and also from
220 // vals_.presentMargin) and still leave a positive margin, then we can
221 // report to the application that it could have presented earlier than
        // it did (per the extension specification). If we can perform this
        // subtraction repeatedly, we do, since vals_.earliestPresentTime
        // really is supposed to be the "earliest".
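        // Illustrative example (made-up numbers, not from the extension
        // spec): if rendering completes at t=0ms, the composition latch
        // happens at t=40ms (margin = 40ms), the actual present is at
        // t=100ms, and rdur = 16.6ms, then the loop below runs twice,
        // yielding earliestPresentTime = 66.8ms and presentMargin ~= 6.8ms.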
225 int64_t early_time = timestamp_actual_present_time_;
226 while ((margin > rdur) &&
227 ((early_time - rdur) > timestamp_composition_latch_time_)) {
228 early_time -= rdur;
229 margin -= rdur;
230 }
231 vals_.earliestPresentTime = static_cast<uint64_t>(early_time);
232 vals_.presentMargin = static_cast<uint64_t>(margin);
233 }
    void get_values(VkPastPresentationTimingGOOGLE* values) const {
235 *values = vals_;
236 }
237
238 public:
239 VkPastPresentationTimingGOOGLE vals_ { 0, 0, 0, 0, 0 };
240
241 uint64_t native_frame_id_ { 0 };
242 int64_t timestamp_desired_present_time_{ NATIVE_WINDOW_TIMESTAMP_PENDING };
243 int64_t timestamp_actual_present_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
244 int64_t timestamp_render_complete_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
245 int64_t timestamp_composition_latch_time_
246 { NATIVE_WINDOW_TIMESTAMP_PENDING };
247 };
248
249 struct Surface {
250 android::sp<ANativeWindow> window;
251 VkSwapchainKHR swapchain_handle;
252 uint64_t consumer_usage;
253
    // Indicates whether this surface has ever been used by a swapchain,
    // regardless of whether that swapchain is still current or has been
    // destroyed.
256 bool used_by_swapchain;
257 };
258
VkSurfaceKHR HandleFromSurface(Surface* surface) {
260 return VkSurfaceKHR(reinterpret_cast<uint64_t>(surface));
261 }
262
Surface* SurfaceFromHandle(VkSurfaceKHR handle) {
264 return reinterpret_cast<Surface*>(handle);
265 }
266
267 // Maximum number of TimingInfo structs to keep per swapchain:
268 enum { MAX_TIMING_INFOS = 10 };
// Minimum number of frames to look for in the past (so we don't cause
// synchronous requests to SurfaceFlinger):
271 enum { MIN_NUM_FRAMES_AGO = 5 };
272
bool IsSharedPresentMode(VkPresentModeKHR mode) {
274 return mode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
275 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
276 }
277
278 struct Swapchain {
    Swapchain(Surface& surface_,
280 uint32_t num_images_,
281 VkPresentModeKHR present_mode,
282 int pre_transform_,
283 int64_t refresh_duration_)
284 : surface(surface_),
285 num_images(num_images_),
286 mailbox_mode(present_mode == VK_PRESENT_MODE_MAILBOX_KHR),
287 pre_transform(pre_transform_),
288 frame_timestamps_enabled(false),
289 refresh_duration(refresh_duration_),
290 acquire_next_image_timeout(-1),
291 shared(IsSharedPresentMode(present_mode)) {
292 }
293
    VkResult get_refresh_duration(uint64_t& outRefreshDuration)
295 {
296 ANativeWindow* window = surface.window.get();
297 int err = native_window_get_refresh_cycle_duration(
298 window,
299 &refresh_duration);
300 if (err != android::OK) {
301 ALOGE("%s:native_window_get_refresh_cycle_duration failed: %s (%d)",
302 __func__, strerror(-err), err );
303 return VK_ERROR_SURFACE_LOST_KHR;
304 }
305 outRefreshDuration = refresh_duration;
306 return VK_SUCCESS;
307 }
308
309 Surface& surface;
310 uint32_t num_images;
311 bool mailbox_mode;
312 int pre_transform;
313 bool frame_timestamps_enabled;
314 int64_t refresh_duration;
315 nsecs_t acquire_next_image_timeout;
316 bool shared;
317
318 struct Image {
        Image()
320 : image(VK_NULL_HANDLE),
321 dequeue_fence(-1),
322 release_fence(-1),
323 dequeued(false) {}
324 VkImage image;
325 // If the image is bound to memory, an sp to the underlying gralloc buffer.
326 // Otherwise, nullptr; the image will be bound to memory as part of
327 // AcquireNextImage.
328 android::sp<ANativeWindowBuffer> buffer;
329 // The fence is only valid when the buffer is dequeued, and should be
330 // -1 any other time. When valid, we own the fd, and must ensure it is
331 // closed: either by closing it explicitly when queueing the buffer,
332 // or by passing ownership e.g. to ANativeWindow::cancelBuffer().
333 int dequeue_fence;
334 // This fence is a dup of the sync fd returned from the driver via
335 // vkQueueSignalReleaseImageANDROID upon vkQueuePresentKHR. We must
336 // ensure it is closed upon re-presenting or releasing the image.
337 int release_fence;
338 bool dequeued;
339 } images[android::BufferQueueDefs::NUM_BUFFER_SLOTS];
340
341 std::vector<TimingInfo> timing;
342 };
343
VkSwapchainKHR HandleFromSwapchain(Swapchain* swapchain) {
345 return VkSwapchainKHR(reinterpret_cast<uint64_t>(swapchain));
346 }
347
Swapchain* SwapchainFromHandle(VkSwapchainKHR handle) {
349 return reinterpret_cast<Swapchain*>(handle);
350 }
351
static bool IsFencePending(int fd) {
353 if (fd < 0)
354 return false;
355
356 errno = 0;
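    // A zero-timeout sync_wait() returns 0 if the fence has already signaled,
    // and -1 with errno set to ETIME if it is still pending.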
357 return sync_wait(fd, 0 /* timeout */) == -1 && errno == ETIME;
358 }
359
void ReleaseSwapchainImage(VkDevice device,
361 bool shared_present,
362 ANativeWindow* window,
363 int release_fence,
364 Swapchain::Image& image,
365 bool defer_if_pending) {
366 ATRACE_CALL();
367
368 ALOG_ASSERT(release_fence == -1 || image.dequeued,
369 "ReleaseSwapchainImage: can't provide a release fence for "
370 "non-dequeued images");
371
372 if (image.dequeued) {
373 if (release_fence >= 0) {
374 // We get here from vkQueuePresentKHR. The application is
375 // responsible for creating an execution dependency chain from
376 // vkAcquireNextImage (dequeue_fence) to vkQueuePresentKHR
377 // (release_fence), so we can drop the dequeue_fence here.
378 if (image.dequeue_fence >= 0)
379 close(image.dequeue_fence);
380 } else {
381 // We get here during swapchain destruction, or various serious
382 // error cases e.g. when we can't create the release_fence during
383 // vkQueuePresentKHR. In non-error cases, the dequeue_fence should
384 // have already signalled, since the swapchain images are supposed
385 // to be idle before the swapchain is destroyed. In error cases,
386 // there may be rendering in flight to the image, but since we
387 // weren't able to create a release_fence, waiting for the
388 // dequeue_fence is about the best we can do.
389 release_fence = image.dequeue_fence;
390 }
391 image.dequeue_fence = -1;
392
393 // It's invalid to call cancelBuffer on a shared buffer
394 if (window && !shared_present) {
395 window->cancelBuffer(window, image.buffer.get(), release_fence);
396 } else {
397 if (release_fence >= 0) {
398 sync_wait(release_fence, -1 /* forever */);
399 close(release_fence);
400 }
401 }
402 release_fence = -1;
403 image.dequeued = false;
404 }
405
406 if (defer_if_pending && IsFencePending(image.release_fence))
407 return;
408
409 if (image.release_fence >= 0) {
410 close(image.release_fence);
411 image.release_fence = -1;
412 }
413
414 if (image.image) {
415 ATRACE_BEGIN("DestroyImage");
416 GetData(device).driver.DestroyImage(device, image.image, nullptr);
417 ATRACE_END();
418 image.image = VK_NULL_HANDLE;
419 }
420
421 image.buffer.clear();
422 }
423
void OrphanSwapchain(VkDevice device, Swapchain* swapchain) {
425 if (swapchain->surface.swapchain_handle != HandleFromSwapchain(swapchain))
426 return;
427 for (uint32_t i = 0; i < swapchain->num_images; i++) {
428 if (!swapchain->images[i].dequeued) {
429 ReleaseSwapchainImage(device, swapchain->shared, nullptr, -1,
430 swapchain->images[i], true);
431 }
432 }
433 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
434 swapchain->timing.clear();
435 }
436
uint32_t get_num_ready_timings(Swapchain& swapchain) {
438 if (swapchain.timing.size() < MIN_NUM_FRAMES_AGO) {
439 return 0;
440 }
441
442 uint32_t num_ready = 0;
443 const size_t num_timings = swapchain.timing.size() - MIN_NUM_FRAMES_AGO + 1;
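    // Only look at entries that are at least MIN_NUM_FRAMES_AGO frames old;
    // querying newer frames could force a synchronous round trip to
    // SurfaceFlinger (see the comment on MIN_NUM_FRAMES_AGO above).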
444 for (uint32_t i = 0; i < num_timings; i++) {
445 TimingInfo& ti = swapchain.timing[i];
446 if (ti.ready()) {
447 // This TimingInfo is ready to be reported to the user. Add it
448 // to the num_ready.
449 num_ready++;
450 continue;
451 }
452 // This TimingInfo is not yet ready to be reported to the user,
453 // and so we should look for any available timestamps that
454 // might make it ready.
455 int64_t desired_present_time = 0;
456 int64_t render_complete_time = 0;
457 int64_t composition_latch_time = 0;
458 int64_t actual_present_time = 0;
459 // Obtain timestamps:
460 int err = native_window_get_frame_timestamps(
461 swapchain.surface.window.get(), ti.native_frame_id_,
462 &desired_present_time, &render_complete_time,
463 &composition_latch_time,
464 nullptr, //&first_composition_start_time,
465 nullptr, //&last_composition_start_time,
466 nullptr, //&composition_finish_time,
467 &actual_present_time,
468 nullptr, //&dequeue_ready_time,
469 nullptr /*&reads_done_time*/);
470
471 if (err != android::OK) {
472 continue;
473 }
474
475 // Record the timestamp(s) we received, and then see if this TimingInfo
476 // is ready to be reported to the user:
477 ti.timestamp_desired_present_time_ = desired_present_time;
478 ti.timestamp_actual_present_time_ = actual_present_time;
479 ti.timestamp_render_complete_time_ = render_complete_time;
480 ti.timestamp_composition_latch_time_ = composition_latch_time;
481
482 if (ti.ready()) {
483 // The TimingInfo has received enough timestamps, and should now
484 // use those timestamps to calculate the info that should be
485 // reported to the user:
486 ti.calculate(swapchain.refresh_duration);
487 num_ready++;
488 }
489 }
490 return num_ready;
491 }
492
void copy_ready_timings(Swapchain& swapchain,
494 uint32_t* count,
495 VkPastPresentationTimingGOOGLE* timings) {
496 if (swapchain.timing.empty()) {
497 *count = 0;
498 return;
499 }
500
501 size_t last_ready = swapchain.timing.size() - 1;
502 while (!swapchain.timing[last_ready].ready()) {
503 if (last_ready == 0) {
504 *count = 0;
505 return;
506 }
507 last_ready--;
508 }
509
510 uint32_t num_copied = 0;
511 int32_t num_to_remove = 0;
512 for (uint32_t i = 0; i <= last_ready && num_copied < *count; i++) {
513 const TimingInfo& ti = swapchain.timing[i];
514 if (ti.ready()) {
515 ti.get_values(&timings[num_copied]);
516 num_copied++;
517 }
518 num_to_remove++;
519 }
520
521 // Discard old frames that aren't ready if newer frames are ready.
522 // We don't expect to get the timing info for those old frames.
523 swapchain.timing.erase(swapchain.timing.begin(),
524 swapchain.timing.begin() + num_to_remove);
525
526 *count = num_copied;
527 }
528
PixelFormat GetNativePixelFormat(VkFormat format) {
530 PixelFormat native_format = PixelFormat::RGBA_8888;
531 switch (format) {
532 case VK_FORMAT_R8G8B8A8_UNORM:
533 case VK_FORMAT_R8G8B8A8_SRGB:
534 native_format = PixelFormat::RGBA_8888;
535 break;
536 case VK_FORMAT_R5G6B5_UNORM_PACK16:
537 native_format = PixelFormat::RGB_565;
538 break;
539 case VK_FORMAT_R16G16B16A16_SFLOAT:
540 native_format = PixelFormat::RGBA_FP16;
541 break;
542 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
543 native_format = PixelFormat::RGBA_1010102;
544 break;
545 case VK_FORMAT_R8_UNORM:
546 native_format = PixelFormat::R_8;
547 break;
548 case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
549 native_format = PixelFormat::RGBA_10101010;
550 break;
551 default:
552 ALOGV("unsupported swapchain format %d", format);
553 break;
554 }
555 return native_format;
556 }
557
DataSpace GetNativeDataspace(VkColorSpaceKHR colorspace, VkFormat format) {
559 switch (colorspace) {
560 case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
561 return DataSpace::SRGB;
562 case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
563 return DataSpace::DISPLAY_P3;
564 case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
565 return DataSpace::SCRGB_LINEAR;
566 case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
567 return DataSpace::SCRGB;
568 case VK_COLOR_SPACE_DCI_P3_LINEAR_EXT:
569 return DataSpace::DCI_P3_LINEAR;
570 case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
571 return DataSpace::DCI_P3;
572 case VK_COLOR_SPACE_BT709_LINEAR_EXT:
573 return DataSpace::SRGB_LINEAR;
574 case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
575 return DataSpace::SRGB;
576 case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
577 if (format == VK_FORMAT_R16G16B16A16_SFLOAT) {
578 return DataSpace::BT2020_LINEAR_EXTENDED;
579 } else {
580 return DataSpace::BT2020_LINEAR;
581 }
582 case VK_COLOR_SPACE_HDR10_ST2084_EXT:
583 return DataSpace::BT2020_PQ;
584 case VK_COLOR_SPACE_DOLBYVISION_EXT:
585 return DataSpace::BT2020_PQ;
586 case VK_COLOR_SPACE_HDR10_HLG_EXT:
587 return DataSpace::BT2020_HLG;
588 case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
589 return DataSpace::ADOBE_RGB_LINEAR;
590 case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
591 return DataSpace::ADOBE_RGB;
        // Pass-through is intended to allow the app to provide data that is
        // passed to the display system without modification.
594 case VK_COLOR_SPACE_PASS_THROUGH_EXT:
595 return DataSpace::ARBITRARY;
596
597 default:
598 // This indicates that we don't know about the
599 // dataspace specified and we should indicate that
600 // it's unsupported
601 return DataSpace::UNKNOWN;
602 }
603 }
604
605 } // anonymous namespace
606
607 VKAPI_ATTR
VkResult CreateAndroidSurfaceKHR(
609 VkInstance instance,
610 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
611 const VkAllocationCallbacks* allocator,
612 VkSurfaceKHR* out_surface) {
613 ATRACE_CALL();
614
615 if (!allocator)
616 allocator = &GetData(instance).allocator;
617 void* mem = allocator->pfnAllocation(allocator->pUserData, sizeof(Surface),
618 alignof(Surface),
619 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
620 if (!mem)
621 return VK_ERROR_OUT_OF_HOST_MEMORY;
622 Surface* surface = new (mem) Surface;
623
624 surface->window = pCreateInfo->window;
625 surface->swapchain_handle = VK_NULL_HANDLE;
626 surface->used_by_swapchain = false;
627 int err = native_window_get_consumer_usage(surface->window.get(),
628 &surface->consumer_usage);
629 if (err != android::OK) {
630 ALOGE("native_window_get_consumer_usage() failed: %s (%d)",
631 strerror(-err), err);
632 surface->~Surface();
633 allocator->pfnFree(allocator->pUserData, surface);
634 return VK_ERROR_SURFACE_LOST_KHR;
635 }
636
637 err =
638 native_window_api_connect(surface->window.get(), NATIVE_WINDOW_API_EGL);
639 if (err != android::OK) {
640 ALOGE("native_window_api_connect() failed: %s (%d)", strerror(-err),
641 err);
642 surface->~Surface();
643 allocator->pfnFree(allocator->pUserData, surface);
644 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
645 }
646
647 *out_surface = HandleFromSurface(surface);
648 return VK_SUCCESS;
649 }
650
651 VKAPI_ATTR
void DestroySurfaceKHR(VkInstance instance,
653 VkSurfaceKHR surface_handle,
654 const VkAllocationCallbacks* allocator) {
655 ATRACE_CALL();
656
657 Surface* surface = SurfaceFromHandle(surface_handle);
658 if (!surface)
659 return;
660 native_window_api_disconnect(surface->window.get(), NATIVE_WINDOW_API_EGL);
661 ALOGV_IF(surface->swapchain_handle != VK_NULL_HANDLE,
662 "destroyed VkSurfaceKHR 0x%" PRIx64
663 " has active VkSwapchainKHR 0x%" PRIx64,
664 reinterpret_cast<uint64_t>(surface_handle),
665 reinterpret_cast<uint64_t>(surface->swapchain_handle));
666 surface->~Surface();
667 if (!allocator)
668 allocator = &GetData(instance).allocator;
669 allocator->pfnFree(allocator->pUserData, surface);
670 }
671
672 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice /*pdev*/,
674 uint32_t /*queue_family*/,
675 VkSurfaceKHR /*surface_handle*/,
676 VkBool32* supported) {
677 *supported = VK_TRUE;
678 return VK_SUCCESS;
679 }
680
681 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfaceCapabilitiesKHR(
683 VkPhysicalDevice pdev,
684 VkSurfaceKHR surface,
685 VkSurfaceCapabilitiesKHR* capabilities) {
686 ATRACE_CALL();
687
688 // Implement in terms of GetPhysicalDeviceSurfaceCapabilities2KHR
689
690 VkPhysicalDeviceSurfaceInfo2KHR info2 = {
691 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
692 nullptr,
693 surface
694 };
695
696 VkSurfaceCapabilities2KHR caps2 = {
697 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
698 nullptr,
699 {},
700 };
701
702 VkResult result = GetPhysicalDeviceSurfaceCapabilities2KHR(pdev, &info2, &caps2);
703 *capabilities = caps2.surfaceCapabilities;
704 return result;
705 }
706
707 // Does the call-twice and VK_INCOMPLETE handling for querying lists
708 // of things, where we already have the full set built in a vector.
709 template <typename T>
VkResult CopyWithIncomplete(std::vector<T> const& things,
711 T* callerPtr, uint32_t* callerCount) {
712 VkResult result = VK_SUCCESS;
713 if (callerPtr) {
714 if (things.size() > *callerCount)
715 result = VK_INCOMPLETE;
716 *callerCount = std::min(uint32_t(things.size()), *callerCount);
717 std::copy(things.begin(), things.begin() + *callerCount, callerPtr);
718 } else {
719 *callerCount = things.size();
720 }
721 return result;
722 }
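// Used below by the surface queries (e.g. GetPhysicalDeviceSurfaceFormatsKHR
// and GetPhysicalDeviceSurfacePresentModesKHR) to report their result lists.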
723
724 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice pdev,
726 VkSurfaceKHR surface_handle,
727 uint32_t* count,
728 VkSurfaceFormatKHR* formats) {
729 ATRACE_CALL();
730
731 const InstanceData& instance_data = GetData(pdev);
732
733 uint64_t consumer_usage = 0;
734 bool colorspace_ext =
735 instance_data.hook_extensions.test(ProcHook::EXT_swapchain_colorspace);
736 if (surface_handle == VK_NULL_HANDLE) {
737 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
738 bool surfaceless_enabled =
739 instance_data.hook_extensions.test(surfaceless);
740 if (!surfaceless_enabled) {
741 return VK_ERROR_SURFACE_LOST_KHR;
742 }
743 // Support for VK_GOOGLE_surfaceless_query.
744
745 // TODO(b/203826952): research proper value; temporarily use the
746 // values seen on Pixel
747 consumer_usage = AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY;
748 } else {
749 Surface& surface = *SurfaceFromHandle(surface_handle);
750 consumer_usage = surface.consumer_usage;
751 }
752
753 AHardwareBuffer_Desc desc = {};
754 desc.width = 1;
755 desc.height = 1;
756 desc.layers = 1;
757 desc.usage = consumer_usage | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
758 AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
759
760 // We must support R8G8B8A8
761 std::vector<VkSurfaceFormatKHR> all_formats = {
762 {VK_FORMAT_R8G8B8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
763 {VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
764 };
765
766 VkFormat format = VK_FORMAT_UNDEFINED;
767 if (colorspace_ext) {
768 for (VkColorSpaceKHR colorSpace :
769 colorSpaceSupportedByVkEXTSwapchainColorspace) {
770 format = VK_FORMAT_R8G8B8A8_UNORM;
771 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
772 all_formats.emplace_back(
773 VkSurfaceFormatKHR{format, colorSpace});
774 }
775
776 format = VK_FORMAT_R8G8B8A8_SRGB;
777 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
778 all_formats.emplace_back(
779 VkSurfaceFormatKHR{format, colorSpace});
780 }
781 }
782 }
783
    // NOTE: Any new formats that are added must be coordinated across
    // different Android users. This includes the ANGLE team (ANGLE is a
    // layered implementation of OpenGL ES).
787
788 format = VK_FORMAT_R5G6B5_UNORM_PACK16;
789 desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
790 if (AHardwareBuffer_isSupported(&desc)) {
791 all_formats.emplace_back(
792 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
793 if (colorspace_ext) {
794 for (VkColorSpaceKHR colorSpace :
795 colorSpaceSupportedByVkEXTSwapchainColorspace) {
796 if (GetNativeDataspace(colorSpace, format) !=
797 DataSpace::UNKNOWN) {
798 all_formats.emplace_back(
799 VkSurfaceFormatKHR{format, colorSpace});
800 }
801 }
802 }
803 }
804
805 format = VK_FORMAT_R16G16B16A16_SFLOAT;
806 desc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
807 if (AHardwareBuffer_isSupported(&desc)) {
808 all_formats.emplace_back(
809 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
810 if (colorspace_ext) {
811 for (VkColorSpaceKHR colorSpace :
812 colorSpaceSupportedByVkEXTSwapchainColorspace) {
813 if (GetNativeDataspace(colorSpace, format) !=
814 DataSpace::UNKNOWN) {
815 all_formats.emplace_back(
816 VkSurfaceFormatKHR{format, colorSpace});
817 }
818 }
819
820 for (
821 VkColorSpaceKHR colorSpace :
822 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly) {
823 if (GetNativeDataspace(colorSpace, format) !=
824 DataSpace::UNKNOWN) {
825 all_formats.emplace_back(
826 VkSurfaceFormatKHR{format, colorSpace});
827 }
828 }
829 }
830 }
831
832 format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
833 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
834 if (AHardwareBuffer_isSupported(&desc)) {
835 all_formats.emplace_back(
836 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
837 if (colorspace_ext) {
838 for (VkColorSpaceKHR colorSpace :
839 colorSpaceSupportedByVkEXTSwapchainColorspace) {
840 if (GetNativeDataspace(colorSpace, format) !=
841 DataSpace::UNKNOWN) {
842 all_formats.emplace_back(
843 VkSurfaceFormatKHR{format, colorSpace});
844 }
845 }
846 }
847 }
848
849 format = VK_FORMAT_R8_UNORM;
850 desc.format = AHARDWAREBUFFER_FORMAT_R8_UNORM;
851 if (AHardwareBuffer_isSupported(&desc)) {
852 if (colorspace_ext) {
853 all_formats.emplace_back(
854 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_PASS_THROUGH_EXT});
855 }
856 }
857
858 bool rgba10x6_formats_ext = false;
859 uint32_t exts_count;
860 const auto& driver = GetData(pdev).driver;
861 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
862 nullptr);
863 std::vector<VkExtensionProperties> props(exts_count);
864 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
865 props.data());
866 for (uint32_t i = 0; i < exts_count; i++) {
867 VkExtensionProperties prop = props[i];
868 if (strcmp(prop.extensionName,
869 VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME) == 0) {
870 rgba10x6_formats_ext = true;
871 }
872 }
873 format = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16;
874 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A10_UNORM;
875 if (AHardwareBuffer_isSupported(&desc) && rgba10x6_formats_ext) {
876 all_formats.emplace_back(
877 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
878 if (colorspace_ext) {
879 for (VkColorSpaceKHR colorSpace :
880 colorSpaceSupportedByVkEXTSwapchainColorspace) {
881 if (GetNativeDataspace(colorSpace, format) !=
882 DataSpace::UNKNOWN) {
883 all_formats.emplace_back(
884 VkSurfaceFormatKHR{format, colorSpace});
885 }
886 }
887 }
888 }
889
    // NOTE: Any new formats that are added must be coordinated across
    // different Android users. This includes the ANGLE team (ANGLE is a
    // layered implementation of OpenGL ES).
893
894 return CopyWithIncomplete(all_formats, formats, count);
895 }
896
897 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfaceCapabilities2KHR(
899 VkPhysicalDevice physicalDevice,
900 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
901 VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
902 ATRACE_CALL();
903
904 auto surface = pSurfaceInfo->surface;
905 auto capabilities = &pSurfaceCapabilities->surfaceCapabilities;
906
907 VkSurfacePresentModeEXT const *pPresentMode = nullptr;
908 for (auto pNext = reinterpret_cast<VkBaseInStructure const *>(pSurfaceInfo->pNext);
909 pNext; pNext = reinterpret_cast<VkBaseInStructure const *>(pNext->pNext)) {
910 switch (pNext->sType) {
911 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT:
912 pPresentMode = reinterpret_cast<VkSurfacePresentModeEXT const *>(pNext);
913 break;
914
915 default:
916 break;
917 }
918 }
919
920 int err;
921 int width, height;
922 int transform_hint;
923 int max_buffer_count;
924 int min_undequeued_buffers;
925 if (surface == VK_NULL_HANDLE) {
926 const InstanceData& instance_data = GetData(physicalDevice);
927 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
928 bool surfaceless_enabled =
929 instance_data.hook_extensions.test(surfaceless);
930 if (!surfaceless_enabled) {
931 // It is an error to pass a surface==VK_NULL_HANDLE unless the
932 // VK_GOOGLE_surfaceless_query extension is enabled
933 return VK_ERROR_SURFACE_LOST_KHR;
934 }
935 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
936 // extension for this function is for
937 // VkSurfaceProtectedCapabilitiesKHR::supportsProtected. The following
938 // four values cannot be known without a surface. Default values will
939 // be supplied anyway, but cannot be relied upon.
940 width = 0xFFFFFFFF;
941 height = 0xFFFFFFFF;
942 transform_hint = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
943 capabilities->minImageCount = 0xFFFFFFFF;
944 capabilities->maxImageCount = 0xFFFFFFFF;
945 } else {
946 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
947
948 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
949 if (err != android::OK) {
950 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
951 strerror(-err), err);
952 return VK_ERROR_SURFACE_LOST_KHR;
953 }
954 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
955 if (err != android::OK) {
            ALOGE("NATIVE_WINDOW_DEFAULT_HEIGHT query failed: %s (%d)",
957 strerror(-err), err);
958 return VK_ERROR_SURFACE_LOST_KHR;
959 }
960
961 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
962 &transform_hint);
963 if (err != android::OK) {
964 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
965 strerror(-err), err);
966 return VK_ERROR_SURFACE_LOST_KHR;
967 }
968
969 err = window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT,
970 &max_buffer_count);
971 if (err != android::OK) {
972 ALOGE("NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d)",
973 strerror(-err), err);
974 return VK_ERROR_SURFACE_LOST_KHR;
975 }
976
977 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
978 &min_undequeued_buffers);
979 if (err != android::OK) {
980 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
981 strerror(-err), err);
982 return VK_ERROR_SURFACE_LOST_KHR;
983 }
984
        // The additional buffer count over min_undequeued_buffers comes from
        // 2 total buffers being technically enough for FIFO (although a poor
        // experience) vs 3 being the absolute minimum for MAILBOX to be
        // useful. So min_undequeued_buffers + 2 is a sensible default.
988 static constexpr int default_additional_buffers = 2;
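        // Illustrative values: min_undequeued_buffers = 2 and
        // max_buffer_count = 64 would give minImageCount = 4 and
        // maxImageCount = 64 for the FIFO/MAILBOX cases below.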
989
990 if(pPresentMode != nullptr) {
991 switch (pPresentMode->presentMode) {
992 case VK_PRESENT_MODE_IMMEDIATE_KHR:
993 ALOGE("Swapchain present mode VK_PRESENT_MODE_IMMEDIATE_KHR is not supported");
994 break;
995 case VK_PRESENT_MODE_MAILBOX_KHR:
996 case VK_PRESENT_MODE_FIFO_KHR:
997 capabilities->minImageCount = std::min(max_buffer_count,
998 min_undequeued_buffers + default_additional_buffers);
999 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1000 break;
1001 case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
                ALOGE("Swapchain present mode VK_PRESENT_MODE_FIFO_RELAXED_KHR "
                      "is not supported");
1004 break;
1005 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1006 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1007 capabilities->minImageCount = 1;
1008 capabilities->maxImageCount = 1;
1009 break;
1010
1011 default:
1012 ALOGE("Unrecognized swapchain present mode %u is not supported",
1013 pPresentMode->presentMode);
1014 break;
1015 }
1016 } else {
1017 capabilities->minImageCount = std::min(max_buffer_count,
1018 min_undequeued_buffers + default_additional_buffers);
1019 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1020 }
1021 }
1022
1023 capabilities->currentExtent =
1024 VkExtent2D{static_cast<uint32_t>(width), static_cast<uint32_t>(height)};
1025
1026 // TODO(http://b/134182502): Figure out what the max extent should be.
1027 capabilities->minImageExtent = VkExtent2D{1, 1};
1028 capabilities->maxImageExtent = VkExtent2D{4096, 4096};
1029
1030 if (capabilities->maxImageExtent.height <
1031 capabilities->currentExtent.height) {
1032 capabilities->maxImageExtent.height =
1033 capabilities->currentExtent.height;
1034 }
1035
1036 if (capabilities->maxImageExtent.width <
1037 capabilities->currentExtent.width) {
1038 capabilities->maxImageExtent.width = capabilities->currentExtent.width;
1039 }
1040
1041 capabilities->maxImageArrayLayers = 1;
1042
1043 capabilities->supportedTransforms = kSupportedTransforms;
1044 capabilities->currentTransform =
1045 TranslateNativeToVulkanTransform(transform_hint);
1046
1047 // On Android, window composition is a WindowManager property, not something
1048 // associated with the bufferqueue. It can't be changed from here.
1049 capabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1050
1051 capabilities->supportedUsageFlags =
1052 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
1053 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
1054 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
1055 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
1056
1057 for (auto pNext = reinterpret_cast<VkBaseOutStructure*>(pSurfaceCapabilities->pNext);
1058 pNext; pNext = reinterpret_cast<VkBaseOutStructure*>(pNext->pNext)) {
1059
1060 switch (pNext->sType) {
1061 case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: {
1062 VkSharedPresentSurfaceCapabilitiesKHR* shared_caps =
1063 reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(pNext);
1064 // Claim same set of usage flags are supported for
1065 // shared present modes as for other modes.
1066 shared_caps->sharedPresentSupportedUsageFlags =
1067 pSurfaceCapabilities->surfaceCapabilities
1068 .supportedUsageFlags;
1069 } break;
1070
1071 case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
1072 VkSurfaceProtectedCapabilitiesKHR* protected_caps =
1073 reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>(pNext);
1074 protected_caps->supportsProtected = VK_TRUE;
1075 } break;
1076
1077 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: {
1078 VkSurfacePresentScalingCapabilitiesEXT* scaling_caps =
1079 reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>(pNext);
1080 // By default, Android stretches the buffer to fit the window,
1081 // without preserving aspect ratio. Other modes are technically possible
1082 // but consult with CoGS team before exposing them here!
1083 scaling_caps->supportedPresentScaling = VK_PRESENT_SCALING_STRETCH_BIT_EXT;
1084
1085 // Since we always scale, we don't support any gravity.
1086 scaling_caps->supportedPresentGravityX = 0;
1087 scaling_caps->supportedPresentGravityY = 0;
1088
1089 // Scaled image limits are just the basic image limits
1090 scaling_caps->minScaledImageExtent = capabilities->minImageExtent;
1091 scaling_caps->maxScaledImageExtent = capabilities->maxImageExtent;
1092 } break;
1093
1094 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: {
1095 VkSurfacePresentModeCompatibilityEXT* mode_caps =
1096 reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>(pNext);
1097
1098 ALOG_ASSERT(pPresentMode,
1099 "querying VkSurfacePresentModeCompatibilityEXT "
1100 "requires VkSurfacePresentModeEXT to be provided");
1101 std::vector<VkPresentModeKHR> compatibleModes;
1102 compatibleModes.push_back(pPresentMode->presentMode);
1103
1104 switch (pPresentMode->presentMode) {
1105 // Shared modes are both compatible with each other.
1106 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1107 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1108 break;
1109 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1110 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1111 break;
1112 default:
1113 // Other modes are only compatible with themselves.
1114 // TODO: consider whether switching between FIFO and MAILBOX is reasonable
1115 break;
1116 }
1117
1118 // Note: this does not generate VK_INCOMPLETE since we're nested inside
1119 // a larger query and there would be no way to determine exactly where it came from.
1120 CopyWithIncomplete(compatibleModes, mode_caps->pPresentModes,
1121 &mode_caps->presentModeCount);
1122 } break;
1123
1124 default:
1125 // Ignore all other extension structs
1126 break;
1127 }
1128 }
1129
1130 return VK_SUCCESS;
1131 }
1132
1133 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfaceFormats2KHR(
1135 VkPhysicalDevice physicalDevice,
1136 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
1137 uint32_t* pSurfaceFormatCount,
1138 VkSurfaceFormat2KHR* pSurfaceFormats) {
1139 ATRACE_CALL();
1140
1141 if (!pSurfaceFormats) {
1142 return GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice,
1143 pSurfaceInfo->surface,
1144 pSurfaceFormatCount, nullptr);
1145 }
1146
1147 // temp vector for forwarding; we'll marshal it into the pSurfaceFormats
1148 // after the call.
1149 std::vector<VkSurfaceFormatKHR> surface_formats(*pSurfaceFormatCount);
1150 VkResult result = GetPhysicalDeviceSurfaceFormatsKHR(
1151 physicalDevice, pSurfaceInfo->surface, pSurfaceFormatCount,
1152 surface_formats.data());
1153
1154 if (result != VK_SUCCESS && result != VK_INCOMPLETE) {
1155 return result;
1156 }
1157
1158 const auto& driver = GetData(physicalDevice).driver;
1159
1160 // marshal results individually due to stride difference.
1161 uint32_t formats_to_marshal = *pSurfaceFormatCount;
1162 for (uint32_t i = 0u; i < formats_to_marshal; i++) {
1163 pSurfaceFormats[i].surfaceFormat = surface_formats[i];
1164
1165 // Query the compression properties for the surface format
1166 VkSurfaceFormat2KHR* pSurfaceFormat = &pSurfaceFormats[i];
1167 while (pSurfaceFormat->pNext) {
1168 pSurfaceFormat =
1169 reinterpret_cast<VkSurfaceFormat2KHR*>(pSurfaceFormat->pNext);
1170 switch (pSurfaceFormat->sType) {
1171 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
1172 VkImageCompressionPropertiesEXT* surfaceCompressionProps =
1173 reinterpret_cast<VkImageCompressionPropertiesEXT*>(
1174 pSurfaceFormat);
1175
1176 if (surfaceCompressionProps &&
1177 (driver.GetPhysicalDeviceImageFormatProperties2KHR ||
1178 driver.GetPhysicalDeviceImageFormatProperties2)) {
1179 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {};
1180 imageFormatInfo.sType =
1181 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
1182 imageFormatInfo.format =
1183 pSurfaceFormats[i].surfaceFormat.format;
1184 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
1185 imageFormatInfo.usage =
1186 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1187 imageFormatInfo.pNext = nullptr;
1188
1189 VkImageCompressionControlEXT compressionControl = {};
1190 compressionControl.sType =
1191 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT;
1192 compressionControl.pNext = imageFormatInfo.pNext;
1193 compressionControl.flags =
1194 VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT;
1195
1196 imageFormatInfo.pNext = &compressionControl;
1197
1198 VkImageCompressionPropertiesEXT compressionProps = {};
1199 compressionProps.sType =
1200 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT;
1201 compressionProps.pNext = nullptr;
1202
1203 VkImageFormatProperties2KHR imageFormatProps = {};
1204 imageFormatProps.sType =
1205 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
1206 imageFormatProps.pNext = &compressionProps;
1207
1208 VkResult compressionRes =
1209 GetPhysicalDeviceImageFormatProperties2(
1210 physicalDevice, &imageFormatInfo,
1211 &imageFormatProps);
1212 if (compressionRes == VK_SUCCESS) {
1213 surfaceCompressionProps->imageCompressionFlags =
1214 compressionProps.imageCompressionFlags;
1215 surfaceCompressionProps
1216 ->imageCompressionFixedRateFlags =
1217 compressionProps.imageCompressionFixedRateFlags;
1218 } else if (compressionRes ==
1219 VK_ERROR_OUT_OF_HOST_MEMORY ||
1220 compressionRes ==
1221 VK_ERROR_OUT_OF_DEVICE_MEMORY) {
1222 return compressionRes;
1223 } else {
1224 // For any of the *_NOT_SUPPORTED errors we continue
1225 // onto the next format
1226 continue;
1227 }
1228 }
1229 } break;
1230
1231 default:
1232 // Ignore all other extension structs
1233 break;
1234 }
1235 }
1236 }
1237
1238 return result;
1239 }
1240
1241 VKAPI_ATTR
VkResult GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice pdev,
1243 VkSurfaceKHR surface,
1244 uint32_t* count,
1245 VkPresentModeKHR* modes) {
1246 ATRACE_CALL();
1247
1248 int err;
1249 int query_value;
1250 std::vector<VkPresentModeKHR> present_modes;
1251 if (surface == VK_NULL_HANDLE) {
1252 const InstanceData& instance_data = GetData(pdev);
1253 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
1254 bool surfaceless_enabled =
1255 instance_data.hook_extensions.test(surfaceless);
1256 if (!surfaceless_enabled) {
1257 return VK_ERROR_SURFACE_LOST_KHR;
1258 }
1259 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
1260 // extension for this function is for
1261 // VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR and
1262 // VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR. We technically cannot
1263 // know if VK_PRESENT_MODE_SHARED_MAILBOX_KHR is supported without a
1264 // surface, and that cannot be relied upon. Therefore, don't return it.
1265 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1266 } else {
1267 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1268
1269 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1270 &query_value);
1271 if (err != android::OK || query_value < 0) {
1272 ALOGE(
1273 "NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d) "
1274 "value=%d",
1275 strerror(-err), err, query_value);
1276 return VK_ERROR_SURFACE_LOST_KHR;
1277 }
1278 uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1279
1280 err =
1281 window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT, &query_value);
1282 if (err != android::OK || query_value < 0) {
1283 ALOGE(
1284 "NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d) value=%d",
1285 strerror(-err), err, query_value);
1286 return VK_ERROR_SURFACE_LOST_KHR;
1287 }
1288 uint32_t max_buffer_count = static_cast<uint32_t>(query_value);
1289
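        // MAILBOX needs at least one buffer beyond FIFO's requirement to be
        // useful, so it is only advertised when the window allows more than
        // min_undequeued_buffers + 1 buffers.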
1290 if (min_undequeued_buffers + 1 < max_buffer_count)
1291 present_modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR);
1292 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1293 }
1294
1295 VkPhysicalDevicePresentationPropertiesANDROID present_properties;
1296 QueryPresentationProperties(pdev, &present_properties);
1297 if (present_properties.sharedImage) {
1298 present_modes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1299 present_modes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1300 }
1301
1302 return CopyWithIncomplete(present_modes, modes, count);
1303 }
1304
1305 VKAPI_ATTR
VkResult GetDeviceGroupPresentCapabilitiesKHR(
1307 VkDevice,
1308 VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
1309 ATRACE_CALL();
1310
1311 ALOGV_IF(pDeviceGroupPresentCapabilities->sType !=
1312 VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
1313 "vkGetDeviceGroupPresentCapabilitiesKHR: invalid "
1314 "VkDeviceGroupPresentCapabilitiesKHR structure type %d",
1315 pDeviceGroupPresentCapabilities->sType);
1316
1317 memset(pDeviceGroupPresentCapabilities->presentMask, 0,
1318 sizeof(pDeviceGroupPresentCapabilities->presentMask));
1319
1320 // assume device group of size 1
1321 pDeviceGroupPresentCapabilities->presentMask[0] = 1 << 0;
1322 pDeviceGroupPresentCapabilities->modes =
1323 VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1324
1325 return VK_SUCCESS;
1326 }
1327
1328 VKAPI_ATTR
VkResult GetDeviceGroupSurfacePresentModesKHR(
1330 VkDevice,
1331 VkSurfaceKHR,
1332 VkDeviceGroupPresentModeFlagsKHR* pModes) {
1333 ATRACE_CALL();
1334
1335 *pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1336 return VK_SUCCESS;
1337 }
1338
1339 VKAPI_ATTR
VkResult GetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice,
1341 VkSurfaceKHR surface,
1342 uint32_t* pRectCount,
1343 VkRect2D* pRects) {
1344 ATRACE_CALL();
1345
1346 if (!pRects) {
1347 *pRectCount = 1;
1348 } else {
1349 uint32_t count = std::min(*pRectCount, 1u);
1350 bool incomplete = *pRectCount < 1;
1351
1352 *pRectCount = count;
1353
1354 if (incomplete) {
1355 return VK_INCOMPLETE;
1356 }
1357
1358 int err;
1359 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1360
1361 int width = 0, height = 0;
1362 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
1363 if (err != android::OK) {
1364 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
1365 strerror(-err), err);
1366 }
1367 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
1368 if (err != android::OK) {
            ALOGE("NATIVE_WINDOW_DEFAULT_HEIGHT query failed: %s (%d)",
1370 strerror(-err), err);
1371 }
1372
1373 pRects[0].offset.x = 0;
1374 pRects[0].offset.y = 0;
1375 pRects[0].extent = VkExtent2D{static_cast<uint32_t>(width),
1376 static_cast<uint32_t>(height)};
1377 }
1378 return VK_SUCCESS;
1379 }
1380
static void DestroySwapchainInternal(VkDevice device,
1382 VkSwapchainKHR swapchain_handle,
1383 const VkAllocationCallbacks* allocator) {
1384 ATRACE_CALL();
1385
1386 const auto& dispatch = GetData(device).driver;
1387 Swapchain* swapchain = SwapchainFromHandle(swapchain_handle);
1388 if (!swapchain) {
1389 return;
1390 }
1391
1392 bool active = swapchain->surface.swapchain_handle == swapchain_handle;
1393 ANativeWindow* window = active ? swapchain->surface.window.get() : nullptr;
1394
1395 if (window && swapchain->frame_timestamps_enabled) {
1396 native_window_enable_frame_timestamps(window, false);
1397 }
1398
1399 for (uint32_t i = 0; i < swapchain->num_images; i++) {
1400 ReleaseSwapchainImage(device, swapchain->shared, window, -1,
1401 swapchain->images[i], false);
1402 }
1403
1404 if (active) {
1405 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
1406 }
1407
1408 if (!allocator) {
1409 allocator = &GetData(device).allocator;
1410 }
1411
1412 swapchain->~Swapchain();
1413 allocator->pfnFree(allocator->pUserData, swapchain);
1414 }
1415
static VkResult getProducerUsage(const VkDevice& device,
1417 const VkSwapchainCreateInfoKHR* create_info,
1418 const VkSwapchainImageUsageFlagsANDROID swapchain_image_usage,
1419 bool create_protected_swapchain,
1420 uint64_t* producer_usage) {
1421 // Get the physical device to query the appropriate producer usage
1422 const VkPhysicalDevice& pdev = GetData(device).driver_physical_device;
1423 const InstanceData& instance_data = GetData(pdev);
1424 const InstanceDriverTable& instance_dispatch = instance_data.driver;
1425 if (instance_dispatch.GetPhysicalDeviceImageFormatProperties2 ||
1426 instance_dispatch.GetPhysicalDeviceImageFormatProperties2KHR) {
1427 // Look through the create_info pNext chain passed to createSwapchainKHR
1428 // for an image compression control struct.
1429 // if one is found AND the appropriate extensions are enabled, create a
1430 // VkImageCompressionControlEXT structure to pass on to
1431 // GetPhysicalDeviceImageFormatProperties2
1432 void* compression_control_pNext = nullptr;
1433 VkImageCompressionControlEXT image_compression = {};
1434 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1435 while (create_infos->pNext) {
1436 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(create_infos->pNext);
1437 switch (create_infos->sType) {
1438 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1439 const VkImageCompressionControlEXT* compression_infos =
1440 reinterpret_cast<const VkImageCompressionControlEXT*>(create_infos);
1441 image_compression = *compression_infos;
1442 image_compression.pNext = nullptr;
1443 compression_control_pNext = &image_compression;
1444 } break;
1445 default:
1446 // Ignore all other info structs
1447 break;
1448 }
1449 }
1450
1451 // call GetPhysicalDeviceImageFormatProperties2KHR
1452 VkPhysicalDeviceExternalImageFormatInfo external_image_format_info = {
1453 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
1454 .pNext = compression_control_pNext,
1455 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
1456 };
1457
1458 // AHB does not have an sRGB format so we can't pass it to GPDIFP
1459 // We need to convert the format to unorm if it is srgb
1460 VkFormat format = create_info->imageFormat;
1461 if (format == VK_FORMAT_R8G8B8A8_SRGB) {
1462 format = VK_FORMAT_R8G8B8A8_UNORM;
1463 }
1464
1465 VkPhysicalDeviceImageFormatInfo2 image_format_info = {
1466 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
1467 .pNext = &external_image_format_info,
1468 .format = format,
1469 .type = VK_IMAGE_TYPE_2D,
1470 .tiling = VK_IMAGE_TILING_OPTIMAL,
1471 .usage = create_info->imageUsage,
1472 .flags = create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
1473 };
1474
1475 // If supporting mutable format swapchain add the mutable format flag
1476 if (create_info->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
1477 image_format_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1478 image_format_info.flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR;
1479 }
1480
1481 VkAndroidHardwareBufferUsageANDROID ahb_usage;
1482 ahb_usage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
1483 ahb_usage.pNext = nullptr;
1484
1485 VkImageFormatProperties2 image_format_properties;
1486 image_format_properties.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
1487 image_format_properties.pNext = &ahb_usage;
1488
1489 VkResult result = GetPhysicalDeviceImageFormatProperties2(
1490 pdev, &image_format_info, &image_format_properties);
1491 if (result != VK_SUCCESS) {
1492 ALOGE(
1493 "VkGetPhysicalDeviceImageFormatProperties2 for AHB usage "
1494 "failed: %d",
1495 result);
1496 return VK_ERROR_SURFACE_LOST_KHR;
1497 }
1498
1499 // Determine if USAGE_FRONT_BUFFER is needed.
1500 // GPDIFP2 has no means of using VkSwapchainImageUsageFlagsANDROID when
1501 // querying for producer_usage. So androidHardwareBufferUsage will not
1502 // contain USAGE_FRONT_BUFFER. We need to manually check for usage here.
1503 if (!(swapchain_image_usage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)) {
1504 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1505 return VK_SUCCESS;
1506 }
1507
1508 // Check if USAGE_FRONT_BUFFER is supported for this swapchain
1509 AHardwareBuffer_Desc ahb_desc = {
1510 .width = create_info->imageExtent.width,
1511 .height = create_info->imageExtent.height,
1512 .layers = create_info->imageArrayLayers,
1513 .format = create_info->imageFormat,
1514 .usage = ahb_usage.androidHardwareBufferUsage | AHARDWAREBUFFER_USAGE_FRONT_BUFFER,
1515 .stride = 0, // stride is always ignored when calling isSupported()
1516 };
1517
1518 // If FRONT_BUFFER is not supported,
1519 // then we need to call GetSwapchainGrallocUsageXAndroid below
1520 if (AHardwareBuffer_isSupported(&ahb_desc)) {
1521 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1522 *producer_usage |= AHARDWAREBUFFER_USAGE_FRONT_BUFFER;
1523 return VK_SUCCESS;
1524 }
1525 }
1526
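    // Fall back to the driver's swapchain gralloc-usage entry points, newest
    // first (v4, then v3, v2, and the original), translating the result into
    // a single 64-bit usage mask.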
1527 uint64_t native_usage = 0;
1528 void* usage_info_pNext = nullptr;
1529 VkResult result;
1530 VkImageCompressionControlEXT image_compression = {};
1531 const auto& dispatch = GetData(device).driver;
1532 if (dispatch.GetSwapchainGrallocUsage4ANDROID) {
1533 ATRACE_BEGIN("GetSwapchainGrallocUsage4ANDROID");
1534 VkGrallocUsageInfo2ANDROID gralloc_usage_info = {};
1535 gralloc_usage_info.sType =
1536 VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_2_ANDROID;
1537 gralloc_usage_info.format = create_info->imageFormat;
1538 gralloc_usage_info.imageUsage = create_info->imageUsage;
1539 gralloc_usage_info.swapchainImageUsage = swapchain_image_usage;
1540
1541 // Look through the pNext chain for an image compression control struct
1542 // if one is found AND the appropriate extensions are enabled,
1543         // append it to the gralloc usage pNext chain
1544 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1545 while (create_infos->pNext) {
1546 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1547 create_infos->pNext);
1548 switch (create_infos->sType) {
1549 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1550 const VkImageCompressionControlEXT* compression_infos =
1551 reinterpret_cast<const VkImageCompressionControlEXT*>(
1552 create_infos);
1553 image_compression = *compression_infos;
1554 image_compression.pNext = nullptr;
1555 usage_info_pNext = &image_compression;
1556 } break;
1557
1558 default:
1559 // Ignore all other info structs
1560 break;
1561 }
1562 }
1563 gralloc_usage_info.pNext = usage_info_pNext;
1564
1565 result = dispatch.GetSwapchainGrallocUsage4ANDROID(
1566 device, &gralloc_usage_info, &native_usage);
1567 ATRACE_END();
1568 if (result != VK_SUCCESS) {
1569 ALOGE("vkGetSwapchainGrallocUsage4ANDROID failed: %d", result);
1570 return VK_ERROR_SURFACE_LOST_KHR;
1571 }
1572 } else if (dispatch.GetSwapchainGrallocUsage3ANDROID) {
1573 ATRACE_BEGIN("GetSwapchainGrallocUsage3ANDROID");
1574 VkGrallocUsageInfoANDROID gralloc_usage_info = {};
1575 gralloc_usage_info.sType = VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_ANDROID;
1576 gralloc_usage_info.format = create_info->imageFormat;
1577 gralloc_usage_info.imageUsage = create_info->imageUsage;
1578
1579 // Look through the pNext chain for an image compression control struct
1580 // if one is found AND the appropriate extensions are enabled,
1581         // append it to the gralloc usage pNext chain
1582 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1583 while (create_infos->pNext) {
1584 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1585 create_infos->pNext);
1586 switch (create_infos->sType) {
1587 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1588 const VkImageCompressionControlEXT* compression_infos =
1589 reinterpret_cast<const VkImageCompressionControlEXT*>(
1590 create_infos);
1591 image_compression = *compression_infos;
1592 image_compression.pNext = nullptr;
1593 usage_info_pNext = &image_compression;
1594 } break;
1595
1596 default:
1597 // Ignore all other info structs
1598 break;
1599 }
1600 }
1601 gralloc_usage_info.pNext = usage_info_pNext;
1602
1603 result = dispatch.GetSwapchainGrallocUsage3ANDROID(
1604 device, &gralloc_usage_info, &native_usage);
1605 ATRACE_END();
1606 if (result != VK_SUCCESS) {
1607 ALOGE("vkGetSwapchainGrallocUsage3ANDROID failed: %d", result);
1608 return VK_ERROR_SURFACE_LOST_KHR;
1609 }
1610 } else if (dispatch.GetSwapchainGrallocUsage2ANDROID) {
1611 uint64_t consumer_usage, producer_usage;
1612 ATRACE_BEGIN("GetSwapchainGrallocUsage2ANDROID");
1613 result = dispatch.GetSwapchainGrallocUsage2ANDROID(
1614 device, create_info->imageFormat, create_info->imageUsage,
1615 swapchain_image_usage, &consumer_usage, &producer_usage);
1616 ATRACE_END();
1617 if (result != VK_SUCCESS) {
1618 ALOGE("vkGetSwapchainGrallocUsage2ANDROID failed: %d", result);
1619 return VK_ERROR_SURFACE_LOST_KHR;
1620 }
1621 native_usage =
1622 convertGralloc1ToBufferUsage(producer_usage, consumer_usage);
1623 } else if (dispatch.GetSwapchainGrallocUsageANDROID) {
1624 ATRACE_BEGIN("GetSwapchainGrallocUsageANDROID");
1625 int32_t legacy_usage = 0;
1626 result = dispatch.GetSwapchainGrallocUsageANDROID(
1627 device, create_info->imageFormat, create_info->imageUsage,
1628 &legacy_usage);
1629 ATRACE_END();
1630 if (result != VK_SUCCESS) {
1631 ALOGE("vkGetSwapchainGrallocUsageANDROID failed: %d", result);
1632 return VK_ERROR_SURFACE_LOST_KHR;
1633 }
1634 native_usage = static_cast<uint64_t>(legacy_usage);
1635 }
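    // If the driver exposes none of the gralloc-usage entry points above,
    // native_usage stays 0 and the caller ends up relying only on the
    // consumer usage bits it already collected.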
1636 *producer_usage = native_usage;
1637
1638 return VK_SUCCESS;
1639 }
1640
1641 VKAPI_ATTR
1642 VkResult CreateSwapchainKHR(VkDevice device,
1643 const VkSwapchainCreateInfoKHR* create_info,
1644 const VkAllocationCallbacks* allocator,
1645 VkSwapchainKHR* swapchain_handle) {
1646 ATRACE_CALL();
1647
1648 int err;
1649 VkResult result = VK_SUCCESS;
1650
1651 ALOGV("vkCreateSwapchainKHR: surface=0x%" PRIx64
1652 " minImageCount=%u imageFormat=%u imageColorSpace=%u"
1653 " imageExtent=%ux%u imageUsage=%#x preTransform=%u presentMode=%u"
1654 " oldSwapchain=0x%" PRIx64,
1655 reinterpret_cast<uint64_t>(create_info->surface),
1656 create_info->minImageCount, create_info->imageFormat,
1657 create_info->imageColorSpace, create_info->imageExtent.width,
1658 create_info->imageExtent.height, create_info->imageUsage,
1659 create_info->preTransform, create_info->presentMode,
1660 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1661
1662 if (!allocator)
1663 allocator = &GetData(device).allocator;
1664
1665 PixelFormat native_pixel_format =
1666 GetNativePixelFormat(create_info->imageFormat);
1667 DataSpace native_dataspace = GetNativeDataspace(
1668 create_info->imageColorSpace, create_info->imageFormat);
1669 if (native_dataspace == DataSpace::UNKNOWN) {
1670 ALOGE(
1671 "CreateSwapchainKHR(VkSwapchainCreateInfoKHR.imageColorSpace = %d) "
1672 "failed: Unsupported color space",
1673 create_info->imageColorSpace);
1674 return VK_ERROR_INITIALIZATION_FAILED;
1675 }
1676
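    // These checks are verbose-only diagnostics; unsupported values are
    // logged but not rejected here.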
1677 ALOGV_IF(create_info->imageArrayLayers != 1,
1678 "swapchain imageArrayLayers=%u not supported",
1679 create_info->imageArrayLayers);
1680 ALOGV_IF((create_info->preTransform & ~kSupportedTransforms) != 0,
1681 "swapchain preTransform=%#x not supported",
1682 create_info->preTransform);
1683 ALOGV_IF(!(create_info->presentMode == VK_PRESENT_MODE_FIFO_KHR ||
1684 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ||
1685 create_info->presentMode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
1686 create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR),
1687 "swapchain presentMode=%u not supported",
1688 create_info->presentMode);
1689
1690 Surface& surface = *SurfaceFromHandle(create_info->surface);
1691
1692 if (surface.swapchain_handle != create_info->oldSwapchain) {
1693 ALOGV("Can't create a swapchain for VkSurfaceKHR 0x%" PRIx64
1694 " because it already has active swapchain 0x%" PRIx64
1695 " but VkSwapchainCreateInfo::oldSwapchain=0x%" PRIx64,
1696 reinterpret_cast<uint64_t>(create_info->surface),
1697 reinterpret_cast<uint64_t>(surface.swapchain_handle),
1698 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1699 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
1700 }
1701 if (create_info->oldSwapchain != VK_NULL_HANDLE)
1702 OrphanSwapchain(device, SwapchainFromHandle(create_info->oldSwapchain));
1703
1704 // -- Reset the native window --
1705 // The native window might have been used previously, and had its properties
1706 // changed from defaults. That will affect the answer we get for queries
1707 // like MIN_UNDEQUED_BUFFERS. Reset to a known/default state before we
1708 // attempt such queries.
1709
1710 // The native window only allows dequeueing all buffers before any have
1711 // been queued, since after that point at least one is assumed to be in
1712 // non-FREE state at any given time. Disconnecting and re-connecting
1713 // orphans the previous buffers, getting us back to the state where we can
1714 // dequeue all buffers.
1715 //
1716 // This is not necessary if the surface was never used previously.
1717 //
1718 // TODO(http://b/134186185) recycle swapchain images more efficiently
1719 ANativeWindow* window = surface.window.get();
1720 if (surface.used_by_swapchain) {
1721 err = native_window_api_disconnect(window, NATIVE_WINDOW_API_EGL);
1722 ALOGW_IF(err != android::OK,
1723 "native_window_api_disconnect failed: %s (%d)", strerror(-err),
1724 err);
1725 err = native_window_api_connect(window, NATIVE_WINDOW_API_EGL);
1726 ALOGW_IF(err != android::OK,
1727 "native_window_api_connect failed: %s (%d)", strerror(-err),
1728 err);
1729 }
1730
1731 err =
1732 window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT, nsecs_t{-1});
1733 if (err != android::OK) {
1734 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
1735 strerror(-err), err);
1736 return VK_ERROR_SURFACE_LOST_KHR;
1737 }
1738
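    // MAILBOX requests a swap interval of 0 so queued buffers may be replaced
    // rather than blocking; all other supported present modes keep the
    // default interval of 1.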
1739 int swap_interval =
1740 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ? 0 : 1;
1741 err = window->setSwapInterval(window, swap_interval);
1742 if (err != android::OK) {
1743         ALOGE("native_window->setSwapInterval(%d) failed: %s (%d)",
1744               swap_interval, strerror(-err), err);
1745 return VK_ERROR_SURFACE_LOST_KHR;
1746 }
1747
1748 err = native_window_set_shared_buffer_mode(window, false);
1749 if (err != android::OK) {
1750 ALOGE("native_window_set_shared_buffer_mode(false) failed: %s (%d)",
1751 strerror(-err), err);
1752 return VK_ERROR_SURFACE_LOST_KHR;
1753 }
1754
1755 err = native_window_set_auto_refresh(window, false);
1756 if (err != android::OK) {
1757 ALOGE("native_window_set_auto_refresh(false) failed: %s (%d)",
1758 strerror(-err), err);
1759 return VK_ERROR_SURFACE_LOST_KHR;
1760 }
1761
1762 // -- Configure the native window --
1763
1764 const auto& dispatch = GetData(device).driver;
1765
1766 err = native_window_set_buffers_format(
1767 window, static_cast<int>(native_pixel_format));
1768 if (err != android::OK) {
1769 ALOGE("native_window_set_buffers_format(%s) failed: %s (%d)",
1770 toString(native_pixel_format).c_str(), strerror(-err), err);
1771 return VK_ERROR_SURFACE_LOST_KHR;
1772 }
1773
1774 /* Respect consumer default dataspace upon HAL_DATASPACE_ARBITRARY. */
1775 if (native_dataspace != DataSpace::ARBITRARY) {
1776 err = native_window_set_buffers_data_space(
1777 window, static_cast<android_dataspace_t>(native_dataspace));
1778 if (err != android::OK) {
1779 ALOGE("native_window_set_buffers_data_space(%d) failed: %s (%d)",
1780 native_dataspace, strerror(-err), err);
1781 return VK_ERROR_SURFACE_LOST_KHR;
1782 }
1783 }
1784
1785 err = native_window_set_buffers_dimensions(
1786 window, static_cast<int>(create_info->imageExtent.width),
1787 static_cast<int>(create_info->imageExtent.height));
1788 if (err != android::OK) {
1789 ALOGE("native_window_set_buffers_dimensions(%d,%d) failed: %s (%d)",
1790 create_info->imageExtent.width, create_info->imageExtent.height,
1791 strerror(-err), err);
1792 return VK_ERROR_SURFACE_LOST_KHR;
1793 }
1794
1795 // VkSwapchainCreateInfo::preTransform indicates the transformation the app
1796 // applied during rendering. native_window_set_transform() expects the
1797 // inverse: the transform the app is requesting that the compositor perform
1798 // during composition. With native windows, pre-transform works by rendering
1799 // with the same transform the compositor is applying (as in Vulkan), but
1800 // then requesting the inverse transform, so that when the compositor does
1801 // it's job the two transforms cancel each other out and the compositor ends
1802     // its job the two transforms cancel each other out and the compositor ends
1803 err = native_window_set_buffers_transform(
1804 window, InvertTransformToNative(create_info->preTransform));
1805 if (err != android::OK) {
1806 ALOGE("native_window_set_buffers_transform(%d) failed: %s (%d)",
1807 InvertTransformToNative(create_info->preTransform),
1808 strerror(-err), err);
1809 return VK_ERROR_SURFACE_LOST_KHR;
1810 }
1811
1812 err = native_window_set_scaling_mode(
1813 window, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
1814 if (err != android::OK) {
1815 ALOGE("native_window_set_scaling_mode(SCALE_TO_WINDOW) failed: %s (%d)",
1816 strerror(-err), err);
1817 return VK_ERROR_SURFACE_LOST_KHR;
1818 }
1819
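    // Shared present modes are implemented on top of the window's shared
    // buffer mode; SHARED_CONTINUOUS_REFRESH additionally enables
    // auto-refresh so the consumer keeps presenting the shared buffer without
    // further queueBuffer calls.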
1820 VkSwapchainImageUsageFlagsANDROID swapchain_image_usage = 0;
1821 if (IsSharedPresentMode(create_info->presentMode)) {
1822 swapchain_image_usage |= VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID;
1823 err = native_window_set_shared_buffer_mode(window, true);
1824 if (err != android::OK) {
1825 ALOGE("native_window_set_shared_buffer_mode failed: %s (%d)", strerror(-err), err);
1826 return VK_ERROR_SURFACE_LOST_KHR;
1827 }
1828 }
1829
1830 if (create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR) {
1831 err = native_window_set_auto_refresh(window, true);
1832 if (err != android::OK) {
1833 ALOGE("native_window_set_auto_refresh failed: %s (%d)", strerror(-err), err);
1834 return VK_ERROR_SURFACE_LOST_KHR;
1835 }
1836 }
1837
1838 int query_value;
1839     // TODO: Now that we are calling into GPDSC2 directly, this query may be redundant;
1840     // the call to std::max(min_buffer_count, num_images) may be redundant as well.
1841 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1842 &query_value);
1843 if (err != android::OK || query_value < 0) {
1844 ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err,
1845 query_value);
1846 return VK_ERROR_SURFACE_LOST_KHR;
1847 }
1848 const uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1849
1850 // Lower layer insists that we have at least min_undequeued_buffers + 1
1851 // buffers. This is wasteful and we'd like to relax it in the shared case,
1852 // but not all the pieces are in place for that to work yet. Note we only
1853 // lie to the lower layer--we don't want to give the app back a swapchain
1854 // with extra images (which they can't actually use!).
1855 const uint32_t min_buffer_count = min_undequeued_buffers + 1;
1856
1857 // Call into GPDSC2 to get the minimum and maximum allowable buffer count for the surface of
1858 // interest. This step is only necessary if the app requests a number of images
1859 // (create_info->minImageCount) that is less or more than the surface capabilities.
1860 // An app should be calling GPDSC2 and using those values to set create_info, but in the
1861 // event that the app has hard-coded image counts an error can occur
1862 VkSurfacePresentModeEXT present_mode = {
1863 VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
1864 nullptr,
1865 create_info->presentMode
1866 };
1867 VkPhysicalDeviceSurfaceInfo2KHR surface_info2 = {
1868 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
1869 &present_mode,
1870 create_info->surface
1871 };
1872 VkSurfaceCapabilities2KHR surface_capabilities2 = {
1873 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
1874 nullptr,
1875 {},
1876 };
1877 result = GetPhysicalDeviceSurfaceCapabilities2KHR(GetData(device).driver_physical_device,
1878 &surface_info2, &surface_capabilities2);
1879
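    // Clamp the requested image count to what the surface reports. Note this
    // assumes maxImageCount is non-zero; a zero "no limit" value would make
    // the clamp below ill-formed.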
1880 uint32_t num_images = create_info->minImageCount;
1881 num_images = std::clamp(num_images,
1882 surface_capabilities2.surfaceCapabilities.minImageCount,
1883 surface_capabilities2.surfaceCapabilities.maxImageCount);
1884
1885 const uint32_t buffer_count = std::max(min_buffer_count, num_images);
1886 err = native_window_set_buffer_count(window, buffer_count);
1887 if (err != android::OK) {
1888 ALOGE("native_window_set_buffer_count(%d) failed: %s (%d)", buffer_count,
1889 strerror(-err), err);
1890 return VK_ERROR_SURFACE_LOST_KHR;
1891 }
1892
1893 // In shared mode the num_images must be one regardless of how many
1894 // buffers were allocated for the buffer queue.
1895 if (swapchain_image_usage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID) {
1896 num_images = 1;
1897 }
1898
1899 VkImageFormatListCreateInfo extra_mutable_formats = {
1900 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
1901 };
1902     VkImageFormatListCreateInfo* extra_mutable_formats_ptr = nullptr;
1903
1904 // Look through the create_info pNext chain passed to createSwapchainKHR
1905 // for an image compression control struct.
1906 // if one is found AND the appropriate extensions are enabled, create a
1907 // VkImageCompressionControlEXT structure to pass on to VkImageCreateInfo
1908     // TODO: check that the imageCompressionControlSwapchain feature is enabled
1909 void* usage_info_pNext = nullptr;
1910 VkImageCompressionControlEXT image_compression = {};
1911 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1912 while (create_infos->pNext) {
1913 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(create_infos->pNext);
1914 switch (create_infos->sType) {
1915 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1916 const VkImageCompressionControlEXT* compression_infos =
1917 reinterpret_cast<const VkImageCompressionControlEXT*>(create_infos);
1918 image_compression = *compression_infos;
1919 image_compression.pNext = nullptr;
1920 usage_info_pNext = &image_compression;
1921 } break;
1922 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
1923 const VkImageFormatListCreateInfo* format_list =
1924 reinterpret_cast<const VkImageFormatListCreateInfo*>(
1925 create_infos);
1926 if (create_info->flags &
1927 VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
1928 if (format_list && format_list->viewFormatCount > 0 &&
1929 format_list->pViewFormats) {
1930 extra_mutable_formats.viewFormatCount =
1931 format_list->viewFormatCount;
1932 extra_mutable_formats.pViewFormats =
1933 format_list->pViewFormats;
1934 extra_mutable_formats_ptr = &extra_mutable_formats;
1935 } else {
1936                         ALOGE(
1937                             "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR was "
1938                             "set during swapchain creation but no valid "
1939                             "VkImageFormatListCreateInfo was found in the "
1940                             "pNext chain");
1941 return VK_ERROR_INITIALIZATION_FAILED;
1942 }
1943 }
1944 } break;
1945 default:
1946 // Ignore all other info structs
1947 break;
1948 }
1949 }
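    // After the walk: image_compression (if present) is queued up for the
    // VkImageCreateInfo pNext chain via usage_info_pNext, and
    // extra_mutable_formats_ptr points at the caller's view-format list when
    // the mutable-format flag was set.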
1950
1951 // Get the appropriate native_usage for the images
1952 // Get the consumer usage
1953 uint64_t native_usage = surface.consumer_usage;
1954 // Determine if the swapchain is protected
1955 bool create_protected_swapchain = false;
1956 if (create_info->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
1957 create_protected_swapchain = true;
1958 native_usage |= BufferUsage::PROTECTED;
1959 }
1960 // Get the producer usage
1961 uint64_t producer_usage;
1962 result = getProducerUsage(device, create_info, swapchain_image_usage, create_protected_swapchain, &producer_usage);
1963 if (result != VK_SUCCESS) {
1964 return result;
1965 }
1966 native_usage |= producer_usage;
1967
1968 err = native_window_set_usage(window, native_usage);
1969 if (err != android::OK) {
1970 ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), err);
1971 return VK_ERROR_SURFACE_LOST_KHR;
1972 }
1973
1974 int transform_hint;
1975 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT, &transform_hint);
1976 if (err != android::OK) {
1977 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
1978 strerror(-err), err);
1979 return VK_ERROR_SURFACE_LOST_KHR;
1980 }
1981
1982 int64_t refresh_duration;
1983 err = native_window_get_refresh_cycle_duration(window, &refresh_duration);
1984 if (err != android::OK) {
1985 ALOGE("native_window_get_refresh_cycle_duration query failed: %s (%d)",
1986 strerror(-err), err);
1987 return VK_ERROR_SURFACE_LOST_KHR;
1988 }
1989 // -- Allocate our Swapchain object --
1990 // After this point, we must deallocate the swapchain on error.
1991
1992 void* mem = allocator->pfnAllocation(allocator->pUserData,
1993 sizeof(Swapchain), alignof(Swapchain),
1994 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1995
1996 if (!mem)
1997 return VK_ERROR_OUT_OF_HOST_MEMORY;
1998
1999 Swapchain* swapchain = new (mem)
2000 Swapchain(surface, num_images, create_info->presentMode,
2001 TranslateVulkanToNativeTransform(create_info->preTransform),
2002 refresh_duration);
2003 VkSwapchainImageCreateInfoANDROID swapchain_image_create = {
2004 #pragma clang diagnostic push
2005 #pragma clang diagnostic ignored "-Wold-style-cast"
2006 .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID,
2007 #pragma clang diagnostic pop
2008 .pNext = usage_info_pNext,
2009 .usage = swapchain_image_usage,
2010 };
2011 VkNativeBufferANDROID image_native_buffer = {
2012 #pragma clang diagnostic push
2013 #pragma clang diagnostic ignored "-Wold-style-cast"
2014 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2015 #pragma clang diagnostic pop
2016 .pNext = &swapchain_image_create,
2017 };
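    // swapchain_image_create rides along on every image, while
    // image_native_buffer is only used on the non-deferred path below, where
    // its per-buffer fields are filled in for each dequeued buffer.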
2018
2019 VkImageCreateInfo image_create = {
2020 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2021 .pNext = nullptr,
2022 .flags = create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
2023 .imageType = VK_IMAGE_TYPE_2D,
2024 .format = create_info->imageFormat,
2025 .extent = {
2026 create_info->imageExtent.width,
2027 create_info->imageExtent.height,
2028 1
2029 },
2030 .mipLevels = 1,
2031 .arrayLayers = 1,
2032 .samples = VK_SAMPLE_COUNT_1_BIT,
2033 .tiling = VK_IMAGE_TILING_OPTIMAL,
2034 .usage = create_info->imageUsage,
2035 .sharingMode = create_info->imageSharingMode,
2036 .queueFamilyIndexCount = create_info->queueFamilyIndexCount,
2037 .pQueueFamilyIndices = create_info->pQueueFamilyIndices,
2038 };
2039
2040 if (create_info->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
2041 image_create.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
2042 image_create.flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR;
2043 }
2044
2045 // Note: don't do deferred allocation for shared present modes. There's only one buffer
2046 // involved so very little benefit.
2047 if ((create_info->flags & VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT) &&
2048 !IsSharedPresentMode(create_info->presentMode)) {
2049 // Don't want to touch the underlying gralloc buffers yet;
2050 // instead just create unbound VkImages which will later be bound to memory inside
2051 // AcquireNextImage.
2052 VkImageSwapchainCreateInfoKHR image_swapchain_create = {
2053 .sType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
2054 .pNext = extra_mutable_formats_ptr,
2055 .swapchain = HandleFromSwapchain(swapchain),
2056 };
2057 image_create.pNext = &image_swapchain_create;
2058
2059 for (uint32_t i = 0; i < num_images; i++) {
2060 Swapchain::Image& img = swapchain->images[i];
2061 img.buffer = nullptr;
2062 img.dequeued = false;
2063
2064 result = dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2065 if (result != VK_SUCCESS) {
2066                 ALOGD("vkCreateImage for deferred swapchain image failed: %u", result);
2067 break;
2068 }
2069 }
2070 } else {
2071 // -- Dequeue all buffers and create a VkImage for each --
2072 // Any failures during or after this must cancel the dequeued buffers.
2073
2074 for (uint32_t i = 0; i < num_images; i++) {
2075 Swapchain::Image& img = swapchain->images[i];
2076
2077 ANativeWindowBuffer* buffer;
2078 err = window->dequeueBuffer(window, &buffer, &img.dequeue_fence);
2079 if (err != android::OK) {
2080 ALOGE("dequeueBuffer[%u] failed: %s (%d)", i, strerror(-err), err);
2081 switch (-err) {
2082 case ENOMEM:
2083 result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
2084 break;
2085 default:
2086 result = VK_ERROR_SURFACE_LOST_KHR;
2087 break;
2088 }
2089 break;
2090 }
2091 img.buffer = buffer;
2092 img.dequeued = true;
2093
2094 image_native_buffer.handle = img.buffer->handle;
2095 image_native_buffer.stride = img.buffer->stride;
2096 image_native_buffer.format = img.buffer->format;
2097 image_native_buffer.usage = int(img.buffer->usage);
2098 android_convertGralloc0To1Usage(int(img.buffer->usage),
2099 &image_native_buffer.usage2.producer,
2100 &image_native_buffer.usage2.consumer);
2101 image_native_buffer.usage3 = img.buffer->usage;
2102 image_native_buffer.ahb =
2103 ANativeWindowBuffer_getHardwareBuffer(img.buffer.get());
2104 image_create.pNext = &image_native_buffer;
2105
2106 if (extra_mutable_formats_ptr) {
2107 extra_mutable_formats_ptr->pNext = image_create.pNext;
2108 image_create.pNext = extra_mutable_formats_ptr;
2109 }
2110
2111 ATRACE_BEGIN("CreateImage");
2112 result =
2113 dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2114 ATRACE_END();
2115 if (result != VK_SUCCESS) {
2116 ALOGD("vkCreateImage w/ native buffer failed: %u", result);
2117 break;
2118 }
2119 }
2120
2121 // -- Cancel all buffers, returning them to the queue --
2122 // If an error occurred before, also destroy the VkImage and release the
2123 // buffer reference. Otherwise, we retain a strong reference to the buffer.
2124 for (uint32_t i = 0; i < num_images; i++) {
2125 Swapchain::Image& img = swapchain->images[i];
2126 if (img.dequeued) {
2127 if (!swapchain->shared) {
2128 window->cancelBuffer(window, img.buffer.get(),
2129 img.dequeue_fence);
2130 img.dequeue_fence = -1;
2131 img.dequeued = false;
2132 }
2133 }
2134 }
2135 }
2136
2137 if (result != VK_SUCCESS) {
2138 DestroySwapchainInternal(device, HandleFromSwapchain(swapchain),
2139 allocator);
2140 return result;
2141 }
2142
2143 if (transform_hint != swapchain->pre_transform) {
2144 // Log that the app is not doing pre-rotation.
2145 android::GraphicsEnv::getInstance().setTargetStats(
2146 android::GpuStatsInfo::Stats::FALSE_PREROTATION);
2147 }
2148
2149 // Set stats for creating a Vulkan swapchain
2150 android::GraphicsEnv::getInstance().setTargetStats(
2151 android::GpuStatsInfo::Stats::CREATED_VULKAN_SWAPCHAIN);
2152
2153 surface.used_by_swapchain = true;
2154 surface.swapchain_handle = HandleFromSwapchain(swapchain);
2155 *swapchain_handle = surface.swapchain_handle;
2156 return VK_SUCCESS;
2157 }
2158
2159 VKAPI_ATTR
2160 void DestroySwapchainKHR(VkDevice device,
2161 VkSwapchainKHR swapchain_handle,
2162 const VkAllocationCallbacks* allocator) {
2163 ATRACE_CALL();
2164
2165 DestroySwapchainInternal(device, swapchain_handle, allocator);
2166 }
2167
2168 VKAPI_ATTR
2169 VkResult GetSwapchainImagesKHR(VkDevice,
2170 VkSwapchainKHR swapchain_handle,
2171 uint32_t* count,
2172 VkImage* images) {
2173 ATRACE_CALL();
2174
2175 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2176 ALOGW_IF(swapchain.surface.swapchain_handle != swapchain_handle,
2177 "getting images for non-active swapchain 0x%" PRIx64
2178 "; only dequeued image handles are valid",
2179 reinterpret_cast<uint64_t>(swapchain_handle));
2180 VkResult result = VK_SUCCESS;
2181 if (images) {
2182 uint32_t n = swapchain.num_images;
2183 if (*count < swapchain.num_images) {
2184 n = *count;
2185 result = VK_INCOMPLETE;
2186 }
2187 for (uint32_t i = 0; i < n; i++)
2188 images[i] = swapchain.images[i].image;
2189 *count = n;
2190 } else {
2191 *count = swapchain.num_images;
2192 }
2193 return result;
2194 }
2195
2196 VKAPI_ATTR
2197 VkResult AcquireNextImageKHR(VkDevice device,
2198 VkSwapchainKHR swapchain_handle,
2199 uint64_t timeout,
2200 VkSemaphore semaphore,
2201 VkFence vk_fence,
2202 uint32_t* image_index) {
2203 ATRACE_CALL();
2204
2205 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2206 ANativeWindow* window = swapchain.surface.window.get();
2207 VkResult result;
2208 int err;
2209
2210 if (swapchain.surface.swapchain_handle != swapchain_handle)
2211 return VK_ERROR_OUT_OF_DATE_KHR;
2212
2213 if (swapchain.shared) {
2214 // In shared mode, we keep the buffer dequeued all the time, so we don't
2215 // want to dequeue a buffer here. Instead, just ask the driver to ensure
2216 // the semaphore and fence passed to us will be signalled.
2217 *image_index = 0;
2218 result = GetData(device).driver.AcquireImageANDROID(
2219 device, swapchain.images[*image_index].image, -1, semaphore, vk_fence);
2220 return result;
2221 }
2222
2223 const nsecs_t acquire_next_image_timeout =
2224 timeout > (uint64_t)std::numeric_limits<nsecs_t>::max() ? -1 : timeout;
2225 if (acquire_next_image_timeout != swapchain.acquire_next_image_timeout) {
2226 // Cache the timeout to avoid the duplicate binder cost.
2227 err = window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT,
2228 acquire_next_image_timeout);
2229 if (err != android::OK) {
2230 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
2231 strerror(-err), err);
2232 return VK_ERROR_SURFACE_LOST_KHR;
2233 }
2234 swapchain.acquire_next_image_timeout = acquire_next_image_timeout;
2235 }
2236
2237 ANativeWindowBuffer* buffer;
2238 int fence_fd;
2239 err = window->dequeueBuffer(window, &buffer, &fence_fd);
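    // A dequeue timeout maps onto the Vulkan acquire semantics: VK_NOT_READY
    // for a zero timeout, VK_TIMEOUT otherwise.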
2240 if (err == android::TIMED_OUT || err == android::INVALID_OPERATION) {
2241 ALOGW("dequeueBuffer timed out: %s (%d)", strerror(-err), err);
2242 return timeout ? VK_TIMEOUT : VK_NOT_READY;
2243 } else if (err != android::OK) {
2244 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2245 return VK_ERROR_SURFACE_LOST_KHR;
2246 }
2247
2248 uint32_t idx;
2249 for (idx = 0; idx < swapchain.num_images; idx++) {
2250 if (swapchain.images[idx].buffer.get() == buffer) {
2251 swapchain.images[idx].dequeued = true;
2252 swapchain.images[idx].dequeue_fence = fence_fd;
2253 break;
2254 }
2255 }
2256
2257 // If this is a deferred alloc swapchain, this may be the first time we've
2258 // seen a particular buffer. If so, there should be an empty slot. Find it,
2259 // and bind the gralloc buffer to the VkImage for that slot. If there is no
2260 // empty slot, then we dequeued an unexpected buffer. Non-deferred swapchains
2261 // will also take this path, but will never have an empty slot since we
2262 // populated them all upfront.
2263 if (idx == swapchain.num_images) {
2264 for (idx = 0; idx < swapchain.num_images; idx++) {
2265 if (!swapchain.images[idx].buffer) {
2266 // Note: this structure is technically required for
2267 // Vulkan correctness, even though the driver is probably going
2268 // to use everything from the VkNativeBufferANDROID below.
2269                 // This is kind of silly, but it's how we did the ANB
2270 // side of VK_KHR_swapchain v69, so we're stuck with it unless
2271 // we want to go tinkering with the ANB spec some more.
2272 VkBindImageMemorySwapchainInfoKHR bimsi = {
2273 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
2274 .pNext = nullptr,
2275 .swapchain = swapchain_handle,
2276 .imageIndex = idx,
2277 };
2278 VkNativeBufferANDROID nb = {
2279 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2280 .pNext = &bimsi,
2281 .handle = buffer->handle,
2282 .stride = buffer->stride,
2283 .format = buffer->format,
2284 .usage = int(buffer->usage),
2285 .usage3 = buffer->usage,
2286 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2287 };
2288 android_convertGralloc0To1Usage(int(buffer->usage),
2289 &nb.usage2.producer,
2290 &nb.usage2.consumer);
2291 VkBindImageMemoryInfo bimi = {
2292 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2293 .pNext = &nb,
2294 .image = swapchain.images[idx].image,
2295 .memory = VK_NULL_HANDLE,
2296 .memoryOffset = 0,
2297 };
2298 result = GetData(device).driver.BindImageMemory2(device, 1, &bimi);
2299 if (result != VK_SUCCESS) {
2300 // This shouldn't really happen. If it does, something is probably
2301 // unrecoverably wrong with the swapchain and its images. Cancel
2302 // the buffer and declare the swapchain broken.
2303 ALOGE("failed to do deferred gralloc buffer bind");
2304 window->cancelBuffer(window, buffer, fence_fd);
2305 return VK_ERROR_OUT_OF_DATE_KHR;
2306 }
2307
2308 swapchain.images[idx].dequeued = true;
2309 swapchain.images[idx].dequeue_fence = fence_fd;
2310 swapchain.images[idx].buffer = buffer;
2311 break;
2312 }
2313 }
2314 }
2315
2316 // The buffer doesn't match any slot. This shouldn't normally happen, but is
2317 // possible if the bufferqueue is reconfigured behind libvulkan's back. If this
2318 // happens, just declare the swapchain to be broken and the app will recreate it.
2319 if (idx == swapchain.num_images) {
2320 ALOGE("dequeueBuffer returned unrecognized buffer");
2321 window->cancelBuffer(window, buffer, fence_fd);
2322 return VK_ERROR_OUT_OF_DATE_KHR;
2323 }
2324
2325 int fence_clone = -1;
2326 if (fence_fd != -1) {
2327 fence_clone = dup(fence_fd);
2328 if (fence_clone == -1) {
2329 ALOGE("dup(fence) failed, stalling until signalled: %s (%d)",
2330 strerror(errno), errno);
2331 sync_wait(fence_fd, -1 /* forever */);
2332 }
2333 }
2334
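    // The driver consumes fence_clone; the original fence_fd stays with the
    // image (as dequeue_fence) so it can be handed back via cancelBuffer on
    // failure.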
2335 result = GetData(device).driver.AcquireImageANDROID(
2336 device, swapchain.images[idx].image, fence_clone, semaphore, vk_fence);
2337 if (result != VK_SUCCESS) {
2338 // NOTE: we're relying on AcquireImageANDROID to close fence_clone,
2339 // even if the call fails. We could close it ourselves on failure, but
2340 // that would create a race condition if the driver closes it on a
2341 // failure path: some other thread might create an fd with the same
2342 // number between the time the driver closes it and the time we close
2343 // it. We must assume one of: the driver *always* closes it even on
2344 // failure, or *never* closes it on failure.
2345 window->cancelBuffer(window, buffer, fence_fd);
2346 swapchain.images[idx].dequeued = false;
2347 swapchain.images[idx].dequeue_fence = -1;
2348 return result;
2349 }
2350
2351 *image_index = idx;
2352 return VK_SUCCESS;
2353 }
2354
2355 VKAPI_ATTR
2356 VkResult AcquireNextImage2KHR(VkDevice device,
2357 const VkAcquireNextImageInfoKHR* pAcquireInfo,
2358 uint32_t* pImageIndex) {
2359 ATRACE_CALL();
2360
2361 return AcquireNextImageKHR(device, pAcquireInfo->swapchain,
2362 pAcquireInfo->timeout, pAcquireInfo->semaphore,
2363 pAcquireInfo->fence, pImageIndex);
2364 }
2365
2366 static VkResult WorstPresentResult(VkResult a, VkResult b) {
2367 // See the error ranking for vkQueuePresentKHR at the end of section 29.6
2368 // (in spec version 1.0.14).
2369 static const VkResult kWorstToBest[] = {
2370 VK_ERROR_DEVICE_LOST,
2371 VK_ERROR_SURFACE_LOST_KHR,
2372 VK_ERROR_OUT_OF_DATE_KHR,
2373 VK_ERROR_OUT_OF_DEVICE_MEMORY,
2374 VK_ERROR_OUT_OF_HOST_MEMORY,
2375 VK_SUBOPTIMAL_KHR,
2376 };
2377 for (auto result : kWorstToBest) {
2378 if (a == result || b == result)
2379 return result;
2380 }
2381 ALOG_ASSERT(a == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", a);
2382 ALOG_ASSERT(b == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", b);
2383 return a != VK_SUCCESS ? a : b;
2384 }
2385
2386 // KHR_incremental_present aspect of QueuePresentKHR
2387 static void SetSwapchainSurfaceDamage(ANativeWindow *window, const VkPresentRegionKHR *pRegion) {
2388 std::vector<android_native_rect_t> rects(pRegion->rectangleCount);
2389 for (auto i = 0u; i < pRegion->rectangleCount; i++) {
2390 auto const& rect = pRegion->pRectangles[i];
2391 if (rect.layer > 0) {
2392 ALOGV("vkQueuePresentKHR ignoring invalid layer (%u); using layer 0 instead",
2393 rect.layer);
2394 }
2395
2396 rects[i].left = rect.offset.x;
2397 rects[i].bottom = rect.offset.y;
2398 rects[i].right = rect.offset.x + rect.extent.width;
2399 rects[i].top = rect.offset.y + rect.extent.height;
2400 }
2401 native_window_set_surface_damage(window, rects.data(), rects.size());
2402 }
2403
2404 // GOOGLE_display_timing aspect of QueuePresentKHR
2405 static void SetSwapchainFrameTimestamp(Swapchain &swapchain, const VkPresentTimeGOOGLE *pTime) {
2406 ANativeWindow *window = swapchain.surface.window.get();
2407
2408 // We don't know whether the app will actually use GOOGLE_display_timing
2409 // with a particular swapchain until QueuePresent; enable it on the BQ
2410 // now if needed
2411 if (!swapchain.frame_timestamps_enabled) {
2412 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2413 native_window_enable_frame_timestamps(window, true);
2414 swapchain.frame_timestamps_enabled = true;
2415 }
2416
2417 // Record the nativeFrameId so it can be later correlated to
2418 // this present.
2419 uint64_t nativeFrameId = 0;
2420 int err = native_window_get_next_frame_id(
2421 window, &nativeFrameId);
2422 if (err != android::OK) {
2423 ALOGE("Failed to get next native frame ID.");
2424 }
2425
2426 // Add a new timing record with the user's presentID and
2427 // the nativeFrameId.
2428 swapchain.timing.emplace_back(pTime, nativeFrameId);
2429 if (swapchain.timing.size() > MAX_TIMING_INFOS) {
2430 swapchain.timing.erase(
2431 swapchain.timing.begin(),
2432 swapchain.timing.begin() + swapchain.timing.size() - MAX_TIMING_INFOS);
2433 }
2434 if (pTime->desiredPresentTime) {
2435 ALOGV(
2436 "Calling native_window_set_buffers_timestamp(%" PRId64 ")",
2437 pTime->desiredPresentTime);
2438 native_window_set_buffers_timestamp(
2439 window,
2440 static_cast<int64_t>(pTime->desiredPresentTime));
2441 }
2442 }
2443
2444 // EXT_swapchain_maintenance1 present mode change
2445 static bool SetSwapchainPresentMode(ANativeWindow *window, VkPresentModeKHR mode) {
2446 // There is no dynamic switching between non-shared present modes.
2447 // All we support is switching between demand and continuous refresh.
2448 if (!IsSharedPresentMode(mode))
2449 return true;
2450
2451 int err = native_window_set_auto_refresh(window,
2452 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
2453 if (err != android::OK) {
2454 ALOGE("native_window_set_auto_refresh() failed: %s (%d)",
2455 strerror(-err), err);
2456 return false;
2457 }
2458
2459 return true;
2460 }
2461
2462 static VkResult PresentOneSwapchain(
2463 VkQueue queue,
2464 Swapchain& swapchain,
2465 uint32_t imageIndex,
2466 const VkPresentRegionKHR *pRegion,
2467 const VkPresentTimeGOOGLE *pTime,
2468 VkFence presentFence,
2469 const VkPresentModeKHR *pPresentMode,
2470 uint32_t waitSemaphoreCount,
2471 const VkSemaphore *pWaitSemaphores) {
2472
2473 VkDevice device = GetData(queue).driver_device;
2474 const auto& dispatch = GetData(queue).driver;
2475
2476 Swapchain::Image& img = swapchain.images[imageIndex];
2477 VkResult swapchain_result = VK_SUCCESS;
2478 VkResult result;
2479 int err;
2480
2481 // XXX: long standing issue: QueueSignalReleaseImageANDROID consumes the
2482 // wait semaphores, so this doesn't actually work for the multiple swapchain
2483 // case.
2484 int fence = -1;
2485 result = dispatch.QueueSignalReleaseImageANDROID(
2486 queue, waitSemaphoreCount,
2487 pWaitSemaphores, img.image, &fence);
2488 if (result != VK_SUCCESS) {
2489 ALOGE("QueueSignalReleaseImageANDROID failed: %d", result);
2490 swapchain_result = result;
2491 }
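    // Keep our own duplicate of the release fence; the original fd is handed
    // off below (to queueBuffer, or to ReleaseSwapchainImage for an orphaned
    // swapchain), which takes ownership of it.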
2492 if (img.release_fence >= 0)
2493 close(img.release_fence);
2494 img.release_fence = fence < 0 ? -1 : dup(fence);
2495
2496 if (swapchain.surface.swapchain_handle == HandleFromSwapchain(&swapchain)) {
2497 ANativeWindow* window = swapchain.surface.window.get();
2498 if (swapchain_result == VK_SUCCESS) {
2499
2500 if (presentFence != VK_NULL_HANDLE) {
2501 int fence_copy = fence < 0 ? -1 : dup(fence);
2502 VkImportFenceFdInfoKHR iffi = {
2503 VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
2504 nullptr,
2505 presentFence,
2506 VK_FENCE_IMPORT_TEMPORARY_BIT,
2507 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
2508 fence_copy,
2509 };
2510 if (VK_SUCCESS != dispatch.ImportFenceFdKHR(device, &iffi) && fence_copy >= 0) {
2511 // ImportFenceFdKHR takes ownership only if it succeeds
2512 close(fence_copy);
2513 }
2514 }
2515
2516 if (pRegion) {
2517 SetSwapchainSurfaceDamage(window, pRegion);
2518 }
2519 if (pTime) {
2520 SetSwapchainFrameTimestamp(swapchain, pTime);
2521 }
2522 if (pPresentMode) {
2523 if (!SetSwapchainPresentMode(window, *pPresentMode))
2524 swapchain_result = WorstPresentResult(swapchain_result,
2525 VK_ERROR_SURFACE_LOST_KHR);
2526 }
2527
2528 err = window->queueBuffer(window, img.buffer.get(), fence);
2529 // queueBuffer always closes fence, even on error
2530 if (err != android::OK) {
2531 ALOGE("queueBuffer failed: %s (%d)", strerror(-err), err);
2532 swapchain_result = WorstPresentResult(
2533 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2534 } else {
2535 if (img.dequeue_fence >= 0) {
2536 close(img.dequeue_fence);
2537 img.dequeue_fence = -1;
2538 }
2539 img.dequeued = false;
2540 }
2541
2542 // If the swapchain is in shared mode, immediately dequeue the
2543 // buffer so it can be presented again without an intervening
2544 // call to AcquireNextImageKHR. We expect to get the same buffer
2545 // back from every call to dequeueBuffer in this mode.
2546 if (swapchain.shared && swapchain_result == VK_SUCCESS) {
2547 ANativeWindowBuffer* buffer;
2548 int fence_fd;
2549 err = window->dequeueBuffer(window, &buffer, &fence_fd);
2550 if (err != android::OK) {
2551 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2552 swapchain_result = WorstPresentResult(swapchain_result,
2553 VK_ERROR_SURFACE_LOST_KHR);
2554 } else if (img.buffer != buffer) {
2555 ALOGE("got wrong image back for shared swapchain");
2556 swapchain_result = WorstPresentResult(swapchain_result,
2557 VK_ERROR_SURFACE_LOST_KHR);
2558 } else {
2559 img.dequeue_fence = fence_fd;
2560 img.dequeued = true;
2561 }
2562 }
2563 }
2564 if (swapchain_result != VK_SUCCESS) {
2565 OrphanSwapchain(device, &swapchain);
2566 }
2567 // Android will only return VK_SUBOPTIMAL_KHR for vkQueuePresentKHR,
2568 // and only when the window's transform/rotation changes. Extent
2569 // changes will not cause VK_SUBOPTIMAL_KHR because of the
2570 // application issues that were caused when the following transform
2571 // change was added.
2572 int window_transform_hint;
2573 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
2574 &window_transform_hint);
2575 if (err != android::OK) {
2576 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
2577 strerror(-err), err);
2578 swapchain_result = WorstPresentResult(
2579 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2580 }
2581 if (swapchain.pre_transform != window_transform_hint) {
2582 swapchain_result =
2583 WorstPresentResult(swapchain_result, VK_SUBOPTIMAL_KHR);
2584 }
2585 } else {
2586 ReleaseSwapchainImage(device, swapchain.shared, nullptr, fence,
2587 img, true);
2588 swapchain_result = VK_ERROR_OUT_OF_DATE_KHR;
2589 }
2590
2591 return swapchain_result;
2592 }
2593
2594 VKAPI_ATTR
2595 VkResult QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* present_info) {
2596 ATRACE_CALL();
2597
2598 ALOGV_IF(present_info->sType != VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
2599 "vkQueuePresentKHR: invalid VkPresentInfoKHR structure type %d",
2600 present_info->sType);
2601
2602 VkResult final_result = VK_SUCCESS;
2603
2604 // Look at the pNext chain for supported extension structs:
2605 const VkPresentRegionsKHR* present_regions = nullptr;
2606 const VkPresentTimesInfoGOOGLE* present_times = nullptr;
2607 const VkSwapchainPresentFenceInfoEXT* present_fences = nullptr;
2608 const VkSwapchainPresentModeInfoEXT* present_modes = nullptr;
2609
2610 const VkPresentRegionsKHR* next =
2611 reinterpret_cast<const VkPresentRegionsKHR*>(present_info->pNext);
2612 while (next) {
2613 switch (next->sType) {
2614 case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
2615 present_regions = next;
2616 break;
2617 case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
2618 present_times =
2619 reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(next);
2620 break;
2621 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT:
2622 present_fences =
2623 reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>(next);
2624 break;
2625 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT:
2626 present_modes =
2627 reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>(next);
2628 break;
2629 default:
2630 ALOGV("QueuePresentKHR ignoring unrecognized pNext->sType = %x",
2631 next->sType);
2632 break;
2633 }
2634 next = reinterpret_cast<const VkPresentRegionsKHR*>(next->pNext);
2635 }
2636 ALOGV_IF(
2637 present_regions &&
2638 present_regions->swapchainCount != present_info->swapchainCount,
2639 "VkPresentRegions::swapchainCount != VkPresentInfo::swapchainCount");
2640 ALOGV_IF(present_times &&
2641 present_times->swapchainCount != present_info->swapchainCount,
2642 "VkPresentTimesInfoGOOGLE::swapchainCount != "
2643 "VkPresentInfo::swapchainCount");
2644 ALOGV_IF(present_fences &&
2645 present_fences->swapchainCount != present_info->swapchainCount,
2646 "VkSwapchainPresentFenceInfoEXT::swapchainCount != "
2647 "VkPresentInfo::swapchainCount");
2648 ALOGV_IF(present_modes &&
2649 present_modes->swapchainCount != present_info->swapchainCount,
2650 "VkSwapchainPresentModeInfoEXT::swapchainCount != "
2651 "VkPresentInfo::swapchainCount");
2652
2653 const VkPresentRegionKHR* regions =
2654 (present_regions) ? present_regions->pRegions : nullptr;
2655 const VkPresentTimeGOOGLE* times =
2656 (present_times) ? present_times->pTimes : nullptr;
2657
2658 for (uint32_t sc = 0; sc < present_info->swapchainCount; sc++) {
2659 Swapchain& swapchain =
2660 *SwapchainFromHandle(present_info->pSwapchains[sc]);
2661
2662 VkResult swapchain_result = PresentOneSwapchain(
2663 queue,
2664 swapchain,
2665 present_info->pImageIndices[sc],
2666             (regions && !swapchain.mailbox_mode) ? &regions[sc] : nullptr,
2667             times ? &times[sc] : nullptr,
2668 present_fences ? present_fences->pFences[sc] : VK_NULL_HANDLE,
2669 present_modes ? &present_modes->pPresentModes[sc] : nullptr,
2670 present_info->waitSemaphoreCount,
2671 present_info->pWaitSemaphores);
2672
2673 if (present_info->pResults)
2674 present_info->pResults[sc] = swapchain_result;
2675
2676 if (swapchain_result != final_result)
2677 final_result = WorstPresentResult(final_result, swapchain_result);
2678 }
2679
2680 return final_result;
2681 }
2682
2683 VKAPI_ATTR
2684 VkResult GetRefreshCycleDurationGOOGLE(
2685 VkDevice,
2686 VkSwapchainKHR swapchain_handle,
2687 VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
2688 ATRACE_CALL();
2689
2690 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2691 VkResult result = swapchain.get_refresh_duration(pDisplayTimingProperties->refreshDuration);
2692
2693 return result;
2694 }
2695
2696 VKAPI_ATTR
2697 VkResult GetPastPresentationTimingGOOGLE(
2698 VkDevice,
2699 VkSwapchainKHR swapchain_handle,
2700 uint32_t* count,
2701 VkPastPresentationTimingGOOGLE* timings) {
2702 ATRACE_CALL();
2703
2704 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2705 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2706 return VK_ERROR_OUT_OF_DATE_KHR;
2707 }
2708
2709 ANativeWindow* window = swapchain.surface.window.get();
2710 VkResult result = VK_SUCCESS;
2711
2712 if (!swapchain.frame_timestamps_enabled) {
2713 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2714 native_window_enable_frame_timestamps(window, true);
2715 swapchain.frame_timestamps_enabled = true;
2716 }
2717
2718 if (timings) {
2719 // Get the latest ready timing count before copying, since the copied
2720         // timing info will be erased in the copy_ready_timings function.
2721 uint32_t n = get_num_ready_timings(swapchain);
2722 copy_ready_timings(swapchain, count, timings);
2723 // Check the *count here against the recorded ready timing count, since
2724         // *count can be overwritten, as the spec describes.
2725 if (*count < n) {
2726 result = VK_INCOMPLETE;
2727 }
2728 } else {
2729 *count = get_num_ready_timings(swapchain);
2730 }
2731
2732 return result;
2733 }
2734
2735 VKAPI_ATTR
2736 VkResult GetSwapchainStatusKHR(
2737 VkDevice,
2738 VkSwapchainKHR swapchain_handle) {
2739 ATRACE_CALL();
2740
2741 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2742 VkResult result = VK_SUCCESS;
2743
2744 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2745 return VK_ERROR_OUT_OF_DATE_KHR;
2746 }
2747
2748 // TODO(b/143296009): Implement this function properly
2749
2750 return result;
2751 }
2752
2753 VKAPI_ATTR void SetHdrMetadataEXT(
2754 VkDevice,
2755 uint32_t swapchainCount,
2756 const VkSwapchainKHR* pSwapchains,
2757 const VkHdrMetadataEXT* pHdrMetadataEXTs) {
2758 ATRACE_CALL();
2759
2760 for (uint32_t idx = 0; idx < swapchainCount; idx++) {
2761 Swapchain* swapchain = SwapchainFromHandle(pSwapchains[idx]);
2762 if (!swapchain)
2763 continue;
2764
2765 if (swapchain->surface.swapchain_handle != pSwapchains[idx]) continue;
2766
2767 ANativeWindow* window = swapchain->surface.window.get();
2768
2769 VkHdrMetadataEXT vulkanMetadata = pHdrMetadataEXTs[idx];
2770         const android_smpte2086_metadata smpteMetadata = {
2771 {vulkanMetadata.displayPrimaryRed.x,
2772 vulkanMetadata.displayPrimaryRed.y},
2773 {vulkanMetadata.displayPrimaryGreen.x,
2774 vulkanMetadata.displayPrimaryGreen.y},
2775 {vulkanMetadata.displayPrimaryBlue.x,
2776 vulkanMetadata.displayPrimaryBlue.y},
2777 {vulkanMetadata.whitePoint.x, vulkanMetadata.whitePoint.y},
2778 vulkanMetadata.maxLuminance,
2779 vulkanMetadata.minLuminance};
2780         native_window_set_buffers_smpte2086_metadata(window, &smpteMetadata);
2781
2782 const android_cta861_3_metadata cta8613Metadata = {
2783 vulkanMetadata.maxContentLightLevel,
2784 vulkanMetadata.maxFrameAverageLightLevel};
2785 native_window_set_buffers_cta861_3_metadata(window, &cta8613Metadata);
2786 }
2787
2788 return;
2789 }
2790
2791 static void InterceptBindImageMemory2(
2792 uint32_t bind_info_count,
2793 const VkBindImageMemoryInfo* bind_infos,
2794 std::vector<VkNativeBufferANDROID>* out_native_buffers,
2795 std::vector<VkBindImageMemoryInfo>* out_bind_infos) {
2796 out_native_buffers->clear();
2797 out_bind_infos->clear();
2798
2799 if (!bind_info_count)
2800 return;
2801
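    // For every bind info that carries a VkBindImageMemorySwapchainInfoKHR,
    // substitute a VkNativeBufferANDROID describing the already-dequeued
    // gralloc buffer so the driver can bind the image to that buffer directly.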
2802 std::unordered_set<uint32_t> intercepted_indexes;
2803
2804 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2805 auto info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2806 bind_infos[idx].pNext);
2807 while (info &&
2808 info->sType !=
2809 VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR) {
2810 info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2811 info->pNext);
2812 }
2813
2814 if (!info)
2815 continue;
2816
2817 ALOG_ASSERT(info->swapchain != VK_NULL_HANDLE,
2818 "swapchain handle must not be NULL");
2819 const Swapchain* swapchain = SwapchainFromHandle(info->swapchain);
2820 ALOG_ASSERT(
2821 info->imageIndex < swapchain->num_images,
2822 "imageIndex must be less than the number of images in swapchain");
2823
2824 ANativeWindowBuffer* buffer =
2825 swapchain->images[info->imageIndex].buffer.get();
2826 VkNativeBufferANDROID native_buffer = {
2827 #pragma clang diagnostic push
2828 #pragma clang diagnostic ignored "-Wold-style-cast"
2829 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2830 #pragma clang diagnostic pop
2831 .pNext = bind_infos[idx].pNext,
2832 .handle = buffer->handle,
2833 .stride = buffer->stride,
2834 .format = buffer->format,
2835 .usage = int(buffer->usage),
2836 .usage3 = buffer->usage,
2837 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2838 };
2839 android_convertGralloc0To1Usage(int(buffer->usage),
2840 &native_buffer.usage2.producer,
2841 &native_buffer.usage2.consumer);
2842 // Reserve enough space to avoid letting re-allocation invalidate the
2843 // addresses of the elements inside.
2844 out_native_buffers->reserve(bind_info_count);
2845 out_native_buffers->emplace_back(native_buffer);
2846
2847 // Reserve the space now since we know how much is needed now.
2848 out_bind_infos->reserve(bind_info_count);
2849 out_bind_infos->emplace_back(bind_infos[idx]);
2850 out_bind_infos->back().pNext = &out_native_buffers->back();
2851
2852 intercepted_indexes.insert(idx);
2853 }
2854
2855 if (intercepted_indexes.empty())
2856 return;
2857
2858 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2859 if (intercepted_indexes.count(idx))
2860 continue;
2861 out_bind_infos->emplace_back(bind_infos[idx]);
2862 }
2863 }
2864
2865 VKAPI_ATTR
2866 VkResult BindImageMemory2(VkDevice device,
2867 uint32_t bindInfoCount,
2868 const VkBindImageMemoryInfo* pBindInfos) {
2869 ATRACE_CALL();
2870
2871 // out_native_buffers is for maintaining the lifecycle of the constructed
2872 // VkNativeBufferANDROID objects inside InterceptBindImageMemory2.
2873 std::vector<VkNativeBufferANDROID> out_native_buffers;
2874 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2875 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2876 &out_bind_infos);
2877 return GetData(device).driver.BindImageMemory2(
2878 device, bindInfoCount,
2879 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2880 }
2881
2882 VKAPI_ATTR
2883 VkResult BindImageMemory2KHR(VkDevice device,
2884 uint32_t bindInfoCount,
2885 const VkBindImageMemoryInfo* pBindInfos) {
2886 ATRACE_CALL();
2887
2888 std::vector<VkNativeBufferANDROID> out_native_buffers;
2889 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2890 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2891 &out_bind_infos);
2892 return GetData(device).driver.BindImageMemory2KHR(
2893 device, bindInfoCount,
2894 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2895 }
2896
2897 VKAPI_ATTR
2898 VkResult ReleaseSwapchainImagesEXT(VkDevice /*device*/,
2899 const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) {
2900 ATRACE_CALL();
2901
2902 Swapchain& swapchain = *SwapchainFromHandle(pReleaseInfo->swapchain);
2903 ANativeWindow* window = swapchain.surface.window.get();
2904
2905 // If in shared present mode, don't actually release the image back to the BQ.
2906 // Both sides share it forever.
2907 if (swapchain.shared)
2908 return VK_SUCCESS;
2909
2910 for (uint32_t i = 0; i < pReleaseInfo->imageIndexCount; i++) {
2911 Swapchain::Image& img = swapchain.images[pReleaseInfo->pImageIndices[i]];
2912 window->cancelBuffer(window, img.buffer.get(), img.dequeue_fence);
2913
2914 // cancelBuffer has taken ownership of the dequeue fence
2915 img.dequeue_fence = -1;
2916 // if we're still holding a release fence, get rid of it now
2917 if (img.release_fence >= 0) {
2918 close(img.release_fence);
2919 img.release_fence = -1;
2920 }
2921 img.dequeued = false;
2922 }
2923
2924 return VK_SUCCESS;
2925 }
2926
2927 } // namespace driver
2928 } // namespace vulkan
2929