1 // Copyright 2019 The Android Open Source Project
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
#include <cassert>
#include <cstdarg>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <type_traits>
17
18 #include "FrameBuffer.h"
19 #include "GfxStreamAgents.h"
20 #include "VirtioGpuFrontend.h"
21 #include "aemu/base/Metrics.h"
22 #include "aemu/base/system/System.h"
23 #include "gfxstream/Strings.h"
24 #include "gfxstream/host/Features.h"
25 #include "gfxstream/host/Tracing.h"
26 #include "host-common/FeatureControl.h"
27 #include "host-common/GraphicsAgentFactory.h"
28 #include "host-common/android_pipe_common.h"
29 #include "host-common/android_pipe_device.h"
30 #include "host-common/globals.h"
31 #include "host-common/opengles-pipe.h"
32 #include "host-common/opengles.h"
33 #include "host-common/refcount-pipe.h"
34 #include "host-common/vm_operations.h"
35 #include "vulkan/VulkanDispatch.h"
36 #include "render-utils/RenderLib.h"
37 #include "vk_util.h"
38
39 extern "C" {
40 #include "gfxstream/virtio-gpu-gfxstream-renderer-unstable.h"
41 #include "gfxstream/virtio-gpu-gfxstream-renderer.h"
42 #include "host-common/goldfish_pipe.h"
43 } // extern "C"
44
45 #define MAX_DEBUG_BUFFER_SIZE 512
46 #define ELLIPSIS "...\0"
47 #define ELLIPSIS_LEN 4
48
// Signature of the logging hook exposed by the emulator (AEMU) build.
typedef void (*emulogger)(char severity, const char* file, unsigned int line,
                          int64_t timestamp_us, const char* message);

// Forwards a log record to |logger| if and only if T is exactly the emulator
// logger type and the pointer is non-null. Returns true when the message was
// consumed by the logger, false otherwise.
//
// Replaces the previous std::enable_if overload pair with a single C++17
// `if constexpr` template: for any T other than emulogger the call branch is
// discarded at compile time and the function simply reports false.
template <typename T>
bool call_logger_if_valid(T logger, char severity, const char* file, unsigned int line,
                          int64_t timestamp_us, const char* message) {
    if constexpr (std::is_same_v<T, emulogger>) {
        if (!logger) {
            return false;
        }
        logger(severity, file, line, timestamp_us, message);
        return true;
    } else {
        // Not a logger function pointer; silence unused-parameter warnings.
        (void)logger;
        (void)severity;
        (void)file;
        (void)line;
        (void)timestamp_us;
        (void)message;
        return false;
    }
}
70
// Debug sink used by stream_renderer_log(): an opaque user pointer plus the
// callback it is handed to. Presumably installed from stream_renderer_init()'s
// USER_DATA / DEBUG_CALLBACK parameters — confirm against the init path.
void* globalUserData = nullptr;
stream_renderer_debug_callback globalDebugCallback = nullptr;
73
// Overwrites the tail of the |remaining_size|-byte window at |buf| with
// "..." plus a NUL terminator to flag that a log message was truncated.
// If there is not enough room for the full marker, at least guarantees NUL
// termination of the window.
static void append_truncation_marker(char* buf, int remaining_size) {
    // Three dots plus the terminating NUL ("...\0"), matching the former
    // ELLIPSIS / ELLIPSIS_LEN macros.
    constexpr int kMarkerLen = 4;
    if (remaining_size >= kMarkerLen) {
        memcpy(buf + remaining_size - kMarkerLen, "...", kMarkerLen);
    } else if (remaining_size >= 1) {
        buf[remaining_size - 1] = '\0';
    }
    // remaining_size == 0 is reachable in practice: log_with_prefix() sets it
    // to 0 when the prefix alone fills the buffer, and the caller may then
    // invoke us again. Treat it as a no-op rather than assert(false), which
    // would abort debug builds on an over-long file/function name.
}
85
log_with_prefix(char * & buf,int & remaining_size,const char * file,int line,const char * pretty_function)86 static void log_with_prefix(char*& buf, int& remaining_size, const char* file, int line,
87 const char* pretty_function) {
88 // Add logging prefix if necessary
89 int formatted_len = snprintf(buf, remaining_size, "[%s(%d)] %s ", file, line, pretty_function);
90
91 // Handle potential truncation
92 if (formatted_len >= remaining_size) {
93 append_truncation_marker(buf, remaining_size);
94 remaining_size = 0;
95 } else {
96 buf += formatted_len; // Adjust buf
97 remaining_size -= formatted_len; // Reduce remaining buffer size
98 }
99 }
100
translate_severity(uint32_t type)101 static char translate_severity(uint32_t type) {
102 switch (type) {
103 case STREAM_RENDERER_DEBUG_ERROR:
104 return 'E';
105 case STREAM_RENDERER_DEBUG_WARN:
106 return 'W';
107 case STREAM_RENDERER_DEBUG_INFO:
108 return 'I';
109 case STREAM_RENDERER_DEBUG_DEBUG:
110 return 'D';
111 default:
112 return 'D';
113 }
114 }
115
116 using android::AndroidPipe;
117 using android::base::ManagedDescriptor;
118 using android::base::MetricsLogger;
119 using gfxstream::host::VirtioGpuFrontend;
120
sFrontend()121 static VirtioGpuFrontend* sFrontend() {
122 static VirtioGpuFrontend* p = new VirtioGpuFrontend;
123 return p;
124 }
125
126 extern "C" {
127
stream_renderer_log(uint32_t type,const char * file,int line,const char * pretty_function,const char * format,...)128 void stream_renderer_log(uint32_t type, const char* file, int line, const char* pretty_function,
129 const char* format, ...) {
130
131 char printbuf[MAX_DEBUG_BUFFER_SIZE];
132 char* buf = printbuf;
133 int remaining_size = MAX_DEBUG_BUFFER_SIZE;
134 static_assert(MAX_DEBUG_BUFFER_SIZE > 4);
135
136 // Add the logging prefix if needed
137 #ifdef CONFIG_AEMU
138 static gfxstream_logger_t gfx_logger = get_gfx_stream_logger();
139 if (!gfx_logger) {
140 log_with_prefix(buf, remaining_size, file, line, pretty_function);
141 }
142 #else
143 log_with_prefix(buf, remaining_size, file, line, pretty_function);
144 #endif
145
146 // Format the message with variable arguments
147 va_list args;
148 va_start(args, format);
149 int formatted_len = vsnprintf(buf, remaining_size, format, args);
150 va_end(args);
151
152 // Handle potential truncation
153 if (formatted_len >= remaining_size) {
154 append_truncation_marker(buf, remaining_size);
155 }
156
157 #ifdef CONFIG_AEMU
158 // Forward to emulator?
159 if (call_logger_if_valid(gfx_logger, translate_severity(type), file, line, 0, printbuf)) {
160 return;
161 }
162 #endif
163
164 // To a gfxstream debugger?
165 if (globalUserData && globalDebugCallback) {
166 struct stream_renderer_debug debug = {0};
167 debug.debug_type = type;
168 debug.message = &printbuf[0];
169 globalDebugCallback(globalUserData, &debug);
170 } else {
171 // Cannot use logging routines, fallback to stderr
172 fprintf(stderr, "stream_renderer_log error: %s\n", printbuf);
173 }
174 }
175
// Creates a virtio-gpu resource described by |args|, with optional initial
// guest backing iovecs. Forwards to VirtioGpuFrontend::createResource().
VG_EXPORT int stream_renderer_resource_create(struct stream_renderer_resource_create_args* args,
                                              struct iovec* iov, uint32_t num_iovs) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_create()");

    return sFrontend()->createResource(args, iov, num_iovs);
}
183
// Drops the frontend's reference to |res_handle|.
VG_EXPORT void stream_renderer_resource_unref(uint32_t res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_unref()");

    sFrontend()->unrefResource(res_handle);
}
190
// Destroys the rendering context identified by |handle|.
VG_EXPORT void stream_renderer_context_destroy(uint32_t handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_context_destroy()");

    sFrontend()->destroyContext(handle);
}
197
// Submits a guest command buffer to the frontend for execution.
VG_EXPORT int stream_renderer_submit_cmd(struct stream_renderer_command* cmd) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_submit_cmd()");

    return sFrontend()->submitCmd(cmd);
}
203
// Reads resource contents back into the supplied guest iovecs.
// NOTE: |ctx_id|, |level|, |stride| and |layer_stride| are accepted for API
// compatibility but are not forwarded to the frontend.
VG_EXPORT int stream_renderer_transfer_read_iov(uint32_t handle, uint32_t ctx_id, uint32_t level,
                                                uint32_t stride, uint32_t layer_stride,
                                                struct stream_renderer_box* box, uint64_t offset,
                                                struct iovec* iov, int iovec_cnt) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_transfer_read_iov()");

    return sFrontend()->transferReadIov(handle, offset, box, iov, iovec_cnt);
}
213
// Writes data from the supplied guest iovecs into the resource.
// NOTE: |ctx_id|, |level|, |stride| and |layer_stride| are accepted for API
// compatibility but are not forwarded to the frontend.
VG_EXPORT int stream_renderer_transfer_write_iov(uint32_t handle, uint32_t ctx_id, int level,
                                                 uint32_t stride, uint32_t layer_stride,
                                                 struct stream_renderer_box* box, uint64_t offset,
                                                 struct iovec* iovec, unsigned int iovec_cnt) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_transfer_write_iov()");

    return sFrontend()->transferWriteIov(handle, offset, box, iovec, iovec_cnt);
}
223
// Reports the byte size of capset |set| via |max_size|; |max_ver| is not
// populated. Also names the current thread's trace track (the name assumes
// this is invoked from the main virtio-gpu thread).
VG_EXPORT void stream_renderer_get_cap_set(uint32_t set, uint32_t* max_ver, uint32_t* max_size) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_get_cap_set()");

    GFXSTREAM_TRACE_NAME_TRACK(GFXSTREAM_TRACE_TRACK_FOR_CURRENT_THREAD(),
                               "Main Virtio Gpu Thread");

    // `max_ver` not useful
    return sFrontend()->getCapset(set, max_size);
}
234
// Copies capset |set| into the caller-provided |caps| buffer; |version| is
// ignored.
VG_EXPORT void stream_renderer_fill_caps(uint32_t set, uint32_t version, void* caps) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_fill_caps()");

    // `version` not useful
    return sFrontend()->fillCaps(set, caps);
}
241
// Attaches guest backing iovecs to an existing resource.
VG_EXPORT int stream_renderer_resource_attach_iov(int res_handle, struct iovec* iov, int num_iovs) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_attach_iov()");

    return sFrontend()->attachIov(res_handle, iov, num_iovs);
}
248
// Detaches guest backing from the resource.
// NOTE(review): the |iov|/|num_iovs| out-parameters are never populated here —
// confirm callers do not rely on receiving the detached iovecs back.
VG_EXPORT void stream_renderer_resource_detach_iov(int res_handle, struct iovec** iov,
                                                   int* num_iovs) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_detach_iov()");

    return sFrontend()->detachIov(res_handle);
}
256
// Associates resource |res_handle| with context |ctx_id|.
VG_EXPORT void stream_renderer_ctx_attach_resource(int ctx_id, int res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_ctx_attach_resource()");

    sFrontend()->attachResource(ctx_id, res_handle);
}
263
// Removes the association between resource |res_handle| and context |ctx_id|.
VG_EXPORT void stream_renderer_ctx_detach_resource(int ctx_id, int res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_ctx_detach_resource()");

    sFrontend()->detachResource(ctx_id, res_handle);
}
270
// Fills |info| with metadata for |res_handle|.
VG_EXPORT int stream_renderer_resource_get_info(int res_handle,
                                                struct stream_renderer_resource_info* info) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_get_info()");

    return sFrontend()->getResourceInfo(res_handle, info);
}
278
// Flushes pending updates for |res_handle| (e.g. scanout damage).
VG_EXPORT void stream_renderer_flush(uint32_t res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_flush()");

    sFrontend()->flushResource(res_handle);
}
284
// Creates a blob resource in context |ctx_id|.
// NOTE(review): |iovecs|/|num_iovs| are accepted but not forwarded, and the
// result of createBlob() is discarded — this always returns 0. Confirm both
// are intentional.
VG_EXPORT int stream_renderer_create_blob(uint32_t ctx_id, uint32_t res_handle,
                                          const struct stream_renderer_create_blob* create_blob,
                                          const struct iovec* iovecs, uint32_t num_iovs,
                                          const struct stream_renderer_handle* handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_create_blob()");

    sFrontend()->createBlob(ctx_id, res_handle, create_blob, handle);
    return 0;
}
295
// Exports the blob backing |res_handle| into |handle|.
VG_EXPORT int stream_renderer_export_blob(uint32_t res_handle,
                                          struct stream_renderer_handle* handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_export_blob()");

    return sFrontend()->exportBlob(res_handle, handle);
}
303
// Maps the resource into host address space, returning the host virtual
// address and mapping size through the out-parameters.
VG_EXPORT int stream_renderer_resource_map(uint32_t res_handle, void** hvaOut, uint64_t* sizeOut) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_map()");

    return sFrontend()->resourceMap(res_handle, hvaOut, sizeOut);
}
310
// Undoes a prior stream_renderer_resource_map() for |res_handle|.
VG_EXPORT int stream_renderer_resource_unmap(uint32_t res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_unmap()");

    return sFrontend()->resourceUnmap(res_handle);
}
317
// Creates a rendering context; |name| is |nlen| bytes and |context_init|
// carries the virtio-gpu context-init flags.
VG_EXPORT int stream_renderer_context_create(uint32_t ctx_id, uint32_t nlen, const char* name,
                                             uint32_t context_init) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_context_create()");

    return sFrontend()->createContext(ctx_id, nlen, name, context_init);
}
325
// Creates a fence on either the global timeline or a context-specific ring,
// selected by |fence->flags|. Returns 0 on success, or the error from
// acquireContextFence() for shareable fences.
VG_EXPORT int stream_renderer_create_fence(const struct stream_renderer_fence* fence) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_create_fence()");

    // Shareable fences first acquire a context fence; propagate failure
    // before any timeline state is created.
    if (fence->flags & STREAM_RENDERER_FLAG_FENCE_SHAREABLE) {
        int ret = sFrontend()->acquireContextFence(fence->ctx_id, fence->fence_id);
        if (ret) {
            return ret;
        }
    }

    // RING_IDX selects a per-context ring; otherwise the fence lands on the
    // global timeline.
    if (fence->flags & STREAM_RENDERER_FLAG_FENCE_RING_IDX) {
        sFrontend()->createFence(fence->fence_id, VirtioGpuRingContextSpecific{
                                                      .mCtxId = fence->ctx_id,
                                                      .mRingIdx = fence->ring_idx,
                                                  });
    } else {
        sFrontend()->createFence(fence->fence_id, VirtioGpuRingGlobal{});
    }

    return 0;
}
348
// Exports fence |fence_id| into |handle|.
VG_EXPORT int stream_renderer_export_fence(uint64_t fence_id,
                                           struct stream_renderer_handle* handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_export_fence()");

    return sFrontend()->exportFence(fence_id, handle);
}
356
// Imports a platform-specific resource (opaque |resource| described by
// |res_info|) under |res_handle|.
VG_EXPORT int stream_renderer_platform_import_resource(int res_handle, int res_info,
                                                       void* resource) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_platform_import_resource()");

    return sFrontend()->platformImportResource(res_handle, res_info, resource);
}
364
// Creates an EGL context shared with the renderer's context; returns an
// opaque handle (see the matching destroy entry point below).
VG_EXPORT void* stream_renderer_platform_create_shared_egl_context() {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_platform_create_shared_egl_context()");

    return sFrontend()->platformCreateSharedEglContext();
}
371
// Destroys a context previously returned by
// stream_renderer_platform_create_shared_egl_context().
VG_EXPORT int stream_renderer_platform_destroy_shared_egl_context(void* context) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_platform_destroy_shared_egl_context()");

    return sFrontend()->platformDestroySharedEglContext(context);
}
378
// Waits for outstanding synchronization on |res_handle|.
VG_EXPORT int stream_renderer_wait_sync_resource(uint32_t res_handle) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_wait_sync_resource()");

    return sFrontend()->waitSyncResource(res_handle);
}
385
// Reports the mapping/caching attributes for |res_handle| via |map_info|.
VG_EXPORT int stream_renderer_resource_map_info(uint32_t res_handle, uint32_t* map_info) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_resource_map_info()");

    return sFrontend()->resourceMapInfo(res_handle, map_info);
}
392
// Fills |vulkan_info| with the Vulkan export metadata for |res_handle|.
VG_EXPORT int stream_renderer_vulkan_info(uint32_t res_handle,
                                          struct stream_renderer_vulkan_info* vulkan_info) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY,
                          "stream_renderer_vulkan_info()");

    return sFrontend()->vulkanInfo(res_handle, vulkan_info);
}
400
// Suspends the renderer ahead of a snapshot. Currently a no-op that always
// succeeds; the real work still lives elsewhere (see TODO).
VG_EXPORT int stream_renderer_suspend() {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_suspend()");

    // TODO: move pauseAllPreSave() here after kumquat updated.

    return 0;
}
408
// Saves renderer state into directory |dir|. Fails with -EINVAL when the
// build lacks snapshot frontend support.
VG_EXPORT int stream_renderer_snapshot(const char* dir) {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_snapshot()");

#ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_FRONTEND_SUPPORT
    return sFrontend()->snapshot(dir);
#else
    stream_renderer_error("Snapshot save requested without support.");
    return -EINVAL;
#endif
}
419
stream_renderer_restore(const char * dir)420 VG_EXPORT int stream_renderer_restore(const char* dir) {
421 GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_restore()");
422
423 #ifdef GFXSTREAM_BUILD_WITH_SNAPSHOT_FRONTEND_SUPPORT
424 return sFrontend()->restore(dir);
425 #else
426 stream_renderer_error("Snapshot save requested without support.");
427 return -EINVAL;
428 #endif
429 }
430
// Resumes the renderer after a snapshot/restore. Currently a no-op that
// always succeeds; the real work still lives elsewhere (see TODO).
VG_EXPORT int stream_renderer_resume() {
    GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_resume()");

    // TODO: move resumeAll() here after kumquat updated.

    return 0;
}
438
// Adapter table mapping the C GoldfishPipeServiceOps interface onto the
// android_pipe_* host implementation. The guest save/load hooks and the dma_*
// hooks are no-ops here (see the per-entry comments below); the static_asserts
// verify that the Goldfish and android-pipe enum/struct layouts stay in sync.
static const GoldfishPipeServiceOps goldfish_pipe_service_ops = {
    // guest_open()
    [](GoldfishHwPipe* hwPipe) -> GoldfishHostPipe* {
        return static_cast<GoldfishHostPipe*>(android_pipe_guest_open(hwPipe));
    },
    // guest_open_with_flags()
    [](GoldfishHwPipe* hwPipe, uint32_t flags) -> GoldfishHostPipe* {
        return static_cast<GoldfishHostPipe*>(android_pipe_guest_open_with_flags(hwPipe, flags));
    },
    // guest_close()
    [](GoldfishHostPipe* hostPipe, GoldfishPipeCloseReason reason) {
        static_assert((int)GOLDFISH_PIPE_CLOSE_GRACEFUL == (int)PIPE_CLOSE_GRACEFUL,
                      "Invalid PIPE_CLOSE_GRACEFUL value");
        static_assert((int)GOLDFISH_PIPE_CLOSE_REBOOT == (int)PIPE_CLOSE_REBOOT,
                      "Invalid PIPE_CLOSE_REBOOT value");
        static_assert((int)GOLDFISH_PIPE_CLOSE_LOAD_SNAPSHOT == (int)PIPE_CLOSE_LOAD_SNAPSHOT,
                      "Invalid PIPE_CLOSE_LOAD_SNAPSHOT value");
        static_assert((int)GOLDFISH_PIPE_CLOSE_ERROR == (int)PIPE_CLOSE_ERROR,
                      "Invalid PIPE_CLOSE_ERROR value");

        android_pipe_guest_close(hostPipe, static_cast<PipeCloseReason>(reason));
    },
    // guest_pre_load()
    [](QEMUFile* file) { (void)file; },
    // guest_post_load()
    [](QEMUFile* file) { (void)file; },
    // guest_pre_save()
    [](QEMUFile* file) { (void)file; },
    // guest_post_save()
    [](QEMUFile* file) { (void)file; },
    // guest_load()
    [](QEMUFile* file, GoldfishHwPipe* hwPipe, char* force_close) -> GoldfishHostPipe* {
        (void)file;
        (void)hwPipe;
        (void)force_close;
        return nullptr;
    },
    // guest_save()
    [](GoldfishHostPipe* hostPipe, QEMUFile* file) {
        (void)hostPipe;
        (void)file;
    },
    // guest_poll()
    [](GoldfishHostPipe* hostPipe) {
        static_assert((int)GOLDFISH_PIPE_POLL_IN == (int)PIPE_POLL_IN, "invalid POLL_IN values");
        static_assert((int)GOLDFISH_PIPE_POLL_OUT == (int)PIPE_POLL_OUT, "invalid POLL_OUT values");
        static_assert((int)GOLDFISH_PIPE_POLL_HUP == (int)PIPE_POLL_HUP, "invalid POLL_HUP values");

        return static_cast<GoldfishPipePollFlags>(android_pipe_guest_poll(hostPipe));
    },
    // guest_recv()
    [](GoldfishHostPipe* hostPipe, GoldfishPipeBuffer* buffers, int numBuffers) -> int {
        // NOTE: Assumes that AndroidPipeBuffer and GoldfishPipeBuffer
        // have exactly the same layout.
        static_assert(sizeof(AndroidPipeBuffer) == sizeof(GoldfishPipeBuffer),
                      "Invalid PipeBuffer sizes");
        // We can't use a static_assert with offsetof() because in msvc, it uses
        // reinterpret_cast.
        // TODO: Add runtime assertion instead?
        // https://developercommunity.visualstudio.com/content/problem/22196/static-assert-cannot-compile-constexprs-method-tha.html
#ifndef _MSC_VER
        static_assert(offsetof(AndroidPipeBuffer, data) == offsetof(GoldfishPipeBuffer, data),
                      "Invalid PipeBuffer::data offsets");
        static_assert(offsetof(AndroidPipeBuffer, size) == offsetof(GoldfishPipeBuffer, size),
                      "Invalid PipeBuffer::size offsets");
#endif
        return android_pipe_guest_recv(hostPipe, reinterpret_cast<AndroidPipeBuffer*>(buffers),
                                       numBuffers);
    },
    // wait_guest_recv()
    [](GoldfishHostPipe* hostPipe) { android_pipe_wait_guest_recv(hostPipe); },
    // guest_send()
    [](GoldfishHostPipe** hostPipe, const GoldfishPipeBuffer* buffers, int numBuffers) -> int {
        return android_pipe_guest_send(reinterpret_cast<void**>(hostPipe),
                                       reinterpret_cast<const AndroidPipeBuffer*>(buffers),
                                       numBuffers);
    },
    // wait_guest_send()
    [](GoldfishHostPipe* hostPipe) { android_pipe_wait_guest_send(hostPipe); },
    // guest_wake_on()
    [](GoldfishHostPipe* hostPipe, GoldfishPipeWakeFlags wakeFlags) {
        android_pipe_guest_wake_on(hostPipe, static_cast<int>(wakeFlags));
    },
    // dma_add_buffer()
    [](void* pipe, uint64_t paddr, uint64_t sz) {
        // not considered for virtio
    },
    // dma_remove_buffer()
    [](uint64_t paddr) {
        // not considered for virtio
    },
    // dma_invalidate_host_mappings()
    []() {
        // not considered for virtio
    },
    // dma_reset_host_mappings()
    []() {
        // not considered for virtio
    },
    // dma_save_mappings()
    [](QEMUFile* file) { (void)file; },
    // dma_load_mappings()
    [](QEMUFile* file) { (void)file; },
};
543
// One-time host renderer bring-up: seeds environment variables from
// |renderer_flags|, initializes the Vulkan dispatch and EmuglConfig, starts
// the OpenGLES renderer at |display_width| x |display_height|, then wires up
// the opengles and refcount pipes. Returns 0 on success, -EINVAL if the
// renderer failed to start. The call order below is significant; do not
// reorder without verifying against the emulator init sequence.
static int stream_renderer_opengles_init(uint32_t display_width, uint32_t display_height,
                                         int renderer_flags, gfxstream::host::FeatureSet features) {
    stream_renderer_debug("start. display dimensions: width %u height %u, renderer flags: 0x%x",
                          display_width, display_height, renderer_flags);

    // Flags processing

    // TODO: hook up "gfxstream egl" to the renderer flags
    // STREAM_RENDERER_FLAGS_USE_EGL_BIT in crosvm
    // as it's specified from launch_cvd.
    // At the moment, use ANDROID_GFXSTREAM_EGL=1
    // For test on GCE
    if (android::base::getEnvironmentVariable("ANDROID_GFXSTREAM_EGL") == "1") {
        android::base::setEnvironmentVariable("ANDROID_EGL_ON_EGL", "1");
        android::base::setEnvironmentVariable("ANDROID_EMUGL_LOG_PRINT", "1");
        android::base::setEnvironmentVariable("ANDROID_EMUGL_VERBOSE", "1");
    }
    // end for test on GCE

    android::base::setEnvironmentVariable("ANDROID_EMU_HEADLESS", "1");

    // EGL-on-EGL can be requested either by flag or by environment variable.
    bool egl2eglByEnv = android::base::getEnvironmentVariable("ANDROID_EGL_ON_EGL") == "1";
    bool egl2eglByFlag = renderer_flags & STREAM_RENDERER_FLAGS_USE_EGL_BIT;
    bool enable_egl2egl = egl2eglByFlag || egl2eglByEnv;
    if (enable_egl2egl) {
        android::base::setEnvironmentVariable("ANDROID_GFXSTREAM_EGL", "1");
        android::base::setEnvironmentVariable("ANDROID_EGL_ON_EGL", "1");
    }

    bool surfaceless = renderer_flags & STREAM_RENDERER_FLAGS_USE_SURFACELESS_BIT;

    android::featurecontrol::productFeatureOverride();

    gfxstream::vk::vkDispatch(false /* don't use test ICD */);

    auto androidHw = aemu_get_android_hw();

    // Address-space-graphics transport tuning values.
    androidHw->hw_gltransport_asg_writeBufferSize = 1048576;
    androidHw->hw_gltransport_asg_writeStepSize = 262144;
    androidHw->hw_gltransport_asg_dataRingSize = 524288;
    androidHw->hw_gltransport_drawFlushInterval = 10000;

    EmuglConfig config;

    // Make all the console agents available.
    android::emulation::injectGraphicsAgents(android::emulation::GfxStreamGraphicsAgentFactory());

    emuglConfig_init(&config, true /* gpu enabled */, "auto",
                     enable_egl2egl ? "swiftshader_indirect" : "host", 64, /* bitness */
                     surfaceless, /* no window */
                     false, /* blocklisted */
                     false, /* has guest renderer */
                     WINSYS_GLESBACKEND_PREFERENCE_AUTO, true /* force host gpu vulkan */);

    emuglConfig_setupEnv(&config);

    android_prepareOpenglesEmulation();

    {
        // Keep the render library alive for the process lifetime.
        static gfxstream::RenderLibPtr renderLibPtr = gfxstream::initLibrary();
        android_setOpenglesEmulation(renderLibPtr.get(), nullptr, nullptr);
    }

    // GLES version reported by the started renderer (currently only logged
    // indirectly; values 1/28 below are the requested API level inputs).
    int maj;
    int min;
    android_startOpenglesRenderer(display_width, display_height, 1, 28, getGraphicsAgents()->vm,
                                  getGraphicsAgents()->emu, getGraphicsAgents()->multi_display,
                                  &features, &maj, &min);

    char* vendor = nullptr;
    char* renderer = nullptr;
    char* version = nullptr;

    android_getOpenglesHardwareStrings(&vendor, &renderer, &version);

    stream_renderer_info("GL strings; [%s] [%s] [%s].", vendor, renderer, version);

    auto openglesRenderer = android_getOpenglesRenderer();

    if (!openglesRenderer) {
        stream_renderer_error("No renderer started, fatal");
        return -EINVAL;
    }

    // Route pipe traffic through the virtio-gpu transport.
    address_space_set_vm_operations(getGraphicsAgents()->vm);
    android_init_opengles_pipe();
    android_opengles_pipe_set_recv_mode(2 /* virtio-gpu */);
    android_init_refcount_pipe();

    return 0;
}
635
636 namespace {
637
// Derives the initial gfxstream FeatureSet from |renderer_flags|, then applies
// per-feature overrides parsed from |renderer_features| — a comma-separated
// list of "name:enabled" / "name:disabled" pairs. Returns 0 on success, or
// -EINVAL on malformed input or an inconsistent feature combination.
int parseGfxstreamFeatures(const int renderer_flags,
                           const std::string& renderer_features,
                           gfxstream::host::FeatureSet& features) {
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, ExternalBlob,
        renderer_flags & STREAM_RENDERER_FLAGS_USE_EXTERNAL_BLOB);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(&features, VulkanExternalSync,
                                       renderer_flags & STREAM_RENDERER_FLAGS_VULKAN_EXTERNAL_SYNC);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GlAsyncSwap, false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GlDirectMem, false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GlDma, false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GlesDynamicVersion, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GlPipeChecksum, false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, GuestVulkanOnly,
        (renderer_flags & STREAM_RENDERER_FLAGS_USE_VK_BIT) &&
            !(renderer_flags & STREAM_RENDERER_FLAGS_USE_GLES_BIT));
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, HostComposition, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, NativeTextureDecompression, false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, NoDelayCloseColorBuffer, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, PlayStoreImage,
        !(renderer_flags & STREAM_RENDERER_FLAGS_USE_GLES_BIT));
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, RefCountPipe,
        /*Resources are ref counted via guest file objects.*/ false);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, SystemBlob,
        renderer_flags & STREAM_RENDERER_FLAGS_USE_SYSTEM_BLOB);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VirtioGpuFenceContexts, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VirtioGpuNativeSync, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VirtioGpuNext, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, Vulkan,
        renderer_flags & STREAM_RENDERER_FLAGS_USE_VK_BIT);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanBatchedDescriptorSetUpdate, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanIgnoredHandles, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanNativeSwapchain,
        renderer_flags & STREAM_RENDERER_FLAGS_VULKAN_NATIVE_SWAPCHAIN_BIT);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanNullOptionalStrings, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanQueueSubmitWithCommands, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanShaderFloat16Int8, true);
    GFXSTREAM_SET_FEATURE_ON_CONDITION(
        &features, VulkanSnapshots,
        android::base::getEnvironmentVariable("ANDROID_GFXSTREAM_CAPTURE_VK_SNAPSHOT") == "1");

    // Apply explicit overrides: each entry is "<feature>:<enabled|disabled>".
    for (const std::string& renderer_feature : gfxstream::Split(renderer_features, ",")) {
        if (renderer_feature.empty()) continue;

        const std::vector<std::string>& parts = gfxstream::Split(renderer_feature, ":");
        if (parts.size() != 2) {
            stream_renderer_error("Error: invalid renderer features: %s",
                                  renderer_features.c_str());
            return -EINVAL;
        }

        const std::string& feature_name = parts[0];

        auto feature_it = features.map.find(feature_name);
        if (feature_it == features.map.end()) {
            stream_renderer_error("Error: invalid renderer feature: '%s'", feature_name.c_str());
            return -EINVAL;
        }

        const std::string& feature_status = parts[1];
        if (feature_status != "enabled" && feature_status != "disabled") {
            stream_renderer_error("Error: invalid option %s for renderer feature: %s",
                                  feature_status.c_str(), feature_name.c_str());
            return -EINVAL;
        }

        auto& feature_info = feature_it->second;
        feature_info->enabled = feature_status == "enabled";
        feature_info->reason = "Overridden via STREAM_RENDERER_PARAM_RENDERER_FEATURES";

        // NOTE(review): this is an informational message logged at error
        // severity — confirm whether that is intentional.
        stream_renderer_error("Gfxstream feature %s %s", feature_name.c_str(),
                              feature_status.c_str());
    }

    // Cross-feature consistency checks.
    if (features.SystemBlob.enabled) {
        if (!features.ExternalBlob.enabled) {
            stream_renderer_error("The SystemBlob features requires the ExternalBlob feature.");
            return -EINVAL;
        }
        // Warn on non-Windows hosts: the message states the feature has only
        // been exercised on Windows.
#ifndef _WIN32
        stream_renderer_warn("Warning: USE_SYSTEM_BLOB has only been tested on Windows");
#endif
    }
    if (features.VulkanNativeSwapchain.enabled && !features.Vulkan.enabled) {
        stream_renderer_error("can't enable vulkan native swapchain, Vulkan is disabled");
        return -EINVAL;
    }

    return 0;
}
750
751 } // namespace
752
stream_renderer_init(struct stream_renderer_param * stream_renderer_params,uint64_t num_params)753 VG_EXPORT int stream_renderer_init(struct stream_renderer_param* stream_renderer_params,
754 uint64_t num_params) {
755 // Required parameters.
756 std::unordered_set<uint64_t> required_params{STREAM_RENDERER_PARAM_USER_DATA,
757 STREAM_RENDERER_PARAM_RENDERER_FLAGS,
758 STREAM_RENDERER_PARAM_FENCE_CALLBACK};
759
760 // String names of the parameters.
761 std::unordered_map<uint64_t, std::string> param_strings{
762 {STREAM_RENDERER_PARAM_USER_DATA, "USER_DATA"},
763 {STREAM_RENDERER_PARAM_RENDERER_FLAGS, "RENDERER_FLAGS"},
764 {STREAM_RENDERER_PARAM_FENCE_CALLBACK, "FENCE_CALLBACK"},
765 {STREAM_RENDERER_PARAM_WIN0_WIDTH, "WIN0_WIDTH"},
766 {STREAM_RENDERER_PARAM_WIN0_HEIGHT, "WIN0_HEIGHT"},
767 {STREAM_RENDERER_PARAM_DEBUG_CALLBACK, "DEBUG_CALLBACK"},
768 {STREAM_RENDERER_SKIP_OPENGLES_INIT, "SKIP_OPENGLES_INIT"},
769 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT,
770 "METRICS_CALLBACK_ADD_INSTANT_EVENT"},
771 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR,
772 "METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR"},
773 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC,
774 "METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC"},
775 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT,
776 "METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT"},
777 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_SET_ANNOTATION, "METRICS_CALLBACK_SET_ANNOTATION"},
778 {STREAM_RENDERER_PARAM_METRICS_CALLBACK_ABORT, "METRICS_CALLBACK_ABORT"}};
779
780 // Print full values for these parameters:
781 // Values here must not be pointers (e.g. callback functions), to avoid potentially identifying
782 // someone via ASLR. Pointers in ASLR are randomized on boot, which means pointers may be
783 // different between users but similar across a single user's sessions.
784 // As a convenience, any value <= 4096 is also printed, to catch small or null pointer errors.
785 std::unordered_set<uint64_t> printed_param_values{STREAM_RENDERER_PARAM_RENDERER_FLAGS,
786 STREAM_RENDERER_PARAM_WIN0_WIDTH,
787 STREAM_RENDERER_PARAM_WIN0_HEIGHT};
788
789 // We may have unknown parameters, so this function is lenient.
790 auto get_param_string = [&](uint64_t key) -> std::string {
791 auto param_string = param_strings.find(key);
792 if (param_string != param_strings.end()) {
793 return param_string->second;
794 } else {
795 return "Unknown param with key=" + std::to_string(key);
796 }
797 };
798
799 // Initialization data.
800 uint32_t display_width = 0;
801 uint32_t display_height = 0;
802 void* renderer_cookie = nullptr;
803 int renderer_flags = 0;
804 std::string renderer_features_str;
805 stream_renderer_fence_callback fence_callback = nullptr;
806 bool skip_opengles = false;
807
808 // Iterate all parameters that we support.
809 stream_renderer_debug("Reading stream renderer parameters:");
810 for (uint64_t i = 0; i < num_params; ++i) {
811 stream_renderer_param& param = stream_renderer_params[i];
812
813 // Print out parameter we are processing. See comment above `printed_param_values` before
814 // adding new prints.
815 if (printed_param_values.find(param.key) != printed_param_values.end() ||
816 param.value <= 4096) {
817 stream_renderer_debug("%s - %llu", get_param_string(param.key).c_str(),
818 static_cast<unsigned long long>(param.value));
819 } else {
820 // If not full value, print that it was passed.
821 stream_renderer_debug("%s", get_param_string(param.key).c_str());
822 }
823
824 // Removing every param we process will leave required_params empty if all provided.
825 required_params.erase(param.key);
826
827 switch (param.key) {
828 case STREAM_RENDERER_PARAM_NULL:
829 break;
830 case STREAM_RENDERER_PARAM_USER_DATA: {
831 renderer_cookie = reinterpret_cast<void*>(static_cast<uintptr_t>(param.value));
832 globalUserData = renderer_cookie;
833 break;
834 }
835 case STREAM_RENDERER_PARAM_RENDERER_FLAGS: {
836 renderer_flags = static_cast<int>(param.value);
837 break;
838 }
839 case STREAM_RENDERER_PARAM_FENCE_CALLBACK: {
840 fence_callback = reinterpret_cast<stream_renderer_fence_callback>(
841 static_cast<uintptr_t>(param.value));
842 break;
843 }
844 case STREAM_RENDERER_PARAM_WIN0_WIDTH: {
845 display_width = static_cast<uint32_t>(param.value);
846 break;
847 }
848 case STREAM_RENDERER_PARAM_WIN0_HEIGHT: {
849 display_height = static_cast<uint32_t>(param.value);
850 break;
851 }
852 case STREAM_RENDERER_PARAM_DEBUG_CALLBACK: {
853 globalDebugCallback = reinterpret_cast<stream_renderer_debug_callback>(
854 static_cast<uintptr_t>(param.value));
855 break;
856 }
857 case STREAM_RENDERER_SKIP_OPENGLES_INIT: {
858 skip_opengles = static_cast<bool>(param.value);
859 break;
860 }
861 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT: {
862 MetricsLogger::add_instant_event_callback =
863 reinterpret_cast<stream_renderer_param_metrics_callback_add_instant_event>(
864 static_cast<uintptr_t>(param.value));
865 break;
866 }
867 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_DESCRIPTOR: {
868 MetricsLogger::add_instant_event_with_descriptor_callback = reinterpret_cast<
869 stream_renderer_param_metrics_callback_add_instant_event_with_descriptor>(
870 static_cast<uintptr_t>(param.value));
871 break;
872 }
873 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_INSTANT_EVENT_WITH_METRIC: {
874 MetricsLogger::add_instant_event_with_metric_callback = reinterpret_cast<
875 stream_renderer_param_metrics_callback_add_instant_event_with_metric>(
876 static_cast<uintptr_t>(param.value));
877 break;
878 }
879 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ADD_VULKAN_OUT_OF_MEMORY_EVENT: {
880 MetricsLogger::add_vulkan_out_of_memory_event = reinterpret_cast<
881 stream_renderer_param_metrics_callback_add_vulkan_out_of_memory_event>(
882 static_cast<uintptr_t>(param.value));
883 break;
884 }
885 case STREAM_RENDERER_PARAM_RENDERER_FEATURES: {
886 renderer_features_str =
887 std::string(reinterpret_cast<const char*>(static_cast<uintptr_t>(param.value)));
888 break;
889 }
890 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_SET_ANNOTATION: {
891 MetricsLogger::set_crash_annotation_callback =
892 reinterpret_cast<stream_renderer_param_metrics_callback_set_annotation>(
893 static_cast<uintptr_t>(param.value));
894 break;
895 }
896 case STREAM_RENDERER_PARAM_METRICS_CALLBACK_ABORT: {
897 emugl::setDieFunction(
898 reinterpret_cast<stream_renderer_param_metrics_callback_abort>(
899 static_cast<uintptr_t>(param.value)));
900 break;
901 }
902 default: {
903 // We skip any parameters we don't recognize.
904 stream_renderer_error(
905 "Skipping unknown parameter key: %llu. May need to upgrade gfxstream.",
906 static_cast<unsigned long long>(param.key));
907 break;
908 }
909 }
910 }
911 stream_renderer_debug("Finished reading parameters");
912
913 // Some required params not found.
914 if (required_params.size() > 0) {
915 stream_renderer_error("Missing required parameters:");
916 for (uint64_t param : required_params) {
917 stream_renderer_error("%s", get_param_string(param).c_str());
918 }
919 stream_renderer_error("Failing initialization intentionally");
920 return -EINVAL;
921 }
922
923 #if GFXSTREAM_UNSTABLE_VULKAN_EXTERNAL_SYNC
924 renderer_flags |= STREAM_RENDERER_FLAGS_VULKAN_EXTERNAL_SYNC;
925 #endif
926
927 gfxstream::host::FeatureSet features;
928 int ret = parseGfxstreamFeatures(renderer_flags, renderer_features_str, features);
929 if (ret) {
930 stream_renderer_error("Failed to initialize: failed to parse Gfxstream features.");
931 return ret;
932 }
933
934 stream_renderer_info("Gfxstream features:");
935 for (const auto& [_, featureInfo] : features.map) {
936 stream_renderer_info(" %s: %s (%s)", featureInfo->name.c_str(),
937 (featureInfo->enabled ? "enabled" : "disabled"),
938 featureInfo->reason.c_str());
939 }
940
941 gfxstream::host::InitializeTracing();
942
943 // Set non product-specific callbacks
944 gfxstream::vk::vk_util::setVkCheckCallbacks(
945 std::make_unique<gfxstream::vk::vk_util::VkCheckCallbacks>(
946 gfxstream::vk::vk_util::VkCheckCallbacks{
947 .onVkErrorDeviceLost =
948 []() {
949 auto fb = gfxstream::FrameBuffer::getFB();
950 if (!fb) {
951 ERR("FrameBuffer not yet initialized. Dropping device lost event");
952 return;
953 }
954 fb->logVulkanDeviceLost();
955 },
956 .onVkErrorOutOfMemory =
957 [](VkResult result, const char* function, int line) {
958 auto fb = gfxstream::FrameBuffer::getFB();
959 if (!fb) {
960 stream_renderer_error(
961 "FrameBuffer not yet initialized. Dropping out of memory event");
962 return;
963 }
964 fb->logVulkanOutOfMemory(result, function, line);
965 },
966 .onVkErrorOutOfMemoryOnAllocation =
967 [](VkResult result, const char* function, int line,
968 std::optional<uint64_t> allocationSize) {
969 auto fb = gfxstream::FrameBuffer::getFB();
970 if (!fb) {
971 stream_renderer_error(
972 "FrameBuffer not yet initialized. Dropping out of memory event");
973 return;
974 }
975 fb->logVulkanOutOfMemory(result, function, line, allocationSize);
976 }}));
977
978 if (!skip_opengles) {
979 // aemu currently does its own opengles initialization in
980 // qemu/android/android-emu/android/opengles.cpp.
981 int ret =
982 stream_renderer_opengles_init(display_width, display_height, renderer_flags, features);
983 if (ret) {
984 return ret;
985 }
986 }
987
988 GFXSTREAM_TRACE_EVENT(GFXSTREAM_TRACE_STREAM_RENDERER_CATEGORY, "stream_renderer_init()");
989
990 sFrontend()->init(renderer_cookie, features, fence_callback);
991 gfxstream::FrameBuffer::waitUntilInitialized();
992
993 stream_renderer_info("Gfxstream initialized successfully!");
994 return 0;
995 }
996
gfxstream_backend_setup_window(void * native_window_handle,int32_t window_x,int32_t window_y,int32_t window_width,int32_t window_height,int32_t fb_width,int32_t fb_height)997 VG_EXPORT void gfxstream_backend_setup_window(void* native_window_handle, int32_t window_x,
998 int32_t window_y, int32_t window_width,
999 int32_t window_height, int32_t fb_width,
1000 int32_t fb_height) {
1001 android_showOpenglesWindow(native_window_handle, window_x, window_y, window_width,
1002 window_height, fb_width, fb_height, 1.0f, 0, false, false);
1003 }
1004
// Shuts down the Gfxstream renderer: tears down the frontend first, then the
// OpenGLES renderer/window. The call order below is deliberate — do not
// reorder.
VG_EXPORT void stream_renderer_teardown() {
    sFrontend()->teardown();

    android_finishOpenglesRenderer();
    android_hideOpenglesWindow();
    android_stopOpenglesRenderer(true);

    stream_renderer_info("Gfxstream shut down completed!");
}
1014
// Forwards a screen-mask image (width x height pixels of RGBA data) to the
// OpenGLES layer.
VG_EXPORT void gfxstream_backend_set_screen_mask(int width, int height,
                                                 const unsigned char* rgbaData) {
    android_setOpenglesScreenMask(width, height, rgbaData);
}
1019
goldfish_pipe_get_service_ops()1020 const GoldfishPipeServiceOps* goldfish_pipe_get_service_ops() { return &goldfish_pipe_service_ops; }
1021
// ABI layout checks: these structs cross the C ABI boundary to the VMM, so
// their sizes and field offsets must stay fixed. A failure here means the
// public header changed in an ABI-breaking way.
static_assert(sizeof(struct stream_renderer_device_id) == 32,
              "stream_renderer_device_id must be 32 bytes");
static_assert(offsetof(struct stream_renderer_device_id, device_uuid) == 0,
              "stream_renderer_device_id.device_uuid must be at offset 0");
static_assert(offsetof(struct stream_renderer_device_id, driver_uuid) == 16,
              "stream_renderer_device_id.driver_uuid must be at offset 16");

static_assert(sizeof(struct stream_renderer_vulkan_info) == 36,
              "stream_renderer_vulkan_info must be 36 bytes");
static_assert(offsetof(struct stream_renderer_vulkan_info, memory_index) == 0,
              "stream_renderer_vulkan_info.memory_index must be at offset 0");
static_assert(offsetof(struct stream_renderer_vulkan_info, device_id) == 4,
              "stream_renderer_vulkan_info.device_id must be at offset 4");

static_assert(sizeof(struct stream_renderer_param_host_visible_memory_mask_entry) == 36,
              "stream_renderer_param_host_visible_memory_mask_entry must be 36 bytes");
static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask_entry, device_id) == 0,
              "stream_renderer_param_host_visible_memory_mask_entry.device_id must be at offset 0");
static_assert(
    offsetof(struct stream_renderer_param_host_visible_memory_mask_entry, memory_type_mask) == 32,
    "stream_renderer_param_host_visible_memory_mask_entry.memory_type_mask must be at offset 32");

static_assert(sizeof(struct stream_renderer_param_host_visible_memory_mask) == 16,
              "stream_renderer_param_host_visible_memory_mask must be 16 bytes");
static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask, entries) == 0,
              "stream_renderer_param_host_visible_memory_mask.entries must be at offset 0");
static_assert(offsetof(struct stream_renderer_param_host_visible_memory_mask, num_entries) == 8,
              "stream_renderer_param_host_visible_memory_mask.num_entries must be at offset 8");

static_assert(sizeof(struct stream_renderer_param) == 16, "stream_renderer_param must be 16 bytes");
static_assert(offsetof(struct stream_renderer_param, key) == 0,
              "stream_renderer_param.key must be at offset 0");
static_assert(offsetof(struct stream_renderer_param, value) == 8,
              "stream_renderer_param.value must be at offset 8");
1056
1057 #ifdef CONFIG_AEMU
1058
// Installs the goldfish pipe service-ops table on the frontend (AEMU builds
// only; see the surrounding CONFIG_AEMU guard).
VG_EXPORT void stream_renderer_set_service_ops(const GoldfishPipeServiceOps* ops) {
    sFrontend()->setServiceOps(ops);
}
1062
1063 #endif // CONFIG_AEMU
1064
1065 } // extern "C"
1066