1 /**************************************************************************
2 *
3 * Copyright (C) 2014 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 **************************************************************************/
24
25 #include <stdio.h>
26 #include <time.h>
27
28 #include <epoxy/gl.h>
29
30 #include <sys/stat.h>
31 #include <fcntl.h>
32 #include <errno.h>
33 #include <unistd.h>
34 #include <sys/mman.h>
35
36 #include "pipe/p_state.h"
37 #include "util/u_format.h"
38 #include "util/u_math.h"
39 #include "vkr_allocator.h"
40 #include "vkr_renderer.h"
41 #include "drm_renderer.h"
42 #include "vrend_renderer.h"
43 #include "proxy/proxy_renderer.h"
44 #include "vrend_winsys.h"
45
46 #include "virglrenderer.h"
47 #include "virglrenderer_hw.h"
48
49 #include "virgl_context.h"
50 #include "virgl_resource.h"
51 #include "virgl_util.h"
52
/* Process-wide renderer state.  Each sub-renderer records whether it was
 * brought up so that cleanup/reset can tear down exactly what exists. */
struct global_state {
   bool client_initialized;   /* virgl_renderer_init() accepted cookie/flags/cbs */
   void *cookie;              /* opaque caller pointer handed back via callbacks */
   int flags;                 /* VIRGL_RENDERER_* flags passed at init */
   const struct virgl_renderer_callbacks *cbs;

   bool resource_initialized;        /* global resource table */
   bool context_initialized;         /* global context table */
   bool winsys_initialized;          /* internal EGL/GLX winsys */
   bool vrend_initialized;           /* GL (virgl) renderer */
   bool vkr_initialized;             /* in-process Venus renderer */
   bool proxy_initialized;           /* render-server proxy */
   bool external_winsys_initialized; /* caller-provided EGL display */
};

static struct global_state state;
69
70 /* new API - just wrap internal API for now */
71
virgl_renderer_resource_create_internal(struct virgl_renderer_resource_create_args * args,UNUSED struct iovec * iov,UNUSED uint32_t num_iovs,void * image)72 static int virgl_renderer_resource_create_internal(struct virgl_renderer_resource_create_args *args,
73 UNUSED struct iovec *iov, UNUSED uint32_t num_iovs,
74 void *image)
75 {
76 struct virgl_resource *res;
77 struct pipe_resource *pipe_res;
78 struct vrend_renderer_resource_create_args vrend_args = { 0 };
79
80 if (!state.vrend_initialized)
81 return EINVAL;
82
83 /* do not accept handle 0 */
84 if (args->handle == 0)
85 return EINVAL;
86
87 vrend_args.target = args->target;
88 vrend_args.format = args->format;
89 vrend_args.bind = args->bind;
90 vrend_args.width = args->width;
91 vrend_args.height = args->height;
92 vrend_args.depth = args->depth;
93 vrend_args.array_size = args->array_size;
94 vrend_args.nr_samples = args->nr_samples;
95 vrend_args.last_level = args->last_level;
96 vrend_args.flags = args->flags;
97
98 pipe_res = vrend_renderer_resource_create(&vrend_args, image);
99 if (!pipe_res)
100 return EINVAL;
101
102 res = virgl_resource_create_from_pipe(args->handle, pipe_res, iov, num_iovs);
103 if (!res) {
104 vrend_renderer_resource_destroy((struct vrend_resource *)pipe_res);
105 return -ENOMEM;
106 }
107
108 res->map_info = vrend_renderer_resource_get_map_info(pipe_res);
109
110 return 0;
111 }
112
virgl_renderer_resource_create(struct virgl_renderer_resource_create_args * args,struct iovec * iov,uint32_t num_iovs)113 int virgl_renderer_resource_create(struct virgl_renderer_resource_create_args *args,
114 struct iovec *iov, uint32_t num_iovs)
115 {
116 TRACE_FUNC();
117 return virgl_renderer_resource_create_internal(args, iov, num_iovs, NULL);
118 }
119
virgl_renderer_resource_import_eglimage(struct virgl_renderer_resource_create_args * args,void * image)120 int virgl_renderer_resource_import_eglimage(struct virgl_renderer_resource_create_args *args, void *image)
121 {
122 TRACE_FUNC();
123 return virgl_renderer_resource_create_internal(args, NULL, 0, image);
124 }
125
virgl_renderer_resource_set_priv(uint32_t res_handle,void * priv)126 void virgl_renderer_resource_set_priv(uint32_t res_handle, void *priv)
127 {
128 struct virgl_resource *res = virgl_resource_lookup(res_handle);
129 if (!res)
130 return;
131
132 res->private_data = priv;
133 }
134
virgl_renderer_resource_get_priv(uint32_t res_handle)135 void *virgl_renderer_resource_get_priv(uint32_t res_handle)
136 {
137 struct virgl_resource *res = virgl_resource_lookup(res_handle);
138 if (!res)
139 return NULL;
140
141 return res->private_data;
142 }
143
detach_resource(struct virgl_context * ctx,void * data)144 static bool detach_resource(struct virgl_context *ctx, void *data)
145 {
146 struct virgl_resource *res = data;
147 ctx->detach_resource(ctx, res);
148 return true;
149 }
150
virgl_renderer_resource_unref(uint32_t res_handle)151 void virgl_renderer_resource_unref(uint32_t res_handle)
152 {
153 struct virgl_resource *res = virgl_resource_lookup(res_handle);
154 struct virgl_context_foreach_args args;
155
156 if (!res)
157 return;
158
159 args.callback = detach_resource;
160 args.data = res;
161 virgl_context_foreach(&args);
162
163 virgl_resource_remove(res->res_id);
164 }
165
virgl_renderer_fill_caps(uint32_t set,uint32_t version,void * caps)166 void virgl_renderer_fill_caps(uint32_t set, uint32_t version,
167 void *caps)
168 {
169 switch (set) {
170 case VIRGL_RENDERER_CAPSET_VIRGL:
171 case VIRGL_RENDERER_CAPSET_VIRGL2:
172 if (state.vrend_initialized)
173 vrend_renderer_fill_caps(set, version, (union virgl_caps *)caps);
174 break;
175 case VIRGL_RENDERER_CAPSET_VENUS:
176 if (state.vkr_initialized)
177 vkr_get_capset(caps);
178 break;
179 case VIRGL_RENDERER_CAPSET_DRM:
180 drm_renderer_capset(caps);
181 break;
182 default:
183 break;
184 }
185 }
186
per_context_fence_retire(struct virgl_context * ctx,uint32_t ring_idx,uint64_t fence_id)187 static void per_context_fence_retire(struct virgl_context *ctx,
188 uint32_t ring_idx,
189 uint64_t fence_id)
190 {
191 state.cbs->write_context_fence(state.cookie,
192 ctx->ctx_id,
193 ring_idx,
194 fence_id);
195 }
196
virgl_renderer_context_create_with_flags(uint32_t ctx_id,uint32_t ctx_flags,uint32_t nlen,const char * name)197 int virgl_renderer_context_create_with_flags(uint32_t ctx_id,
198 uint32_t ctx_flags,
199 uint32_t nlen,
200 const char *name)
201 {
202 const enum virgl_renderer_capset capset_id =
203 ctx_flags & VIRGL_RENDERER_CONTEXT_FLAG_CAPSET_ID_MASK;
204 struct virgl_context *ctx;
205 int ret;
206
207 TRACE_FUNC();
208
209 /* user context id must be greater than 0 */
210 if (ctx_id == 0)
211 return EINVAL;
212
213 /* unsupported flags */
214 if (ctx_flags & ~VIRGL_RENDERER_CONTEXT_FLAG_CAPSET_ID_MASK)
215 return EINVAL;
216
217 ctx = virgl_context_lookup(ctx_id);
218 if (ctx) {
219 return ctx->capset_id == capset_id ? 0 : EINVAL;
220 }
221
222 switch (capset_id) {
223 case VIRGL_RENDERER_CAPSET_VIRGL:
224 case VIRGL_RENDERER_CAPSET_VIRGL2:
225 if (!state.vrend_initialized)
226 return EINVAL;
227 ctx = vrend_renderer_context_create(ctx_id, nlen, name);
228 break;
229 case VIRGL_RENDERER_CAPSET_VENUS:
230 if (state.proxy_initialized)
231 ctx = proxy_context_create(ctx_id, ctx_flags, nlen, name);
232 else if (state.vkr_initialized)
233 ctx = vkr_context_create(nlen, name);
234 else
235 return EINVAL;
236 break;
237 case VIRGL_RENDERER_CAPSET_DRM:
238 ctx = drm_renderer_create(nlen, name);
239 break;
240 default:
241 return EINVAL;
242 break;
243 }
244 if (!ctx)
245 return ENOMEM;
246
247 ctx->ctx_id = ctx_id;
248 ctx->capset_id = capset_id;
249 ctx->fence_retire = per_context_fence_retire;
250
251 ret = virgl_context_add(ctx);
252 if (ret) {
253 ctx->destroy(ctx);
254 return ret;
255 }
256
257 return 0;
258 }
259
virgl_renderer_context_create(uint32_t handle,uint32_t nlen,const char * name)260 int virgl_renderer_context_create(uint32_t handle, uint32_t nlen, const char *name)
261 {
262 return virgl_renderer_context_create_with_flags(handle,
263 VIRGL_RENDERER_CAPSET_VIRGL2,
264 nlen,
265 name);
266 }
267
/* Destroy a context by removing it from the global table. */
void virgl_renderer_context_destroy(uint32_t handle)
{
   TRACE_FUNC();
   virgl_context_remove(handle);
}
273
virgl_renderer_submit_cmd(void * buffer,int ctx_id,int ndw)274 int virgl_renderer_submit_cmd(void *buffer,
275 int ctx_id,
276 int ndw)
277 {
278 TRACE_FUNC();
279 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
280 if (!ctx)
281 return EINVAL;
282
283 if (ndw < 0 || (unsigned)ndw > UINT32_MAX / sizeof(uint32_t))
284 return EINVAL;
285
286 return ctx->submit_cmd(ctx, buffer, ndw * sizeof(uint32_t));
287 }
288
virgl_renderer_transfer_write_iov(uint32_t handle,uint32_t ctx_id,int level,uint32_t stride,uint32_t layer_stride,struct virgl_box * box,uint64_t offset,struct iovec * iovec,unsigned int iovec_cnt)289 int virgl_renderer_transfer_write_iov(uint32_t handle,
290 uint32_t ctx_id,
291 int level,
292 uint32_t stride,
293 uint32_t layer_stride,
294 struct virgl_box *box,
295 uint64_t offset,
296 struct iovec *iovec,
297 unsigned int iovec_cnt)
298 {
299 TRACE_FUNC();
300
301 struct virgl_resource *res = virgl_resource_lookup(handle);
302 struct vrend_transfer_info transfer_info;
303
304 if (!res)
305 return EINVAL;
306
307 transfer_info.level = level;
308 transfer_info.stride = stride;
309 transfer_info.layer_stride = layer_stride;
310 transfer_info.box = (struct pipe_box *)box;
311 transfer_info.offset = offset;
312 transfer_info.iovec = iovec;
313 transfer_info.iovec_cnt = iovec_cnt;
314 transfer_info.synchronized = false;
315
316 if (ctx_id) {
317 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
318 if (!ctx)
319 return EINVAL;
320
321 return ctx->transfer_3d(ctx, res, &transfer_info,
322 VIRGL_TRANSFER_TO_HOST);
323 } else {
324 if (!res->pipe_resource)
325 return EINVAL;
326
327 return vrend_renderer_transfer_pipe(res->pipe_resource, &transfer_info,
328 VIRGL_TRANSFER_TO_HOST);
329 }
330 }
331
virgl_renderer_transfer_read_iov(uint32_t handle,uint32_t ctx_id,uint32_t level,uint32_t stride,uint32_t layer_stride,struct virgl_box * box,uint64_t offset,struct iovec * iovec,int iovec_cnt)332 int virgl_renderer_transfer_read_iov(uint32_t handle, uint32_t ctx_id,
333 uint32_t level, uint32_t stride,
334 uint32_t layer_stride,
335 struct virgl_box *box,
336 uint64_t offset, struct iovec *iovec,
337 int iovec_cnt)
338 {
339 TRACE_FUNC();
340 struct virgl_resource *res = virgl_resource_lookup(handle);
341 struct vrend_transfer_info transfer_info;
342
343 if (!res)
344 return EINVAL;
345
346 transfer_info.level = level;
347 transfer_info.stride = stride;
348 transfer_info.layer_stride = layer_stride;
349 transfer_info.box = (struct pipe_box *)box;
350 transfer_info.offset = offset;
351 transfer_info.iovec = iovec;
352 transfer_info.iovec_cnt = iovec_cnt;
353 transfer_info.synchronized = false;
354
355 if (ctx_id) {
356 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
357 if (!ctx)
358 return EINVAL;
359
360 return ctx->transfer_3d(ctx, res, &transfer_info,
361 VIRGL_TRANSFER_FROM_HOST);
362 } else {
363 if (!res->pipe_resource)
364 return EINVAL;
365
366 return vrend_renderer_transfer_pipe(res->pipe_resource, &transfer_info,
367 VIRGL_TRANSFER_FROM_HOST);
368 }
369 }
370
/* Attach guest backing iovecs to an existing resource. */
int virgl_renderer_resource_attach_iov(int res_handle, struct iovec *iov,
                                       int num_iovs)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);

   return res ? virgl_resource_attach_iov(res, iov, num_iovs) : EINVAL;
}
381
virgl_renderer_resource_detach_iov(int res_handle,struct iovec ** iov_p,int * num_iovs_p)382 void virgl_renderer_resource_detach_iov(int res_handle, struct iovec **iov_p, int *num_iovs_p)
383 {
384 TRACE_FUNC();
385 struct virgl_resource *res = virgl_resource_lookup(res_handle);
386 if (!res)
387 return;
388
389 if (iov_p)
390 *iov_p = (struct iovec *)res->iov;
391 if (num_iovs_p)
392 *num_iovs_p = res->iov_count;
393
394 virgl_resource_detach_iov(res);
395 }
396
/* Create a ctx0 (legacy) fence.  The 32-bit client fence id is widened
 * internally; retirement comes back through ctx0_fence_retire(). */
int virgl_renderer_create_fence(int client_fence_id, UNUSED uint32_t ctx_id)
{
   TRACE_FUNC();
   const uint32_t fence_id = (uint32_t)client_fence_id;

   if (!state.vrend_initialized)
      return EINVAL;
   return vrend_renderer_create_ctx0_fence(fence_id);
}
405
virgl_renderer_context_create_fence(uint32_t ctx_id,uint32_t flags,uint32_t ring_idx,uint64_t fence_id)406 int virgl_renderer_context_create_fence(uint32_t ctx_id,
407 uint32_t flags,
408 uint32_t ring_idx,
409 uint64_t fence_id)
410 {
411 TRACE_FUNC();
412 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
413 if (!ctx)
414 return -EINVAL;
415
416 assert(state.cbs->version >= 3 && state.cbs->write_context_fence);
417 return ctx->submit_fence(ctx, flags, ring_idx, fence_id);
418 }
419
virgl_renderer_context_poll(uint32_t ctx_id)420 void virgl_renderer_context_poll(uint32_t ctx_id)
421 {
422 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
423 if (!ctx)
424 return;
425
426 ctx->retire_fences(ctx);
427 }
428
virgl_renderer_context_get_poll_fd(uint32_t ctx_id)429 int virgl_renderer_context_get_poll_fd(uint32_t ctx_id)
430 {
431 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
432 if (!ctx)
433 return -1;
434
435 return ctx->get_fencing_fd(ctx);
436 }
437
virgl_renderer_force_ctx_0(void)438 void virgl_renderer_force_ctx_0(void)
439 {
440 if (state.vrend_initialized)
441 vrend_renderer_force_ctx_0();
442 }
443
virgl_renderer_ctx_attach_resource(int ctx_id,int res_handle)444 void virgl_renderer_ctx_attach_resource(int ctx_id, int res_handle)
445 {
446 TRACE_FUNC();
447 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
448 struct virgl_resource *res = virgl_resource_lookup(res_handle);
449 if (!ctx || !res)
450 return;
451 ctx->attach_resource(ctx, res);
452 }
453
virgl_renderer_ctx_detach_resource(int ctx_id,int res_handle)454 void virgl_renderer_ctx_detach_resource(int ctx_id, int res_handle)
455 {
456 TRACE_FUNC();
457 struct virgl_context *ctx = virgl_context_lookup(ctx_id);
458 struct virgl_resource *res = virgl_resource_lookup(res_handle);
459 if (!ctx || !res)
460 return;
461 ctx->detach_resource(ctx, res);
462 }
463
virgl_renderer_resource_get_info(int res_handle,struct virgl_renderer_resource_info * info)464 int virgl_renderer_resource_get_info(int res_handle,
465 struct virgl_renderer_resource_info *info)
466 {
467 TRACE_FUNC();
468 struct virgl_resource *res = virgl_resource_lookup(res_handle);
469
470 if (!res || !res->pipe_resource)
471 return EINVAL;
472 if (!info)
473 return EINVAL;
474
475 vrend_renderer_resource_get_info(res->pipe_resource,
476 (struct vrend_renderer_resource_info *)info);
477 info->handle = res_handle;
478
479 if (state.winsys_initialized) {
480 return vrend_winsys_get_fourcc_for_texture(info->tex_id,
481 info->virgl_format,
482 &info->drm_fourcc);
483 }
484
485 return 0;
486 }
487
virgl_renderer_get_cap_set(uint32_t cap_set,uint32_t * max_ver,uint32_t * max_size)488 void virgl_renderer_get_cap_set(uint32_t cap_set, uint32_t *max_ver,
489 uint32_t *max_size)
490 {
491 TRACE_FUNC();
492
493 /* this may be called before virgl_renderer_init */
494 switch (cap_set) {
495 case VIRGL_RENDERER_CAPSET_VIRGL:
496 case VIRGL_RENDERER_CAPSET_VIRGL2:
497 vrend_renderer_get_cap_set(cap_set, max_ver, max_size);
498 break;
499 case VIRGL_RENDERER_CAPSET_VENUS:
500 *max_ver = 0;
501 *max_size = vkr_get_capset(NULL);
502 break;
503 case VIRGL_RENDERER_CAPSET_DRM:
504 *max_ver = 0;
505 *max_size = drm_renderer_capset(NULL);
506 break;
507 default:
508 *max_ver = 0;
509 *max_size = 0;
510 break;
511 }
512 }
513
virgl_renderer_get_rect(int resource_id,struct iovec * iov,unsigned int num_iovs,uint32_t offset,int x,int y,int width,int height)514 void virgl_renderer_get_rect(int resource_id, struct iovec *iov, unsigned int num_iovs,
515 uint32_t offset, int x, int y, int width, int height)
516 {
517 TRACE_FUNC();
518 struct virgl_resource *res = virgl_resource_lookup(resource_id);
519 if (!res || !res->pipe_resource)
520 return;
521
522 vrend_renderer_get_rect(res->pipe_resource, iov, num_iovs, offset, x, y,
523 width, height);
524 }
525
526
/* Retire hook for ctx0 fences: report back through the legacy write_fence
 * callback.  ctx0 fence ids originate from uint32_t (see
 * virgl_renderer_create_fence), so narrowing is lossless. */
static void ctx0_fence_retire(uint64_t fence_id, UNUSED void *retire_data)
{
   assert((fence_id >> 32) == 0);
   state.cbs->write_fence(state.cookie, (uint32_t)fence_id);
}
534
create_gl_context(int scanout_idx,struct virgl_gl_ctx_param * param)535 static virgl_renderer_gl_context create_gl_context(int scanout_idx, struct virgl_gl_ctx_param *param)
536 {
537 struct virgl_renderer_gl_ctx_param vparam;
538
539 if (state.winsys_initialized)
540 return vrend_winsys_create_context(param);
541
542 vparam.version = 1;
543 vparam.shared = param->shared;
544 vparam.major_ver = param->major_ver;
545 vparam.minor_ver = param->minor_ver;
546 return state.cbs->create_gl_context(state.cookie, scanout_idx, &vparam);
547 }
548
/* vrend callback: destroy a GL context created by create_gl_context(). */
static void destroy_gl_context(virgl_renderer_gl_context ctx)
{
   if (state.winsys_initialized)
      vrend_winsys_destroy_context(ctx);
   else
      state.cbs->destroy_gl_context(state.cookie, ctx);
}
558
/* vrend callback: make a GL context current on this thread. */
static int make_current(virgl_renderer_gl_context ctx)
{
   if (state.winsys_initialized)
      return vrend_winsys_make_context_current(ctx);

   return state.cbs->make_current(state.cookie, 0, ctx);
}
566
get_drm_fd(void)567 static int get_drm_fd(void)
568 {
569 if (state.cbs->get_drm_fd)
570 return state.cbs->get_drm_fd(state.cookie);
571
572 return -1;
573 }
574
575 static const struct vrend_if_cbs vrend_cbs = {
576 ctx0_fence_retire,
577 create_gl_context,
578 destroy_gl_context,
579 make_current,
580 get_drm_fd,
581 };
582
583 static int
proxy_renderer_cb_get_server_fd(uint32_t version)584 proxy_renderer_cb_get_server_fd(uint32_t version)
585 {
586 if (state.cbs && state.cbs->version >= 3 && state.cbs->get_server_fd)
587 return state.cbs->get_server_fd(state.cookie, version);
588 else
589 return -1;
590 }
591
592 static const struct proxy_renderer_cbs proxy_cbs = {
593 proxy_renderer_cb_get_server_fd,
594 };
595
virgl_renderer_get_cursor_data(uint32_t resource_id,uint32_t * width,uint32_t * height)596 void *virgl_renderer_get_cursor_data(uint32_t resource_id, uint32_t *width, uint32_t *height)
597 {
598 struct virgl_resource *res = virgl_resource_lookup(resource_id);
599 if (!res || !res->pipe_resource)
600 return NULL;
601
602 vrend_renderer_force_ctx_0();
603 return vrend_renderer_get_cursor_contents(res->pipe_resource,
604 width,
605 height);
606 }
607
608 static bool
virgl_context_foreach_retire_fences(struct virgl_context * ctx,UNUSED void * data)609 virgl_context_foreach_retire_fences(struct virgl_context *ctx,
610 UNUSED void* data)
611 {
612 /* vrend contexts are polled explicitly by the caller */
613 if (ctx->capset_id != VIRGL_RENDERER_CAPSET_VIRGL &&
614 ctx->capset_id != VIRGL_RENDERER_CAPSET_VIRGL2)
615 {
616 assert(ctx->retire_fences);
617 ctx->retire_fences(ctx);
618 }
619 return true;
620 }
621
virgl_renderer_poll(void)622 void virgl_renderer_poll(void)
623 {
624 TRACE_FUNC();
625 if (state.vrend_initialized)
626 vrend_renderer_poll();
627
628 struct virgl_context_foreach_args args;
629 args.callback = virgl_context_foreach_retire_fences;
630 virgl_context_foreach(&args);
631 }
632
/* Tear everything down in reverse dependency order: contexts before
 * resources, sub-renderers before the winsys.  Safe to call on a
 * partially initialized state (also used by the init failure path);
 * ends by zeroing `state` so init can run again. */
void virgl_renderer_cleanup(UNUSED void *cookie)
{
   TRACE_FUNC();

   if (state.vrend_initialized)
      vrend_renderer_prepare_reset();

   if (state.context_initialized)
      virgl_context_table_cleanup();

   if (state.resource_initialized)
      virgl_resource_table_cleanup();

   if (state.proxy_initialized)
      proxy_renderer_fini();

   if (state.vkr_initialized) {
      vkr_renderer_fini();
      /* vkr_allocator_init is called on-demand upon the first map */
      vkr_allocator_fini();
   }

   if (state.vrend_initialized)
      vrend_renderer_fini();

   if (state.winsys_initialized || state.external_winsys_initialized)
      vrend_winsys_cleanup();

   drm_renderer_fini();

   memset(&state, 0, sizeof(state));
}
664
virgl_renderer_init(void * cookie,int flags,struct virgl_renderer_callbacks * cbs)665 int virgl_renderer_init(void *cookie, int flags, struct virgl_renderer_callbacks *cbs)
666 {
667 TRACE_INIT();
668 TRACE_FUNC();
669
670 int ret;
671
672 /* VIRGL_RENDERER_THREAD_SYNC is a hint and can be silently ignored */
673 if (!has_eventfd() || getenv("VIRGL_DISABLE_MT"))
674 flags &= ~VIRGL_RENDERER_THREAD_SYNC;
675
676 if (state.client_initialized && (state.cookie != cookie ||
677 state.flags != flags ||
678 state.cbs != cbs))
679 return -EBUSY;
680
681 if (!state.client_initialized) {
682 if (!cbs ||
683 cbs->version < 1 ||
684 cbs->version > VIRGL_RENDERER_CALLBACKS_VERSION)
685 return -1;
686
687 state.cookie = cookie;
688 state.flags = flags;
689 state.cbs = cbs;
690 state.client_initialized = true;
691 }
692
693 if (!state.resource_initialized) {
694 const struct virgl_resource_pipe_callbacks *pipe_cbs =
695 (flags & VIRGL_RENDERER_NO_VIRGL) ? NULL :
696 vrend_renderer_get_pipe_callbacks();
697
698 ret = virgl_resource_table_init(pipe_cbs);
699 if (ret)
700 goto fail;
701 state.resource_initialized = true;
702 }
703
704 if (!state.context_initialized) {
705 ret = virgl_context_table_init();
706 if (ret)
707 goto fail;
708 state.context_initialized = true;
709 }
710
711 if (!state.winsys_initialized && !(flags & VIRGL_RENDERER_NO_VIRGL) &&
712 (flags & (VIRGL_RENDERER_USE_EGL | VIRGL_RENDERER_USE_GLX))) {
713 int drm_fd = -1;
714
715 if (flags & VIRGL_RENDERER_USE_EGL) {
716 if (cbs->version >= 2 && cbs->get_drm_fd)
717 drm_fd = cbs->get_drm_fd(cookie);
718 }
719
720 ret = vrend_winsys_init(flags, drm_fd);
721 if (ret) {
722 if (drm_fd >= 0)
723 close(drm_fd);
724 goto fail;
725 }
726 state.winsys_initialized = true;
727 }
728
729 if (!state.winsys_initialized && !state.external_winsys_initialized &&
730 state.cbs && state.cbs->version >= 4 && state.cbs->get_egl_display) {
731 void *egl_display = NULL;
732
733 if (!cbs->create_gl_context || !cbs->destroy_gl_context ||
734 !cbs->make_current) {
735 ret = EINVAL;
736 goto fail;
737 }
738
739 egl_display = state.cbs->get_egl_display(cookie);
740
741 if (!egl_display) {
742 ret = -1;
743 goto fail;
744 }
745 ret = vrend_winsys_init_external(egl_display);
746
747 if (ret) {
748 ret = -1;
749 goto fail;
750 }
751
752 state.external_winsys_initialized = true;
753 }
754
755 if (!state.vrend_initialized && !(flags & VIRGL_RENDERER_NO_VIRGL)) {
756 uint32_t renderer_flags = 0;
757
758 if (!cookie || !cbs) {
759 ret = -1;
760 goto fail;
761 }
762
763 if (flags & VIRGL_RENDERER_THREAD_SYNC)
764 renderer_flags |= VREND_USE_THREAD_SYNC;
765 if (flags & VIRGL_RENDERER_ASYNC_FENCE_CB)
766 renderer_flags |= VREND_USE_ASYNC_FENCE_CB;
767 if (flags & VIRGL_RENDERER_USE_EXTERNAL_BLOB)
768 renderer_flags |= VREND_USE_EXTERNAL_BLOB;
769 if (flags & VIRGL_RENDERER_USE_VIDEO)
770 renderer_flags |= VREND_USE_VIDEO;
771
772 ret = vrend_renderer_init(&vrend_cbs, renderer_flags);
773 if (ret)
774 goto fail;
775 state.vrend_initialized = true;
776 }
777
778 if (!state.vkr_initialized && (flags & VIRGL_RENDERER_VENUS)) {
779 uint32_t vkr_flags = 0;
780 if (flags & VIRGL_RENDERER_THREAD_SYNC)
781 vkr_flags |= VKR_RENDERER_THREAD_SYNC;
782 if (flags & VIRGL_RENDERER_ASYNC_FENCE_CB)
783 vkr_flags |= VKR_RENDERER_ASYNC_FENCE_CB;
784 if (flags & VIRGL_RENDERER_RENDER_SERVER)
785 vkr_flags |= VKR_RENDERER_RENDER_SERVER;
786
787 ret = vkr_renderer_init(vkr_flags);
788 if (ret)
789 goto fail;
790 state.vkr_initialized = true;
791 }
792
793 if (!state.proxy_initialized && (flags & VIRGL_RENDERER_RENDER_SERVER)) {
794 ret = proxy_renderer_init(&proxy_cbs, flags | VIRGL_RENDERER_NO_VIRGL);
795 if (ret)
796 goto fail;
797 state.proxy_initialized = true;
798 }
799
800 if ((flags & VIRGL_RENDERER_ASYNC_FENCE_CB) &&
801 (flags & VIRGL_RENDERER_DRM)) {
802 int drm_fd = -1;
803
804 if (cbs->version >= 2 && cbs->get_drm_fd)
805 drm_fd = cbs->get_drm_fd(cookie);
806
807 drm_renderer_init(drm_fd);
808 }
809
810 return 0;
811
812 fail:
813 virgl_renderer_cleanup(NULL);
814 return ret;
815 }
816
virgl_renderer_get_fd_for_texture(uint32_t tex_id,int * fd)817 int virgl_renderer_get_fd_for_texture(uint32_t tex_id, int *fd)
818 {
819 TRACE_FUNC();
820 if (state.winsys_initialized)
821 return vrend_winsys_get_fd_for_texture(tex_id, fd);
822 return -1;
823 }
824
virgl_renderer_get_fd_for_texture2(uint32_t tex_id,int * fd,int * stride,int * offset)825 int virgl_renderer_get_fd_for_texture2(uint32_t tex_id, int *fd, int *stride, int *offset)
826 {
827 TRACE_FUNC();
828 if (state.winsys_initialized)
829 return vrend_winsys_get_fd_for_texture2(tex_id, fd, stride, offset);
830 return -1;
831 }
832
virgl_renderer_reset(void)833 void virgl_renderer_reset(void)
834 {
835 TRACE_FUNC();
836 if (state.vrend_initialized)
837 vrend_renderer_prepare_reset();
838
839 if (state.context_initialized)
840 virgl_context_table_reset();
841
842 if (state.resource_initialized)
843 virgl_resource_table_reset();
844
845 if (state.proxy_initialized)
846 proxy_renderer_reset();
847
848 if (state.vkr_initialized)
849 vkr_renderer_reset();
850
851 if (state.vrend_initialized)
852 vrend_renderer_reset();
853
854 drm_renderer_reset();
855 }
856
virgl_renderer_get_poll_fd(void)857 int virgl_renderer_get_poll_fd(void)
858 {
859 TRACE_FUNC();
860 if (state.vrend_initialized)
861 return vrend_renderer_get_poll_fd();
862
863 return -1;
864 }
865
/* Install a debug log callback; returns the previously installed one. */
virgl_debug_callback_type virgl_set_debug_callback(virgl_debug_callback_type cb)
{
   return virgl_log_set_logger(cb);
}
870
virgl_renderer_export_query(void * execute_args,uint32_t execute_size)871 static int virgl_renderer_export_query(void *execute_args, uint32_t execute_size)
872 {
873 struct virgl_resource *res;
874 struct virgl_renderer_export_query *export_query = execute_args;
875 if (execute_size != sizeof(struct virgl_renderer_export_query))
876 return -EINVAL;
877
878 if (export_query->hdr.size != sizeof(struct virgl_renderer_export_query))
879 return -EINVAL;
880
881 res = virgl_resource_lookup(export_query->in_resource_id);
882 if (!res)
883 return -EINVAL;
884
885
886 if (res->pipe_resource) {
887 return vrend_renderer_export_query(res->pipe_resource, export_query);
888 } else if (!export_query->in_export_fds) {
889 /* Untyped resources are expected to be exported with
890 * virgl_renderer_resource_export_blob instead and have no type
891 * information. But when this is called to query (in_export_fds is
892 * false) an untyped resource, we should return sane values.
893 */
894 export_query->out_num_fds = 1;
895 export_query->out_fourcc = 0;
896 export_query->out_fds[0] = -1;
897 export_query->out_strides[0] = 0;
898 export_query->out_offsets[0] = 0;
899 export_query->out_modifier = DRM_FORMAT_MOD_INVALID;
900 return 0;
901 } else {
902 return -EINVAL;
903 }
904 }
905
virgl_renderer_supported_structures(void * execute_args,uint32_t execute_size)906 static int virgl_renderer_supported_structures(void *execute_args, uint32_t execute_size)
907 {
908 struct virgl_renderer_supported_structures *supported_structures = execute_args;
909 if (execute_size != sizeof(struct virgl_renderer_supported_structures))
910 return -EINVAL;
911
912 if (supported_structures->hdr.size != sizeof(struct virgl_renderer_supported_structures))
913 return -EINVAL;
914
915 if (supported_structures->in_stype_version == 0) {
916 supported_structures->out_supported_structures_mask =
917 VIRGL_RENDERER_STRUCTURE_TYPE_EXPORT_QUERY |
918 VIRGL_RENDERER_STRUCTURE_TYPE_SUPPORTED_STRUCTURES;
919 } else {
920 supported_structures->out_supported_structures_mask = 0;
921 }
922
923 return 0;
924 }
925
virgl_renderer_execute(void * execute_args,uint32_t execute_size)926 int virgl_renderer_execute(void *execute_args, uint32_t execute_size)
927 {
928 TRACE_FUNC();
929 struct virgl_renderer_hdr *hdr = execute_args;
930 if (hdr->stype_version != 0)
931 return -EINVAL;
932
933 switch (hdr->stype) {
934 case VIRGL_RENDERER_STRUCTURE_TYPE_SUPPORTED_STRUCTURES:
935 return virgl_renderer_supported_structures(execute_args, execute_size);
936 case VIRGL_RENDERER_STRUCTURE_TYPE_EXPORT_QUERY:
937 return virgl_renderer_export_query(execute_args, execute_size);
938 default:
939 return -EINVAL;
940 }
941 }
942
virgl_renderer_resource_create_blob(const struct virgl_renderer_resource_create_blob_args * args)943 int virgl_renderer_resource_create_blob(const struct virgl_renderer_resource_create_blob_args *args)
944 {
945 TRACE_FUNC();
946 struct virgl_resource *res;
947 struct virgl_context *ctx;
948 struct virgl_context_blob blob;
949 bool has_host_storage;
950 bool has_guest_storage;
951 int ret;
952
953 switch (args->blob_mem) {
954 case VIRGL_RENDERER_BLOB_MEM_GUEST:
955 has_host_storage = false;
956 has_guest_storage = true;
957 break;
958 case VIRGL_RENDERER_BLOB_MEM_HOST3D:
959 has_host_storage = true;
960 has_guest_storage = false;
961 break;
962 case VIRGL_RENDERER_BLOB_MEM_HOST3D_GUEST:
963 has_host_storage = true;
964 has_guest_storage = true;
965 break;
966 default:
967 return -EINVAL;
968 }
969
970 /* user resource id must be greater than 0 */
971 if (args->res_handle == 0)
972 return -EINVAL;
973
974 /* user resource id must be unique */
975 if (virgl_resource_lookup(args->res_handle))
976 return -EINVAL;
977
978 if (args->size == 0)
979 return -EINVAL;
980 if (has_guest_storage) {
981 const size_t iov_size = vrend_get_iovec_size(args->iovecs, args->num_iovs);
982 if (iov_size < args->size)
983 return -EINVAL;
984 } else {
985 if (args->num_iovs)
986 return -EINVAL;
987 }
988
989 if (!has_host_storage) {
990 res = virgl_resource_create_from_iov(args->res_handle,
991 args->iovecs,
992 args->num_iovs);
993 if (!res)
994 return -ENOMEM;
995
996 res->map_info = VIRGL_RENDERER_MAP_CACHE_CACHED;
997 return 0;
998 }
999
1000 ctx = virgl_context_lookup(args->ctx_id);
1001 if (!ctx)
1002 return -EINVAL;
1003
1004 ret = ctx->get_blob(ctx, args->res_handle, args->blob_id, args->size, args->blob_flags, &blob);
1005 if (ret)
1006 return ret;
1007
1008 if (blob.type == VIRGL_RESOURCE_OPAQUE_HANDLE) {
1009 assert(!(args->blob_flags & VIRGL_RENDERER_BLOB_FLAG_USE_SHAREABLE));
1010 res = virgl_resource_create_from_opaque_handle(ctx, args->res_handle, blob.u.opaque_handle);
1011 if (!res)
1012 return -ENOMEM;
1013 } else if (blob.type != VIRGL_RESOURCE_FD_INVALID) {
1014 res = virgl_resource_create_from_fd(args->res_handle,
1015 blob.type,
1016 blob.u.fd,
1017 args->iovecs,
1018 args->num_iovs,
1019 &blob.opaque_fd_metadata);
1020 if (!res) {
1021 close(blob.u.fd);
1022 return -ENOMEM;
1023 }
1024 } else {
1025 res = virgl_resource_create_from_pipe(args->res_handle,
1026 blob.u.pipe_resource,
1027 args->iovecs,
1028 args->num_iovs);
1029 if (!res) {
1030 vrend_renderer_resource_destroy((struct vrend_resource *)blob.u.pipe_resource);
1031 return -ENOMEM;
1032 }
1033 }
1034
1035 res->map_info = blob.map_info;
1036 res->map_size = args->size;
1037
1038 return 0;
1039 }
1040
virgl_renderer_resource_map(uint32_t res_handle,void ** out_map,uint64_t * out_size)1041 int virgl_renderer_resource_map(uint32_t res_handle, void **out_map, uint64_t *out_size)
1042 {
1043 TRACE_FUNC();
1044 int ret = 0;
1045 void *map = NULL;
1046 uint64_t map_size = 0;
1047 struct virgl_resource *res = virgl_resource_lookup(res_handle);
1048 if (!res || res->mapped)
1049 return -EINVAL;
1050
1051 if (res->pipe_resource) {
1052 ret = vrend_renderer_resource_map(res->pipe_resource, &map, &map_size);
1053 if (!ret)
1054 res->map_size = map_size;
1055 } else {
1056 switch (res->fd_type) {
1057 case VIRGL_RESOURCE_FD_DMABUF:
1058 case VIRGL_RESOURCE_FD_SHM:
1059 map = mmap(NULL, res->map_size, PROT_WRITE | PROT_READ, MAP_SHARED, res->fd, 0);
1060 map_size = res->map_size;
1061 break;
1062 case VIRGL_RESOURCE_FD_OPAQUE:
1063 ret = vkr_allocator_resource_map(res, &map, &map_size);
1064 break;
1065 default:
1066 break;
1067 }
1068 }
1069
1070 if (!map || map == MAP_FAILED)
1071 return -EINVAL;
1072
1073 res->mapped = map;
1074 *out_map = map;
1075 *out_size = map_size;
1076 return ret;
1077 }
1078
virgl_renderer_resource_unmap(uint32_t res_handle)1079 int virgl_renderer_resource_unmap(uint32_t res_handle)
1080 {
1081 TRACE_FUNC();
1082 int ret;
1083 struct virgl_resource *res = virgl_resource_lookup(res_handle);
1084 if (!res || !res->mapped)
1085 return -EINVAL;
1086
1087 if (res->pipe_resource) {
1088 ret = vrend_renderer_resource_unmap(res->pipe_resource);
1089 } else {
1090 switch (res->fd_type) {
1091 case VIRGL_RESOURCE_FD_DMABUF:
1092 ret = munmap(res->mapped, res->map_size);
1093 break;
1094 case VIRGL_RESOURCE_FD_OPAQUE:
1095 ret = vkr_allocator_resource_unmap(res);
1096 break;
1097 default:
1098 ret = -EINVAL;
1099 break;
1100 }
1101 }
1102
1103 assert(!ret);
1104 res->mapped = NULL;
1105 return ret;
1106 }
1107
virgl_renderer_resource_get_map_info(uint32_t res_handle,uint32_t * map_info)1108 int virgl_renderer_resource_get_map_info(uint32_t res_handle, uint32_t *map_info)
1109 {
1110 TRACE_FUNC();
1111 struct virgl_resource *res = virgl_resource_lookup(res_handle);
1112 if (!res)
1113 return -EINVAL;
1114
1115 if ((res->map_info & VIRGL_RENDERER_MAP_CACHE_MASK) ==
1116 VIRGL_RENDERER_MAP_CACHE_NONE)
1117 return -EINVAL;
1118
1119 *map_info = res->map_info;
1120 return 0;
1121 }
1122
1123 int
virgl_renderer_resource_export_blob(uint32_t res_id,uint32_t * fd_type,int * fd)1124 virgl_renderer_resource_export_blob(uint32_t res_id, uint32_t *fd_type, int *fd)
1125 {
1126 TRACE_FUNC();
1127 struct virgl_resource *res = virgl_resource_lookup(res_id);
1128 if (!res)
1129 return EINVAL;
1130
1131 switch (virgl_resource_export_fd(res, fd)) {
1132 case VIRGL_RESOURCE_FD_DMABUF:
1133 *fd_type = VIRGL_RENDERER_BLOB_FD_TYPE_DMABUF;
1134 break;
1135 case VIRGL_RESOURCE_FD_OPAQUE:
1136 *fd_type = VIRGL_RENDERER_BLOB_FD_TYPE_OPAQUE;
1137 break;
1138 case VIRGL_RESOURCE_FD_SHM:
1139 *fd_type = VIRGL_RENDERER_BLOB_FD_TYPE_SHM;
1140 break;
1141 default:
1142 return EINVAL;
1143 }
1144
1145 return 0;
1146 }
1147
1148 int
virgl_renderer_resource_import_blob(const struct virgl_renderer_resource_import_blob_args * args)1149 virgl_renderer_resource_import_blob(const struct virgl_renderer_resource_import_blob_args *args)
1150 {
1151 TRACE_FUNC();
1152 struct virgl_resource *res;
1153
1154 /* user resource id must be greater than 0 */
1155 if (args->res_handle == 0)
1156 return -EINVAL;
1157
1158 /* user resource id must be unique */
1159 if (virgl_resource_lookup(args->res_handle))
1160 return -EINVAL;
1161
1162 switch (args->blob_mem) {
1163 case VIRGL_RENDERER_BLOB_MEM_HOST3D:
1164 case VIRGL_RENDERER_BLOB_MEM_GUEST_VRAM:
1165 break;
1166 default:
1167 return -EINVAL;
1168 }
1169
1170 enum virgl_resource_fd_type fd_type = VIRGL_RESOURCE_FD_INVALID;
1171 switch (args->fd_type) {
1172 case VIRGL_RENDERER_BLOB_FD_TYPE_DMABUF:
1173 fd_type = VIRGL_RESOURCE_FD_DMABUF;
1174 break;
1175 case VIRGL_RENDERER_BLOB_FD_TYPE_OPAQUE:
1176 fd_type = VIRGL_RESOURCE_FD_OPAQUE;
1177 break;
1178 case VIRGL_RENDERER_BLOB_FD_TYPE_SHM:
1179 fd_type = VIRGL_RESOURCE_FD_SHM;
1180 break;
1181 default:
1182 return -EINVAL;
1183 }
1184
1185 if (args->fd < 0)
1186 return -EINVAL;
1187 if (args->size == 0)
1188 return -EINVAL;
1189
1190 res = virgl_resource_create_from_fd(args->res_handle,
1191 fd_type,
1192 args->fd,
1193 NULL,
1194 0,
1195 NULL);
1196 if (!res)
1197 return -ENOMEM;
1198
1199 res->map_info = 0;
1200 res->map_size = args->size;
1201
1202 return 0;
1203 }
1204
/**
 * Export a fence from the default (ctx0) context as a file descriptor.
 *
 * Thin wrapper: the work is done by vrend_renderer_export_ctx0_fence(),
 * whose return value is passed through unchanged.
 */
int
virgl_renderer_export_fence(uint32_t client_fence_id, int *fd)
{
   TRACE_FUNC();
   const int ret = vrend_renderer_export_ctx0_fence(client_fence_id, fd);
   return ret;
}
1211