1 /*
2 * Copyright © 2011 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Benjamin Franzke <[email protected]>
26 */
27
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <stddef.h>
31 #include <stdint.h>
32 #include <stdbool.h>
33 #include <string.h>
34 #include <errno.h>
35 #include <limits.h>
36 #include <assert.h>
37 #include <sys/types.h>
38 #include <unistd.h>
39 #include <dlfcn.h>
40 #include <xf86drm.h>
41 #include "drm-uapi/drm_fourcc.h"
42 #include <GL/gl.h> /* mesa_interface needs GL types */
43
44 #include "mesa_interface.h"
45 #include "gbm_driint.h"
46 #include "gbmint.h"
47 #include "loader_dri_helper.h"
48 #include "kopper_interface.h"
49 #include "loader.h"
50 #include "util/u_debug.h"
51 #include "util/macros.h"
52 #include "dri_util.h"
53 #include "pipe/p_screen.h"
54 #include "dri_screen.h"
55
56 #include "gbm_backend_abi.h"
57
58 /* For importing wl_buffer */
59 #if HAVE_WAYLAND_PLATFORM
60 #include "wayland-drm.h"
61 #endif
62
63 static const struct gbm_core *core;
64
65 static GLboolean
dri_validate_egl_image(void * image,void * data)66 dri_validate_egl_image(void *image, void *data)
67 {
68 struct gbm_dri_device *dri = data;
69
70 if (dri->validate_image == NULL)
71 return false;
72
73 return dri->validate_image(image, dri->lookup_user_data);
74 }
75
76 static __DRIimage *
dri_lookup_egl_image_validated(void * image,void * data)77 dri_lookup_egl_image_validated(void *image, void *data)
78 {
79 struct gbm_dri_device *dri = data;
80
81 if (dri->lookup_image_validated == NULL)
82 return NULL;
83
84 return dri->lookup_image_validated(image, dri->lookup_user_data);
85 }
86
87 static void
dri_flush_front_buffer(__DRIdrawable * driDrawable,void * data)88 dri_flush_front_buffer(__DRIdrawable * driDrawable, void *data)
89 {
90 struct gbm_dri_surface *surf = data;
91 struct gbm_dri_device *dri = gbm_dri_device(surf->base.gbm);
92
93 if (dri->flush_front_buffer != NULL)
94 dri->flush_front_buffer(driDrawable, surf->dri_private);
95 }
96
97 static unsigned
dri_get_capability(void * loaderPrivate,enum dri_loader_cap cap)98 dri_get_capability(void *loaderPrivate, enum dri_loader_cap cap)
99 {
100 /* Note: loaderPrivate is _EGLDisplay* */
101 switch (cap) {
102 case DRI_LOADER_CAP_FP16:
103 return 1;
104 case DRI_LOADER_CAP_RGBA_ORDERING:
105 return 1;
106 default:
107 return 0;
108 }
109 }
110
111 static int
image_get_buffers(__DRIdrawable * driDrawable,unsigned int format,uint32_t * stamp,void * loaderPrivate,uint32_t buffer_mask,struct __DRIimageList * buffers)112 image_get_buffers(__DRIdrawable *driDrawable,
113 unsigned int format,
114 uint32_t *stamp,
115 void *loaderPrivate,
116 uint32_t buffer_mask,
117 struct __DRIimageList *buffers)
118 {
119 struct gbm_dri_surface *surf = loaderPrivate;
120 struct gbm_dri_device *dri = gbm_dri_device(surf->base.gbm);
121
122 if (dri->image_get_buffers == NULL)
123 return 0;
124
125 return dri->image_get_buffers(driDrawable, format, stamp,
126 surf->dri_private, buffer_mask, buffers);
127 }
128
129 static void
swrast_get_drawable_info(__DRIdrawable * driDrawable,int * x,int * y,int * width,int * height,void * loaderPrivate)130 swrast_get_drawable_info(__DRIdrawable *driDrawable,
131 int *x,
132 int *y,
133 int *width,
134 int *height,
135 void *loaderPrivate)
136 {
137 struct gbm_dri_surface *surf = loaderPrivate;
138
139 *x = 0;
140 *y = 0;
141 *width = surf->base.v0.width;
142 *height = surf->base.v0.height;
143 }
144
145 static void
swrast_put_image2(__DRIdrawable * driDrawable,int op,int x,int y,int width,int height,int stride,char * data,void * loaderPrivate)146 swrast_put_image2(__DRIdrawable *driDrawable,
147 int op,
148 int x,
149 int y,
150 int width,
151 int height,
152 int stride,
153 char *data,
154 void *loaderPrivate)
155 {
156 struct gbm_dri_surface *surf = loaderPrivate;
157 struct gbm_dri_device *dri = gbm_dri_device(surf->base.gbm);
158
159 dri->swrast_put_image2(driDrawable,
160 op, x, y,
161 width, height, stride,
162 data, surf->dri_private);
163 }
164
165 static void
swrast_put_image(__DRIdrawable * driDrawable,int op,int x,int y,int width,int height,char * data,void * loaderPrivate)166 swrast_put_image(__DRIdrawable *driDrawable,
167 int op,
168 int x,
169 int y,
170 int width,
171 int height,
172 char *data,
173 void *loaderPrivate)
174 {
175 swrast_put_image2(driDrawable, op, x, y, width, height,
176 width * 4, data, loaderPrivate);
177 }
178
179 static void
swrast_get_image(__DRIdrawable * driDrawable,int x,int y,int width,int height,char * data,void * loaderPrivate)180 swrast_get_image(__DRIdrawable *driDrawable,
181 int x,
182 int y,
183 int width,
184 int height,
185 char *data,
186 void *loaderPrivate)
187 {
188 struct gbm_dri_surface *surf = loaderPrivate;
189 struct gbm_dri_device *dri = gbm_dri_device(surf->base.gbm);
190
191 dri->swrast_get_image(driDrawable,
192 x, y,
193 width, height,
194 data, surf->dri_private);
195 }
196
/* Tells the driver we honour __DRI2_FLUSH invalidate events. */
static const __DRIuseInvalidateExtension use_invalidate = {
   .base = { __DRI_USE_INVALIDATE, 1 }
};

/* EGLImage lookup: lets the driver resolve EGLImage handles through the
 * validate/lookup callbacks above. */
static const __DRIimageLookupExtension image_lookup_extension = {
   .base = { __DRI_IMAGE_LOOKUP, 2 },

   .validateEGLImage = dri_validate_egl_image,
   .lookupEGLImageValidated = dri_lookup_egl_image_validated,
};

/* Image-based buffer management (the modern, non-DRI2 path). */
static const __DRIimageLoaderExtension image_loader_extension = {
   .base = { __DRI_IMAGE_LOADER, 2 },

   .getBuffers = image_get_buffers,
   .flushFrontBuffer = dri_flush_front_buffer,
   .getCapability = dri_get_capability,
};

/* Software-rasterizer pixel transfer hooks. */
static const __DRIswrastLoaderExtension swrast_loader_extension = {
   .base = { __DRI_SWRAST_LOADER, 2 },

   .getDrawableInfo = swrast_get_drawable_info,
   .putImage = swrast_put_image,
   .getImage = swrast_get_image,
   .putImage2 = swrast_put_image2
};

/* Kopper (Vulkan-backed GL) loader: GBM has no window-system surface
 * info to supply, hence the NULL hook. */
static const __DRIkopperLoaderExtension kopper_loader_extension = {
   .base = { __DRI_KOPPER_LOADER, 1 },

   .SetSurfaceCreateInfo = NULL,
};

/* NULL-terminated list of loader extensions handed to the driver at
 * screen creation. */
static const __DRIextension *gbm_dri_screen_extensions[] = {
   &image_lookup_extension.base,
   &use_invalidate.base,
   &image_loader_extension.base,
   &swrast_loader_extension.base,
   &kopper_loader_extension.base,
   NULL,
};
239
240 static int
dri_screen_create_for_driver(struct gbm_dri_device * dri,char * driver_name,bool driver_name_is_inferred)241 dri_screen_create_for_driver(struct gbm_dri_device *dri, char *driver_name, bool driver_name_is_inferred)
242 {
243 bool swrast = driver_name == NULL; /* If it's pure swrast, not just swkms. */
244 enum dri_screen_type type = DRI_SCREEN_SWRAST;
245 if (!swrast) {
246 if (!strcmp(driver_name, "zink"))
247 type = DRI_SCREEN_KOPPER;
248 else if (!strcmp(driver_name, "kms_swrast"))
249 type = DRI_SCREEN_KMS_SWRAST;
250 else
251 type = DRI_SCREEN_DRI3;
252 }
253
254 dri->driver_name = swrast ? strdup("swrast") : driver_name;
255
256 dri->swrast = swrast;
257
258 dri->loader_extensions = gbm_dri_screen_extensions;
259 dri->screen = driCreateNewScreen3(0, swrast ? -1 : dri->base.v0.fd,
260 dri->loader_extensions,
261 type,
262 &dri->driver_configs, driver_name_is_inferred, true, dri);
263 if (dri->screen == NULL)
264 goto fail;
265
266 dri->lookup_user_data = NULL;
267
268 return 0;
269
270 fail:
271 free(dri->driver_name);
272 return -1;
273 }
274
275 static int
dri_screen_create(struct gbm_dri_device * dri,bool driver_name_is_inferred)276 dri_screen_create(struct gbm_dri_device *dri, bool driver_name_is_inferred)
277 {
278 char *driver_name;
279
280 driver_name = loader_get_driver_for_fd(dri->base.v0.fd);
281 if (!driver_name)
282 return -1;
283
284 return dri_screen_create_for_driver(dri, driver_name, driver_name_is_inferred);
285 }
286
287 static int
dri_screen_create_sw(struct gbm_dri_device * dri,bool driver_name_is_inferred)288 dri_screen_create_sw(struct gbm_dri_device *dri, bool driver_name_is_inferred)
289 {
290 char *driver_name;
291 int ret;
292
293 driver_name = strdup("kms_swrast");
294 if (!driver_name)
295 return -errno;
296
297 ret = dri_screen_create_for_driver(dri, driver_name, driver_name_is_inferred);
298 if (ret != 0)
299 ret = dri_screen_create_for_driver(dri, NULL, driver_name_is_inferred);
300 if (ret != 0)
301 return ret;
302
303 dri->software = true;
304 return 0;
305 }
306
/* Mapping from canonical GBM formats to the corresponding
 * __DRI_IMAGE_FORMAT tokens.  A GBM format absent from this table is not
 * supported by this backend (see gbm_format_to_dri_format()). */
static const struct gbm_dri_visual gbm_dri_visuals_table[] = {
   { GBM_FORMAT_R8, __DRI_IMAGE_FORMAT_R8 },
   { GBM_FORMAT_R16, __DRI_IMAGE_FORMAT_R16 },
   { GBM_FORMAT_GR88, __DRI_IMAGE_FORMAT_GR88 },
   { GBM_FORMAT_GR1616, __DRI_IMAGE_FORMAT_GR1616 },
   { GBM_FORMAT_ARGB1555, __DRI_IMAGE_FORMAT_ARGB1555 },
   { GBM_FORMAT_RGB565, __DRI_IMAGE_FORMAT_RGB565 },
   { GBM_FORMAT_XRGB8888, __DRI_IMAGE_FORMAT_XRGB8888 },
   { GBM_FORMAT_ARGB8888, __DRI_IMAGE_FORMAT_ARGB8888 },
   { GBM_FORMAT_XBGR8888, __DRI_IMAGE_FORMAT_XBGR8888 },
   { GBM_FORMAT_ABGR8888, __DRI_IMAGE_FORMAT_ABGR8888 },
   { GBM_FORMAT_XRGB2101010, __DRI_IMAGE_FORMAT_XRGB2101010 },
   { GBM_FORMAT_ARGB2101010, __DRI_IMAGE_FORMAT_ARGB2101010 },
   { GBM_FORMAT_XBGR2101010, __DRI_IMAGE_FORMAT_XBGR2101010 },
   { GBM_FORMAT_ABGR2101010, __DRI_IMAGE_FORMAT_ABGR2101010 },
   { GBM_FORMAT_XBGR16161616, __DRI_IMAGE_FORMAT_XBGR16161616 },
   { GBM_FORMAT_ABGR16161616, __DRI_IMAGE_FORMAT_ABGR16161616 },
   { GBM_FORMAT_XBGR16161616F, __DRI_IMAGE_FORMAT_XBGR16161616F },
   { GBM_FORMAT_ABGR16161616F, __DRI_IMAGE_FORMAT_ABGR16161616F },
};
327
328 static int
gbm_format_to_dri_format(uint32_t gbm_format)329 gbm_format_to_dri_format(uint32_t gbm_format)
330 {
331 gbm_format = core->v0.format_canonicalize(gbm_format);
332 for (size_t i = 0; i < ARRAY_SIZE(gbm_dri_visuals_table); i++) {
333 if (gbm_dri_visuals_table[i].gbm_format == gbm_format)
334 return gbm_dri_visuals_table[i].dri_image_format;
335 }
336
337 return 0;
338 }
339
340 static int
gbm_dri_is_format_supported(struct gbm_device * gbm,uint32_t format,uint32_t usage)341 gbm_dri_is_format_supported(struct gbm_device *gbm,
342 uint32_t format,
343 uint32_t usage)
344 {
345 struct gbm_dri_device *dri = gbm_dri_device(gbm);
346 int count;
347
348 if ((usage & GBM_BO_USE_CURSOR) && (usage & GBM_BO_USE_RENDERING))
349 return 0;
350
351 format = core->v0.format_canonicalize(format);
352 if (gbm_format_to_dri_format(format) == 0)
353 return 0;
354
355 /* If there is no query, fall back to the small table which was originally
356 * here. */
357 if (!dri->has_dmabuf_import) {
358 switch (format) {
359 case GBM_FORMAT_XRGB8888:
360 case GBM_FORMAT_ARGB8888:
361 case GBM_FORMAT_XBGR8888:
362 return 1;
363 default:
364 return 0;
365 }
366 }
367
368 /* This returns false if the format isn't supported */
369 if (!dri_query_dma_buf_modifiers(dri->screen, format, 0, NULL, NULL,
370 &count))
371 return 0;
372
373 return 1;
374 }
375
376 static int
gbm_dri_get_format_modifier_plane_count(struct gbm_device * gbm,uint32_t format,uint64_t modifier)377 gbm_dri_get_format_modifier_plane_count(struct gbm_device *gbm,
378 uint32_t format,
379 uint64_t modifier)
380 {
381 struct gbm_dri_device *dri = gbm_dri_device(gbm);
382 uint64_t plane_count;
383
384 if (!dri->has_dmabuf_import)
385 return -1;
386
387 format = core->v0.format_canonicalize(format);
388 if (gbm_format_to_dri_format(format) == 0)
389 return -1;
390
391 if (!dri2_query_dma_buf_format_modifier_attribs(dri->screen, format, modifier,
392 __DRI_IMAGE_FORMAT_MODIFIER_ATTRIB_PLANE_COUNT, &plane_count))
393 return -1;
394
395 return plane_count;
396 }
397
398 static int
gbm_dri_bo_write(struct gbm_bo * _bo,const void * buf,size_t count)399 gbm_dri_bo_write(struct gbm_bo *_bo, const void *buf, size_t count)
400 {
401 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
402
403 if (bo->image != NULL) {
404 errno = EINVAL;
405 return -1;
406 }
407
408 memcpy(bo->map, buf, count);
409
410 return 0;
411 }
412
413 static int
gbm_dri_bo_get_fd(struct gbm_bo * _bo)414 gbm_dri_bo_get_fd(struct gbm_bo *_bo)
415 {
416 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
417 int fd;
418
419 if (bo->image == NULL)
420 return -1;
421
422 if (!dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_FD, &fd))
423 return -1;
424
425 return fd;
426 }
427
428 static int
get_number_planes(struct gbm_dri_device * dri,__DRIimage * image)429 get_number_planes(struct gbm_dri_device *dri, __DRIimage *image)
430 {
431 int num_planes = 0;
432
433 /* Dumb buffers are single-plane only. */
434 if (!image)
435 return 1;
436
437 dri2_query_image(image, __DRI_IMAGE_ATTRIB_NUM_PLANES, &num_planes);
438
439 if (num_planes <= 0)
440 num_planes = 1;
441
442 return num_planes;
443 }
444
445 static int
gbm_dri_bo_get_planes(struct gbm_bo * _bo)446 gbm_dri_bo_get_planes(struct gbm_bo *_bo)
447 {
448 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
449 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
450
451 return get_number_planes(dri, bo->image);
452 }
453
454 static union gbm_bo_handle
gbm_dri_bo_get_handle_for_plane(struct gbm_bo * _bo,int plane)455 gbm_dri_bo_get_handle_for_plane(struct gbm_bo *_bo, int plane)
456 {
457 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
458 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
459 union gbm_bo_handle ret;
460 ret.s32 = -1;
461
462 if (plane >= get_number_planes(dri, bo->image)) {
463 errno = EINVAL;
464 return ret;
465 }
466
467 /* dumb BOs can only utilize non-planar formats */
468 if (!bo->image) {
469 assert(plane == 0);
470 ret.s32 = bo->handle;
471 return ret;
472 }
473
474 __DRIimage *image = dri2_from_planar(bo->image, plane, NULL);
475 if (image) {
476 dri2_query_image(image, __DRI_IMAGE_ATTRIB_HANDLE, &ret.s32);
477 dri2_destroy_image(image);
478 } else {
479 assert(plane == 0);
480 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_HANDLE, &ret.s32);
481 }
482
483 return ret;
484 }
485
486 static int
gbm_dri_bo_get_plane_fd(struct gbm_bo * _bo,int plane)487 gbm_dri_bo_get_plane_fd(struct gbm_bo *_bo, int plane)
488 {
489 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
490 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
491 int fd = -1;
492
493 if (!dri->has_dmabuf_import) {
494 /* Preserve legacy behavior if plane is 0 */
495 if (plane == 0)
496 return gbm_dri_bo_get_fd(_bo);
497
498 errno = ENOSYS;
499 return -1;
500 }
501
502 /* dumb BOs can only utilize non-planar formats */
503 if (!bo->image) {
504 errno = EINVAL;
505 return -1;
506 }
507
508 if (plane >= get_number_planes(dri, bo->image)) {
509 errno = EINVAL;
510 return -1;
511 }
512
513 __DRIimage *image = dri2_from_planar(bo->image, plane, NULL);
514 if (image) {
515 dri2_query_image(image, __DRI_IMAGE_ATTRIB_FD, &fd);
516 dri2_destroy_image(image);
517 } else {
518 assert(plane == 0);
519 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_FD, &fd);
520 }
521
522 return fd;
523 }
524
525 static uint32_t
gbm_dri_bo_get_stride(struct gbm_bo * _bo,int plane)526 gbm_dri_bo_get_stride(struct gbm_bo *_bo, int plane)
527 {
528 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
529 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
530 __DRIimage *image;
531 int stride = 0;
532
533 if (!dri->has_dmabuf_import) {
534 /* Preserve legacy behavior if plane is 0 */
535 if (plane == 0)
536 return _bo->v0.stride;
537
538 errno = ENOSYS;
539 return 0;
540 }
541
542 if (plane >= get_number_planes(dri, bo->image)) {
543 errno = EINVAL;
544 return 0;
545 }
546
547 if (bo->image == NULL) {
548 assert(plane == 0);
549 return _bo->v0.stride;
550 }
551
552 image = dri2_from_planar(bo->image, plane, NULL);
553 if (image) {
554 dri2_query_image(image, __DRI_IMAGE_ATTRIB_STRIDE, &stride);
555 dri2_destroy_image(image);
556 } else {
557 assert(plane == 0);
558 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_STRIDE, &stride);
559 }
560
561 return (uint32_t)stride;
562 }
563
564 static uint32_t
gbm_dri_bo_get_offset(struct gbm_bo * _bo,int plane)565 gbm_dri_bo_get_offset(struct gbm_bo *_bo, int plane)
566 {
567 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
568 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
569 int offset = 0;
570
571 if (plane >= get_number_planes(dri, bo->image))
572 return 0;
573
574 /* Dumb images have no offset */
575 if (bo->image == NULL) {
576 assert(plane == 0);
577 return 0;
578 }
579
580 __DRIimage *image = dri2_from_planar(bo->image, plane, NULL);
581 if (image) {
582 dri2_query_image(image, __DRI_IMAGE_ATTRIB_OFFSET, &offset);
583 dri2_destroy_image(image);
584 } else {
585 assert(plane == 0);
586 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_OFFSET, &offset);
587 }
588
589 return (uint32_t)offset;
590 }
591
592 static uint64_t
gbm_dri_bo_get_modifier(struct gbm_bo * _bo)593 gbm_dri_bo_get_modifier(struct gbm_bo *_bo)
594 {
595 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
596
597 /* Dumb buffers have no modifiers */
598 if (!bo->image)
599 return DRM_FORMAT_MOD_LINEAR;
600
601 uint64_t ret = 0;
602 int mod;
603 if (!dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_MODIFIER_UPPER,
604 &mod))
605 return DRM_FORMAT_MOD_INVALID;
606
607 ret = (uint64_t)mod << 32;
608
609 if (!dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_MODIFIER_LOWER,
610 &mod))
611 return DRM_FORMAT_MOD_INVALID;
612
613 ret |= (uint64_t)(mod & 0xffffffff);
614
615 return ret;
616 }
617
618 static void
gbm_dri_bo_destroy(struct gbm_bo * _bo)619 gbm_dri_bo_destroy(struct gbm_bo *_bo)
620 {
621 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
622 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
623 struct drm_mode_destroy_dumb arg;
624
625 if (bo->image != NULL) {
626 dri2_destroy_image(bo->image);
627 } else {
628 gbm_dri_bo_unmap_dumb(bo);
629 memset(&arg, 0, sizeof(arg));
630 arg.handle = bo->handle;
631 drmIoctl(dri->base.v0.fd, DRM_IOCTL_MODE_DESTROY_DUMB, &arg);
632 }
633
634 free(bo);
635 }
636
637 static struct gbm_bo *
gbm_dri_bo_import(struct gbm_device * gbm,uint32_t type,void * buffer,uint32_t usage)638 gbm_dri_bo_import(struct gbm_device *gbm,
639 uint32_t type, void *buffer, uint32_t usage)
640 {
641 struct gbm_dri_device *dri = gbm_dri_device(gbm);
642 struct gbm_dri_bo *bo;
643 __DRIimage *image;
644 unsigned dri_use = 0;
645 int gbm_format;
646
647 if (!dri->has_dmabuf_import) {
648 errno = ENOSYS;
649 return NULL;
650 }
651
652 switch (type) {
653 #if HAVE_WAYLAND_PLATFORM
654 case GBM_BO_IMPORT_WL_BUFFER:
655 {
656 struct wl_drm_buffer *wb;
657
658 if (!dri->wl_drm) {
659 errno = EINVAL;
660 return NULL;
661 }
662
663 wb = wayland_drm_buffer_get(dri->wl_drm, (struct wl_resource *) buffer);
664 if (!wb) {
665 errno = EINVAL;
666 return NULL;
667 }
668
669 image = dri2_dup_image(wb->driver_buffer, NULL);
670
671 /* GBM_FORMAT_* is identical to WL_DRM_FORMAT_*, so no conversion
672 * required. */
673 gbm_format = wb->format;
674 break;
675 }
676 #endif
677
678 case GBM_BO_IMPORT_EGL_IMAGE:
679 {
680 if (dri->lookup_image_validated == NULL) {
681 errno = EINVAL;
682 return NULL;
683 }
684
685 if (!dri->validate_image(buffer, dri->lookup_user_data)) {
686 errno = EINVAL;
687 return NULL;
688 }
689 image = dri->lookup_image_validated(buffer, dri->lookup_user_data);
690 image = dri2_dup_image(image, NULL);
691 dri2_query_image(image, __DRI_IMAGE_ATTRIB_FOURCC, &gbm_format);
692 if (gbm_format == DRM_FORMAT_INVALID) {
693 errno = EINVAL;
694 dri2_destroy_image(image);
695 return NULL;
696 }
697 break;
698 }
699
700 case GBM_BO_IMPORT_FD:
701 {
702 struct gbm_import_fd_data *fd_data = buffer;
703 int stride = fd_data->stride, offset = 0;
704 int fourcc;
705
706 /* GBM's GBM_FORMAT_* tokens are a strict superset of the DRI FourCC
707 * tokens accepted by createImageFromDmaBufs, except for not supporting
708 * the sARGB format. */
709 fourcc = core->v0.format_canonicalize(fd_data->format);
710
711 image = dri2_from_dma_bufs(dri->screen,
712 fd_data->width,
713 fd_data->height,
714 fourcc,
715 DRM_FORMAT_MOD_INVALID,
716 &fd_data->fd, 1,
717 &stride, &offset,
718 0, 0, 0, 0, 0,
719 NULL, NULL);
720 if (image == NULL) {
721 errno = EINVAL;
722 return NULL;
723 }
724 gbm_format = fd_data->format;
725 break;
726 }
727
728 case GBM_BO_IMPORT_FD_MODIFIER:
729 {
730 struct gbm_import_fd_modifier_data *fd_data = buffer;
731 unsigned int error;
732 int fourcc;
733
734 /* GBM's GBM_FORMAT_* tokens are a strict superset of the DRI FourCC
735 * tokens accepted by createImageFromDmaBufs, except for not supporting
736 * the sARGB format. */
737 fourcc = core->v0.format_canonicalize(fd_data->format);
738
739 image = dri2_from_dma_bufs(dri->screen, fd_data->width,
740 fd_data->height, fourcc,
741 fd_data->modifier,
742 fd_data->fds,
743 fd_data->num_fds,
744 fd_data->strides,
745 fd_data->offsets,
746 0, 0, 0, 0,
747 0, &error, NULL);
748 if (image == NULL) {
749 errno = ENOSYS;
750 return NULL;
751 }
752
753 gbm_format = fourcc;
754 break;
755 }
756
757 default:
758 errno = ENOSYS;
759 return NULL;
760 }
761
762
763 bo = calloc(1, sizeof *bo);
764 if (bo == NULL) {
765 dri2_destroy_image(image);
766 return NULL;
767 }
768
769 bo->image = image;
770
771 if (usage & GBM_BO_USE_SCANOUT)
772 dri_use |= __DRI_IMAGE_USE_SCANOUT;
773 if (usage & GBM_BO_USE_CURSOR)
774 dri_use |= __DRI_IMAGE_USE_CURSOR;
775 if (!dri2_validate_usage(bo->image, dri_use)) {
776 errno = EINVAL;
777 dri2_destroy_image(bo->image);
778 free(bo);
779 return NULL;
780 }
781
782 bo->base.gbm = gbm;
783 bo->base.v0.format = gbm_format;
784
785 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_WIDTH,
786 (int*)&bo->base.v0.width);
787 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_HEIGHT,
788 (int*)&bo->base.v0.height);
789 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_STRIDE,
790 (int*)&bo->base.v0.stride);
791 dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_HANDLE,
792 &bo->base.v0.handle.s32);
793
794 return &bo->base;
795 }
796
797 static struct gbm_bo *
create_dumb(struct gbm_device * gbm,uint32_t width,uint32_t height,uint32_t format,uint32_t usage)798 create_dumb(struct gbm_device *gbm,
799 uint32_t width, uint32_t height,
800 uint32_t format, uint32_t usage)
801 {
802 struct gbm_dri_device *dri = gbm_dri_device(gbm);
803 struct drm_mode_create_dumb create_arg;
804 struct gbm_dri_bo *bo;
805 struct drm_mode_destroy_dumb destroy_arg;
806 int ret;
807 int is_cursor, is_scanout;
808
809 is_cursor = (usage & GBM_BO_USE_CURSOR) != 0 &&
810 format == GBM_FORMAT_ARGB8888;
811 is_scanout = (usage & GBM_BO_USE_SCANOUT) != 0 &&
812 (format == GBM_FORMAT_XRGB8888 || format == GBM_FORMAT_XBGR8888);
813 if (!is_cursor && !is_scanout) {
814 errno = EINVAL;
815 return NULL;
816 }
817
818 bo = calloc(1, sizeof *bo);
819 if (bo == NULL)
820 return NULL;
821
822 memset(&create_arg, 0, sizeof(create_arg));
823 create_arg.bpp = 32;
824 create_arg.width = width;
825 create_arg.height = height;
826
827 ret = drmIoctl(dri->base.v0.fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_arg);
828 if (ret)
829 goto free_bo;
830
831 bo->base.gbm = gbm;
832 bo->base.v0.width = width;
833 bo->base.v0.height = height;
834 bo->base.v0.stride = create_arg.pitch;
835 bo->base.v0.format = format;
836 bo->base.v0.handle.u32 = create_arg.handle;
837 bo->handle = create_arg.handle;
838 bo->size = create_arg.size;
839
840 if (gbm_dri_bo_map_dumb(bo) == NULL)
841 goto destroy_dumb;
842
843 return &bo->base;
844
845 destroy_dumb:
846 memset(&destroy_arg, 0, sizeof destroy_arg);
847 destroy_arg.handle = create_arg.handle;
848 drmIoctl(dri->base.v0.fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_arg);
849 free_bo:
850 free(bo);
851
852 return NULL;
853 }
854
/**
 * Allocate a new BO through the driver.
 *
 * GBM_BO_USE_WRITE (or a driver without dmabuf export) routes to a dumb
 * buffer instead of a __DRIimage.  When fixed-rate-compression usage
 * flags are present and supported, the caller's modifier list is first
 * intersected with the modifiers valid for the requested compression
 * rate.
 *
 * \param modifiers  optional list of acceptable DRM format modifiers
 * \param count      number of entries in \p modifiers
 * \return the new BO, or NULL with errno set on failure
 */
static struct gbm_bo *
gbm_dri_bo_create(struct gbm_device *gbm,
                  uint32_t width, uint32_t height,
                  uint32_t format, uint32_t usage,
                  const uint64_t *modifiers,
                  const unsigned int count)
{
   struct gbm_dri_device *dri = gbm_dri_device(gbm);
   struct gbm_dri_bo *bo;
   int dri_format;
   unsigned dri_use = 0;
   uint64_t *mods_comp = NULL;      /* modifiers valid for the chosen compression rate */
   uint64_t *mods_filtered = NULL;  /* intersection of mods_comp with the caller's list */
   unsigned int count_filtered = 0;

   format = core->v0.format_canonicalize(format);

   /* CPU-writable BOs (and drivers without dmabuf export) use dumb buffers. */
   if (usage & GBM_BO_USE_WRITE || !dri->has_dmabuf_export)
      return create_dumb(gbm, width, height, format, usage);

   bo = calloc(1, sizeof *bo);
   if (bo == NULL)
      return NULL;

   bo->base.gbm = gbm;
   bo->base.v0.width = width;
   bo->base.v0.height = height;
   bo->base.v0.format = format;

   dri_format = gbm_format_to_dri_format(format);
   if (dri_format == 0) {
      errno = EINVAL;
      goto failed;
   }

   /* Translate GBM usage flags into __DRI_IMAGE_USE_* bits. */
   if (usage & GBM_BO_USE_SCANOUT)
      dri_use |= __DRI_IMAGE_USE_SCANOUT;
   if (usage & GBM_BO_USE_CURSOR)
      dri_use |= __DRI_IMAGE_USE_CURSOR;
   if (usage & GBM_BO_USE_LINEAR)
      dri_use |= __DRI_IMAGE_USE_LINEAR;
   if (usage & GBM_BO_USE_PROTECTED)
      dri_use |= __DRI_IMAGE_USE_PROTECTED;
   if (usage & GBM_BO_USE_FRONT_RENDERING)
      dri_use |= __DRI_IMAGE_USE_FRONT_RENDERING;

   /* Gallium drivers requires shared in order to get the handle/stride */
   dri_use |= __DRI_IMAGE_USE_SHARE;

   /* If the driver supports fixed-rate compression, filter the acceptable
    * modifiers by the compression rate. */
   if (modifiers && dri->has_compression_modifiers) {
      enum __DRIFixedRateCompression comp = __DRI_FIXED_RATE_COMPRESSION_NONE;

      /* Map the GBM fixed-compression usage bits onto the DRI enum. */
      switch (usage & GBM_BO_FIXED_COMPRESSION_MASK) {
#define CASE(x) case GBM_BO_FIXED_COMPRESSION_ ## x: comp = __DRI_FIXED_RATE_COMPRESSION_ ## x; break;
      CASE(DEFAULT);
      CASE(1BPC);
      CASE(2BPC);
      CASE(3BPC);
      CASE(4BPC);
      CASE(5BPC);
      CASE(6BPC);
      CASE(7BPC);
      CASE(8BPC);
      CASE(9BPC);
      CASE(10BPC);
      CASE(11BPC);
      CASE(12BPC);
#undef CASE
      default:
         break;
      }

      int count_comp = 0;

      /* Find how many acceptable modifiers there are for our rate. If there
       * are none, fall back to no compression, as it is not mandatory to use
       * the specified compression rate. */
      if (!dri2_query_compression_modifiers(dri->screen, format, comp,
                                            0, NULL, &count_comp) ||
          count_comp == 0) {
         if (comp == __DRI_FIXED_RATE_COMPRESSION_NONE) {
            errno = EINVAL;
            goto failed;
         }

         comp = __DRI_FIXED_RATE_COMPRESSION_NONE;
         if (!dri2_query_compression_modifiers(dri->screen, format, comp,
                                               0, NULL, &count_comp)) {
            errno = EINVAL;
            goto failed;
         }
      }

      if (count_comp == 0) {
         errno = EINVAL;
         goto failed;
      }

      mods_comp = malloc(count_comp * sizeof(uint64_t));
      mods_filtered = malloc(count_comp * sizeof(uint64_t));
      if (!mods_comp || !mods_filtered) {
         errno = ENOMEM;
         goto failed;
      }

      /* Second query actually fills mods_comp with the modifier list. */
      if (!dri2_query_compression_modifiers(dri->screen, format, comp,
                                            count_comp, mods_comp,
                                            &count_comp)) {
         errno = ENOMEM;
         goto failed;
      }


      /* Intersect the list of user-supplied acceptable modifiers with the set
       * of modifiers acceptable for this compression rate. */
      for (unsigned int i = 0; i < count_comp; i++) {
         for (unsigned int j = 0; j < count; j++) {
            if (mods_comp[i] == modifiers[j]) {
               mods_filtered[count_filtered++] = mods_comp[i];
               break;
            }
         }
      }

      free(mods_comp);
      mods_comp = NULL;
   }

   /* Use the filtered list when compression filtering ran, otherwise the
    * caller's list as-is. */
   bo->image = dri_create_image_with_modifiers(dri->screen, width, height,
                                               dri_format, dri_use,
                                               mods_filtered ? mods_filtered : modifiers,
                                               mods_filtered ? count_filtered : count,
                                               bo);
   if (bo->image == NULL)
      goto failed;

   free(mods_filtered);
   mods_filtered = NULL;

   dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_HANDLE,
                    &bo->base.v0.handle.s32);
   dri2_query_image(bo->image, __DRI_IMAGE_ATTRIB_STRIDE,
                    (int *) &bo->base.v0.stride);

   return &bo->base;

failed:
   free(mods_comp);
   free(mods_filtered);
   free(bo);
   return NULL;
}
1009
1010 static void *
gbm_dri_bo_map(struct gbm_bo * _bo,uint32_t x,uint32_t y,uint32_t width,uint32_t height,uint32_t flags,uint32_t * stride,void ** map_data)1011 gbm_dri_bo_map(struct gbm_bo *_bo,
1012 uint32_t x, uint32_t y,
1013 uint32_t width, uint32_t height,
1014 uint32_t flags, uint32_t *stride, void **map_data)
1015 {
1016 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
1017 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
1018
1019 /* If it's a dumb buffer, we already have a mapping */
1020 if (bo->map) {
1021 *map_data = (char *)bo->map + (bo->base.v0.stride * y) + (x * 4);
1022 *stride = bo->base.v0.stride;
1023 return *map_data;
1024 }
1025
1026 mtx_lock(&dri->mutex);
1027 if (!dri->context) {
1028 unsigned error;
1029
1030 dri->context = driCreateContextAttribs(dri->screen,
1031 __DRI_API_OPENGL,
1032 NULL, NULL, 0, NULL,
1033 &error, NULL);
1034 }
1035 assert(dri->context);
1036 mtx_unlock(&dri->mutex);
1037
1038 /* GBM flags and DRI flags are the same, so just pass them on */
1039 return dri2_map_image(dri->context, bo->image, x, y,
1040 width, height, flags, (int *)stride,
1041 map_data);
1042 }
1043
1044 static void
gbm_dri_bo_unmap(struct gbm_bo * _bo,void * map_data)1045 gbm_dri_bo_unmap(struct gbm_bo *_bo, void *map_data)
1046 {
1047 struct gbm_dri_device *dri = gbm_dri_device(_bo->gbm);
1048 struct gbm_dri_bo *bo = gbm_dri_bo(_bo);
1049
1050 /* Check if it's a dumb buffer and check the pointer is in range */
1051 if (bo->map) {
1052 assert(map_data >= bo->map);
1053 assert(map_data < (bo->map + bo->size));
1054 return;
1055 }
1056
1057 if (!dri->context)
1058 return;
1059
1060 dri2_unmap_image(dri->context, bo->image, map_data);
1061
1062 /*
1063 * Not all DRI drivers use direct maps. They may queue up DMA operations
1064 * on the mapping context. Since there is no explicit gbm flush
1065 * mechanism, we need to flush here.
1066 */
1067 dri_flush(dri->context, NULL, __DRI2_FLUSH_CONTEXT, 0);
1068 }
1069
1070
/**
 * Create a GBM surface.  The surface only records its parameters; actual
 * buffers are created lazily by the EGL/loader side.
 *
 * \param modifiers  optional list of acceptable DRM format modifiers;
 *                   copied into the surface
 * \param count      number of entries in \p modifiers
 * \return the new surface, or NULL with errno set on failure
 */
static struct gbm_surface *
gbm_dri_surface_create(struct gbm_device *gbm,
                       uint32_t width, uint32_t height,
                       uint32_t format, uint32_t flags,
                       const uint64_t *modifiers, const unsigned count)
{
   struct gbm_dri_surface *surf;

   if (count)
      assert(modifiers);

   /* It's acceptable to create an image with INVALID modifier in the list,
    * but it cannot be on the only modifier (since it will certainly fail
    * later). While we could easily catch this after modifier creation, doing
    * the check here is a convenient debug check likely pointing at whatever
    * interface the client is using to build its modifier list.
    */
   if (count == 1 && modifiers[0] == DRM_FORMAT_MOD_INVALID) {
      /* NOTE(review): this intentionally only warns and sets errno without
       * returning — creation proceeds and is expected to fail later. */
      fprintf(stderr, "Only invalid modifier specified\n");
      errno = EINVAL;
   }

   surf = calloc(1, sizeof *surf);
   if (surf == NULL) {
      errno = ENOMEM;
      return NULL;
   }

   surf->base.gbm = gbm;
   surf->base.v0.width = width;
   surf->base.v0.height = height;
   surf->base.v0.format = core->v0.format_canonicalize(format);
   surf->base.v0.flags = flags;
   /* Without a modifier list the surface is complete at this point. */
   if (!modifiers) {
      assert(!count);
      return &surf->base;
   }

   surf->base.v0.modifiers = calloc(count, sizeof(*modifiers));
   if (count && !surf->base.v0.modifiers) {
      errno = ENOMEM;
      free(surf);
      return NULL;
   }

   /* TODO: We are deferring validation of modifiers until the image is actually
    * created. This deferred creation can fail due to a modifier-format
    * mismatch. The result is the client has a surface but no object to back it.
    */
   surf->base.v0.count = count;
   memcpy(surf->base.v0.modifiers, modifiers, count * sizeof(*modifiers));

   return &surf->base;
}
1125
1126 static void
gbm_dri_surface_destroy(struct gbm_surface * _surf)1127 gbm_dri_surface_destroy(struct gbm_surface *_surf)
1128 {
1129 struct gbm_dri_surface *surf = gbm_dri_surface(_surf);
1130
1131 free(surf->base.v0.modifiers);
1132 free(surf);
1133 }
1134
1135 static void
dri_destroy(struct gbm_device * gbm)1136 dri_destroy(struct gbm_device *gbm)
1137 {
1138 struct gbm_dri_device *dri = gbm_dri_device(gbm);
1139 unsigned i;
1140
1141 if (dri->context)
1142 driDestroyContext(dri->context);
1143
1144 driDestroyScreen(dri->screen);
1145 for (i = 0; dri->driver_configs[i]; i++)
1146 free((__DRIconfig *) dri->driver_configs[i]);
1147 free(dri->driver_configs);
1148 free(dri->driver_name);
1149
1150 free(dri);
1151 }
1152
1153 static struct gbm_device *
dri_device_create(int fd,uint32_t gbm_backend_version)1154 dri_device_create(int fd, uint32_t gbm_backend_version)
1155 {
1156 struct gbm_dri_device *dri;
1157 int ret;
1158 bool force_sw;
1159
1160 dri = calloc(1, sizeof *dri);
1161 if (!dri)
1162 return NULL;
1163
1164 dri->base.v0.fd = fd;
1165 dri->base.v0.backend_version = gbm_backend_version;
1166 dri->base.v0.bo_create = gbm_dri_bo_create;
1167 dri->base.v0.bo_import = gbm_dri_bo_import;
1168 dri->base.v0.bo_map = gbm_dri_bo_map;
1169 dri->base.v0.bo_unmap = gbm_dri_bo_unmap;
1170 dri->base.v0.is_format_supported = gbm_dri_is_format_supported;
1171 dri->base.v0.get_format_modifier_plane_count =
1172 gbm_dri_get_format_modifier_plane_count;
1173 dri->base.v0.bo_write = gbm_dri_bo_write;
1174 dri->base.v0.bo_get_fd = gbm_dri_bo_get_fd;
1175 dri->base.v0.bo_get_planes = gbm_dri_bo_get_planes;
1176 dri->base.v0.bo_get_handle = gbm_dri_bo_get_handle_for_plane;
1177 dri->base.v0.bo_get_plane_fd = gbm_dri_bo_get_plane_fd;
1178 dri->base.v0.bo_get_stride = gbm_dri_bo_get_stride;
1179 dri->base.v0.bo_get_offset = gbm_dri_bo_get_offset;
1180 dri->base.v0.bo_get_modifier = gbm_dri_bo_get_modifier;
1181 dri->base.v0.bo_destroy = gbm_dri_bo_destroy;
1182 dri->base.v0.destroy = dri_destroy;
1183 dri->base.v0.surface_create = gbm_dri_surface_create;
1184 dri->base.v0.surface_destroy = gbm_dri_surface_destroy;
1185
1186 dri->base.v0.name = "drm";
1187
1188 dri->visual_table = gbm_dri_visuals_table;
1189 dri->num_visuals = ARRAY_SIZE(gbm_dri_visuals_table);
1190
1191 mtx_init(&dri->mutex, mtx_plain);
1192
1193 force_sw = debug_get_bool_option("GBM_ALWAYS_SOFTWARE", false);
1194 if (!force_sw) {
1195 ret = dri_screen_create(dri, false);
1196 if (ret)
1197 ret = dri_screen_create_sw(dri, true);
1198 } else {
1199 ret = dri_screen_create_sw(dri, false);
1200 }
1201
1202 if (ret)
1203 goto err_dri;
1204
1205 struct dri_screen *screen = dri_screen(dri->screen);
1206 struct pipe_screen *pscreen = screen->base.screen;
1207 #ifdef HAVE_LIBDRM
1208 if (pscreen->get_param(pscreen, PIPE_CAP_DMABUF) & DRM_PRIME_CAP_IMPORT)
1209 dri->has_dmabuf_import = true;
1210 if (pscreen->get_param(pscreen, PIPE_CAP_DMABUF) & DRM_PRIME_CAP_EXPORT)
1211 dri->has_dmabuf_export = true;
1212 #endif
1213 dri->has_compression_modifiers = pscreen->query_compression_rates &&
1214 pscreen->query_compression_modifiers;
1215
1216 return &dri->base;
1217
1218 err_dri:
1219 free(dri);
1220
1221 return NULL;
1222 }
1223
/* Backend descriptor handed to the GBM loader core by gbmint_get_backend();
 * "dri" is the backend name and dri_device_create is the device factory. */
struct gbm_backend gbm_dri_backend = {
   .v0.backend_version = GBM_BACKEND_ABI_VERSION,
   .v0.backend_name = "dri",
   .v0.create_device = dri_device_create,
};
1229
1230 struct gbm_backend * gbmint_get_backend(const struct gbm_core *gbm_core);
1231
1232 PUBLIC struct gbm_backend *
gbmint_get_backend(const struct gbm_core * gbm_core)1233 gbmint_get_backend(const struct gbm_core *gbm_core) {
1234 core = gbm_core;
1235 return &gbm_dri_backend;
1236 };
1237