1 /*
2 * Copyright © 2010 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Kristian Høgsberg <[email protected]>
26 */
27
28 #include <dlfcn.h>
29 #include <errno.h>
30 #include <fcntl.h>
31 #include <limits.h>
32 #include <stdbool.h>
33 #include <stdint.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <string.h>
37 #include <time.h>
38 #include <unistd.h>
39 #include <c11/threads.h>
40 #ifdef HAVE_LIBDRM
41 #include <xf86drm.h>
42 #include "drm-uapi/drm_fourcc.h"
43 #endif
44 #include <GL/gl.h>
45 #include "mesa_interface.h"
46 #include <sys/stat.h>
47 #include <sys/types.h>
48 #include "dri_screen.h"
49
50 #ifdef HAVE_WAYLAND_PLATFORM
51 #include "linux-dmabuf-unstable-v1-client-protocol.h"
52 #include "wayland-drm-client-protocol.h"
53 #include "wayland-drm.h"
54 #include <wayland-client.h>
55 #endif
56
57 #ifdef HAVE_X11_PLATFORM
58 #include "X11/Xlibint.h"
59 #include "loader_x11.h"
60 #endif
61
62 #include "GL/mesa_glinterop.h"
63 #include "pipe-loader/pipe_loader.h"
64 #include "loader/loader.h"
65 #include "mapi/glapi/glapi.h"
66 #include "pipe/p_screen.h"
67 #include "util/bitscan.h"
68 #include "util/driconf.h"
69 #include "util/libsync.h"
70 #include "util/os_file.h"
71 #include "util/u_atomic.h"
72 #include "util/u_call_once.h"
73 #include "util/u_math.h"
74 #include "util/u_vector.h"
75 #include "egl_dri2.h"
76 #include "egldefines.h"
77
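/* Upper bound on the number of uint32_t entries (attribute/value pairs) that
 * dri2_fill_context_attribs() may write for driCreateContextAttribs().
 */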
78 #define NUM_ATTRIBS 16
79
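/* Color formats exposed as pbuffer-only EGLConfigs; driver configs with other
 * formats are skipped in dri2_add_pbuffer_configs_for_visuals().
 */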
80 static const enum pipe_format dri2_pbuffer_visuals[] = {
81 PIPE_FORMAT_R16G16B16A16_FLOAT,
82 PIPE_FORMAT_R16G16B16X16_FLOAT,
83 PIPE_FORMAT_B10G10R10A2_UNORM,
84 PIPE_FORMAT_B10G10R10X2_UNORM,
85 PIPE_FORMAT_BGRA8888_UNORM,
86 PIPE_FORMAT_BGRX8888_UNORM,
87 PIPE_FORMAT_B5G6R5_UNORM,
88 };
89
90 static void
dri_set_background_context(void *loaderPrivate)
92 {
93 _EGLContext *ctx = _eglGetCurrentContext();
94 _EGLThreadInfo *t = _eglGetCurrentThread();
95
96 _eglBindContextToThread(ctx, t);
97 }
98
99 static void
dri2_gl_flush_get(_glapi_proc *glFlush)
101 {
102 *glFlush = _glapi_get_proc_address("glFlush");
103 }
104
105 static void
dri2_gl_flush()
107 {
108 static void (*glFlush)(void);
109 static util_once_flag once = UTIL_ONCE_FLAG_INIT;
110
111 util_call_once_data(&once, (util_call_once_data_func)dri2_gl_flush_get,
112 &glFlush);
113
114 /* if glFlush is not available things are horribly broken */
115 if (!glFlush) {
116 _eglLog(_EGL_WARNING, "DRI2: failed to find glFlush entry point");
117 return;
118 }
119
120 glFlush();
121 }
122
123 static GLboolean
dri_is_thread_safe(UNUSED void *loaderPrivate)
125 {
126 #ifdef HAVE_X11_PLATFORM
127 struct dri2_egl_surface *dri2_surf = loaderPrivate;
128
129 /* loader_dri3_blit_context_get creates a context with
130 * loaderPrivate being NULL. Enabling glthread for a blitting
131 * context isn't useful so return false.
132 */
133 if (!loaderPrivate)
134 return false;
135
136 _EGLDisplay *display = dri2_surf->base.Resource.Display;
137
138 Display *xdpy = (Display *)display->PlatformDisplay;
139
/* Check that Xlib is running in thread-safe mode when on the EGL/X11
 * Xlib platform.
 *
 * 'lock_fns' is the XLockDisplay function pointer of the X11 display
 * 'xdpy'. It will be NULL if XInitThreads wasn't called.
 */
146 if (display->Platform == _EGL_PLATFORM_X11 && xdpy && !xdpy->lock_fns)
147 return false;
148 #endif
149
150 return true;
151 }
152
153 const __DRIbackgroundCallableExtension background_callable_extension = {
154 .base = {__DRI_BACKGROUND_CALLABLE, 2},
155
156 .setBackgroundContext = dri_set_background_context,
157 .isThreadSafe = dri_is_thread_safe,
158 };
159
160 const __DRIuseInvalidateExtension use_invalidate = {
161 .base = {__DRI_USE_INVALIDATE, 1},
162 };
163
164 static void
dri2_get_pbuffer_drawable_info(__DRIdrawable *draw, int *x, int *y, int *w,
166 int *h, void *loaderPrivate)
167 {
168 struct dri2_egl_surface *dri2_surf = loaderPrivate;
169
170 *x = *y = 0;
171 *w = dri2_surf->base.Width;
172 *h = dri2_surf->base.Height;
173 }
174
175 static int
dri2_get_bytes_per_pixel(struct dri2_egl_surface *dri2_surf)
177 {
178 const int depth = dri2_surf->base.Config->BufferSize;
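/* Round up to a power-of-two byte count, so that e.g. 24- and 30-bit
 * configs still use 4 bytes per pixel in the software buffer.
 */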
179 return depth ? util_next_power_of_two(depth / 8) : 0;
180 }
181
182 static void
dri2_put_image(__DRIdrawable *draw, int op, int x, int y, int w, int h,
184 char *data, void *loaderPrivate)
185 {
186 struct dri2_egl_surface *dri2_surf = loaderPrivate;
187 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
188 const int width = dri2_surf->base.Width;
189 const int height = dri2_surf->base.Height;
190 const int dst_stride = width * bpp;
191 const int src_stride = w * bpp;
192 const int x_offset = x * bpp;
193 int copy_width = src_stride;
194
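/* Lazily allocate a shadow buffer covering the whole surface; it is the
 * backing store that dri2_get_image() reads back from.
 */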
195 if (!dri2_surf->swrast_device_buffer)
196 dri2_surf->swrast_device_buffer = malloc(height * dst_stride);
197
198 if (dri2_surf->swrast_device_buffer) {
199 const char *src = data;
200 char *dst = dri2_surf->swrast_device_buffer;
201
202 dst += x_offset;
203 dst += y * dst_stride;
204
205 /* Drivers are allowed to submit OOB PutImage requests, so clip here. */
206 if (copy_width > dst_stride - x_offset)
207 copy_width = dst_stride - x_offset;
208 if (h > height - y)
209 h = height - y;
210
211 for (; 0 < h; --h) {
212 memcpy(dst, src, copy_width);
213 dst += dst_stride;
214 src += src_stride;
215 }
216 }
217 }
218
219 static void
dri2_get_image(__DRIdrawable *read, int x, int y, int w, int h, char *data,
221 void *loaderPrivate)
222 {
223 struct dri2_egl_surface *dri2_surf = loaderPrivate;
224 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
225 const int width = dri2_surf->base.Width;
226 const int height = dri2_surf->base.Height;
227 const int src_stride = width * bpp;
228 const int dst_stride = w * bpp;
229 const int x_offset = x * bpp;
230 int copy_width = dst_stride;
231 const char *src = dri2_surf->swrast_device_buffer;
232 char *dst = data;
233
234 if (!src) {
235 memset(data, 0, copy_width * h);
236 return;
237 }
238
239 src += x_offset;
240 src += y * src_stride;
241
242 /* Drivers are allowed to submit OOB GetImage requests, so clip here. */
243 if (copy_width > src_stride - x_offset)
244 copy_width = src_stride - x_offset;
245 if (h > height - y)
246 h = height - y;
247
248 for (; 0 < h; --h) {
249 memcpy(dst, src, copy_width);
250 src += src_stride;
251 dst += dst_stride;
252 }
253 }
254
/* HACK: technically we should have swrast_null instead of these. */
257 const __DRIswrastLoaderExtension swrast_pbuffer_loader_extension = {
258 .base = {__DRI_SWRAST_LOADER, 1},
259 .getDrawableInfo = dri2_get_pbuffer_drawable_info,
260 .putImage = dri2_put_image,
261 .getImage = dri2_get_image,
262 };
263
264 static const EGLint dri2_to_egl_attribute_map[__DRI_ATTRIB_MAX] = {
265 [__DRI_ATTRIB_BUFFER_SIZE] = EGL_BUFFER_SIZE,
266 [__DRI_ATTRIB_LEVEL] = EGL_LEVEL,
267 [__DRI_ATTRIB_LUMINANCE_SIZE] = EGL_LUMINANCE_SIZE,
268 [__DRI_ATTRIB_DEPTH_SIZE] = EGL_DEPTH_SIZE,
269 [__DRI_ATTRIB_STENCIL_SIZE] = EGL_STENCIL_SIZE,
270 [__DRI_ATTRIB_SAMPLE_BUFFERS] = EGL_SAMPLE_BUFFERS,
271 [__DRI_ATTRIB_SAMPLES] = EGL_SAMPLES,
272 [__DRI_ATTRIB_MAX_PBUFFER_WIDTH] = EGL_MAX_PBUFFER_WIDTH,
273 [__DRI_ATTRIB_MAX_PBUFFER_HEIGHT] = EGL_MAX_PBUFFER_HEIGHT,
274 [__DRI_ATTRIB_MAX_PBUFFER_PIXELS] = EGL_MAX_PBUFFER_PIXELS,
275 [__DRI_ATTRIB_MAX_SWAP_INTERVAL] = EGL_MAX_SWAP_INTERVAL,
276 [__DRI_ATTRIB_MIN_SWAP_INTERVAL] = EGL_MIN_SWAP_INTERVAL,
277 [__DRI_ATTRIB_YINVERTED] = EGL_Y_INVERTED_NOK,
278 };
279
280 const __DRIconfig *
dri2_get_dri_config(struct dri2_egl_config *conf, EGLint surface_type,
282 EGLenum colorspace)
283 {
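/* dri_config is indexed as [double_buffer][srgb]: window surfaces use the
 * double-buffered variant, all other surface types the single-buffered one.
 */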
284 const bool double_buffer = surface_type == EGL_WINDOW_BIT;
285 const bool srgb = colorspace == EGL_GL_COLORSPACE_SRGB_KHR;
286
287 return conf->dri_config[double_buffer][srgb];
288 }
289
290 static EGLBoolean
dri2_match_config(const _EGLConfig *conf, const _EGLConfig *criteria)
292 {
293 #ifdef HAVE_X11_PLATFORM
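/* On X11, additionally require matching native visual IDs for configs with
 * an alpha channel.
 */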
294 if (conf->Display->Platform == _EGL_PLATFORM_X11 &&
295 conf->AlphaSize > 0 &&
296 conf->NativeVisualID != criteria->NativeVisualID)
297 return EGL_FALSE;
298 #endif
299
300 if (_eglCompareConfigs(conf, criteria, NULL, EGL_FALSE) != 0)
301 return EGL_FALSE;
302
303 if (!_eglMatchConfig(conf, criteria))
304 return EGL_FALSE;
305
306 return EGL_TRUE;
307 }
308
309 void
dri2_get_shifts_and_sizes(const __DRIconfig *config, int *shifts,
311 unsigned int *sizes)
312 {
313 driGetConfigAttrib(config, __DRI_ATTRIB_RED_SHIFT,
314 (unsigned int *)&shifts[0]);
315 driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SHIFT,
316 (unsigned int *)&shifts[1]);
317 driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SHIFT,
318 (unsigned int *)&shifts[2]);
319 driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SHIFT,
320 (unsigned int *)&shifts[3]);
321 driGetConfigAttrib(config, __DRI_ATTRIB_RED_SIZE, &sizes[0]);
322 driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SIZE, &sizes[1]);
323 driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SIZE, &sizes[2]);
324 driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SIZE, &sizes[3]);
325 }
326
327 enum pipe_format
dri2_image_format_for_pbuffer_config(struct dri2_egl_display *dri2_dpy,
329 const __DRIconfig *config)
330 {
331 struct gl_config *gl_config = (struct gl_config *) config;
332 return gl_config->color_format;
333 }
334
335 struct dri2_egl_config *
dri2_add_config(_EGLDisplay *disp, const __DRIconfig *dri_config,
337 EGLint surface_type, const EGLint *attr_list)
338 {
339 struct dri2_egl_config *conf;
340 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
341 _EGLConfig base;
342 unsigned int attrib, value, double_buffer;
343 bool srgb = false;
344 EGLint key, bind_to_texture_rgb, bind_to_texture_rgba;
345 _EGLConfig *matching_config;
346 EGLint num_configs = 0;
347 EGLint config_id;
348
349 _eglInitConfig(&base, disp, _eglGetArraySize(disp->Configs) + 1);
350
351 double_buffer = 0;
352 bind_to_texture_rgb = 0;
353 bind_to_texture_rgba = 0;
354
355 for (int i = 0; i < __DRI_ATTRIB_MAX; ++i) {
356 if (!driIndexConfigAttrib(dri_config, i, &attrib, &value))
357 break;
358
359 switch (attrib) {
360 case __DRI_ATTRIB_RENDER_TYPE:
361 if (value & __DRI_ATTRIB_FLOAT_BIT)
362 base.ComponentType = EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT;
363 if (value & __DRI_ATTRIB_RGBA_BIT)
364 value = EGL_RGB_BUFFER;
365 else if (value & __DRI_ATTRIB_LUMINANCE_BIT)
366 value = EGL_LUMINANCE_BUFFER;
367 else
368 return NULL;
369 base.ColorBufferType = value;
370 break;
371
372 case __DRI_ATTRIB_CONFIG_CAVEAT:
373 if (value & __DRI_ATTRIB_NON_CONFORMANT_CONFIG)
374 value = EGL_NON_CONFORMANT_CONFIG;
375 else if (value & __DRI_ATTRIB_SLOW_BIT)
376 value = EGL_SLOW_CONFIG;
377 else
378 value = EGL_NONE;
379 base.ConfigCaveat = value;
380 break;
381
382 case __DRI_ATTRIB_BIND_TO_TEXTURE_RGB:
383 bind_to_texture_rgb = value;
384 break;
385
386 case __DRI_ATTRIB_BIND_TO_TEXTURE_RGBA:
387 bind_to_texture_rgba = value;
388 break;
389
390 case __DRI_ATTRIB_DOUBLE_BUFFER:
391 double_buffer = value;
392 break;
393
394 case __DRI_ATTRIB_RED_SIZE:
395 base.RedSize = value;
396 break;
397
398 case __DRI_ATTRIB_GREEN_SIZE:
399 base.GreenSize = value;
400 break;
401
402 case __DRI_ATTRIB_BLUE_SIZE:
403 base.BlueSize = value;
404 break;
405
406 case __DRI_ATTRIB_ALPHA_SIZE:
407 base.AlphaSize = value;
408 break;
409
410 case __DRI_ATTRIB_ACCUM_RED_SIZE:
411 case __DRI_ATTRIB_ACCUM_GREEN_SIZE:
412 case __DRI_ATTRIB_ACCUM_BLUE_SIZE:
413 case __DRI_ATTRIB_ACCUM_ALPHA_SIZE:
414 /* Don't expose visuals with the accumulation buffer. */
415 if (value > 0)
416 return NULL;
417 break;
418
419 case __DRI_ATTRIB_FRAMEBUFFER_SRGB_CAPABLE:
420 srgb = value != 0;
421 if (!disp->Extensions.KHR_gl_colorspace && srgb)
422 return NULL;
423 break;
424
425 case __DRI_ATTRIB_MAX_PBUFFER_WIDTH:
426 base.MaxPbufferWidth = _EGL_MAX_PBUFFER_WIDTH;
427 break;
428 case __DRI_ATTRIB_MAX_PBUFFER_HEIGHT:
429 base.MaxPbufferHeight = _EGL_MAX_PBUFFER_HEIGHT;
430 break;
431 case __DRI_ATTRIB_MUTABLE_RENDER_BUFFER:
432 if (disp->Extensions.KHR_mutable_render_buffer)
433 surface_type |= EGL_MUTABLE_RENDER_BUFFER_BIT_KHR;
434 break;
435 default:
436 key = dri2_to_egl_attribute_map[attrib];
437 if (key != 0)
438 _eglSetConfigKey(&base, key, value);
439 break;
440 }
441 }
442
443 if (attr_list)
444 for (int i = 0; attr_list[i] != EGL_NONE; i += 2)
445 _eglSetConfigKey(&base, attr_list[i], attr_list[i + 1]);
446
447 base.NativeRenderable = EGL_TRUE;
448
449 base.SurfaceType = surface_type;
450 if (surface_type &
451 (EGL_PBUFFER_BIT |
452 (disp->Extensions.NOK_texture_from_pixmap ? EGL_PIXMAP_BIT : 0))) {
453 base.BindToTextureRGB = bind_to_texture_rgb;
454 if (base.AlphaSize > 0)
455 base.BindToTextureRGBA = bind_to_texture_rgba;
456 }
457
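/* Double-buffered configs cannot back pixmaps; single-buffered configs
 * cannot back windows.
 */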
458 if (double_buffer) {
459 surface_type &= ~EGL_PIXMAP_BIT;
460 } else {
461 surface_type &= ~EGL_WINDOW_BIT;
462 }
463
464 if (!surface_type)
465 return NULL;
466
467 base.RenderableType = disp->ClientAPIs;
468 base.Conformant = disp->ClientAPIs;
469
470 base.MinSwapInterval = dri2_dpy->min_swap_interval;
471 base.MaxSwapInterval = dri2_dpy->max_swap_interval;
472
473 if (!_eglValidateConfig(&base, EGL_FALSE)) {
474 _eglLog(_EGL_DEBUG, "DRI2: failed to validate config %d", base.ConfigID);
475 return NULL;
476 }
477
478 config_id = base.ConfigID;
479 base.ConfigID = EGL_DONT_CARE;
480 base.SurfaceType = EGL_DONT_CARE;
481 num_configs = _eglFilterArray(disp->Configs, (void **)&matching_config, 1,
482 (_EGLArrayForEach)dri2_match_config, &base);
483
484 if (num_configs == 1) {
485 conf = (struct dri2_egl_config *)matching_config;
486
487 if (!conf->dri_config[double_buffer][srgb])
488 conf->dri_config[double_buffer][srgb] = dri_config;
489 else
490 /* a similar config type is already added (unlikely) => discard */
491 return NULL;
492 } else if (num_configs == 0) {
493 conf = calloc(1, sizeof *conf);
494 if (conf == NULL)
495 return NULL;
496
497 conf->dri_config[double_buffer][srgb] = dri_config;
498
499 memcpy(&conf->base, &base, sizeof base);
500 conf->base.SurfaceType = 0;
501 conf->base.ConfigID = config_id;
502
503 _eglLinkConfig(&conf->base);
504 } else {
505 unreachable("duplicates should not be possible");
506 return NULL;
507 }
508
509 conf->base.SurfaceType |= surface_type;
510
511 return conf;
512 }
513
514 static int
dri2_pbuffer_visual_index(enum pipe_format format)
516 {
517 for (unsigned i = 0; i < ARRAY_SIZE(dri2_pbuffer_visuals); i++) {
518 if (dri2_pbuffer_visuals[i] == format)
519 return i;
520 }
521
522 return -1;
523 }
524
525 void
dri2_add_pbuffer_configs_for_visuals(_EGLDisplay *disp)
527 {
528 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
529 unsigned int format_count[ARRAY_SIZE(dri2_pbuffer_visuals)] = {0};
530
531 for (unsigned i = 0; dri2_dpy->driver_configs[i] != NULL; i++) {
532 struct dri2_egl_config *dri2_conf;
533 struct gl_config *gl_config =
534 (struct gl_config *) dri2_dpy->driver_configs[i];
535 int idx = dri2_pbuffer_visual_index(gl_config->color_format);
536
537 if (idx == -1)
538 continue;
539
540 dri2_conf = dri2_add_config(disp, dri2_dpy->driver_configs[i],
541 EGL_PBUFFER_BIT, NULL);
542 if (dri2_conf)
543 format_count[idx]++;
544 }
545
546 for (unsigned i = 0; i < ARRAY_SIZE(format_count); i++) {
547 if (!format_count[i]) {
548 _eglLog(_EGL_DEBUG, "No DRI config supports native format %s",
549 util_format_name(dri2_pbuffer_visuals[i]));
550 }
551 }
552 }
553
554 GLboolean
dri2_validate_egl_image(void *image, void *data)
556 {
557 _EGLDisplay *disp = _eglLockDisplay(data);
558 _EGLImage *img = _eglLookupImage(image, disp);
559 _eglUnlockDisplay(disp);
560
561 if (img == NULL) {
562 _eglError(EGL_BAD_PARAMETER, "dri2_validate_egl_image");
563 return false;
564 }
565
566 return true;
567 }
568
569 __DRIimage *
dri2_lookup_egl_image_validated(void *image, void *data)
571 {
572 struct dri2_egl_image *dri2_img;
573
574 (void)data;
575
576 dri2_img = dri2_egl_image(image);
577
578 return dri2_img->dri_image;
579 }
580
581 const __DRIimageLookupExtension image_lookup_extension = {
582 .base = {__DRI_IMAGE_LOOKUP, 2},
583
584 .validateEGLImage = dri2_validate_egl_image,
585 .lookupEGLImageValidated = dri2_lookup_egl_image_validated,
586 };
587
588 EGLBoolean
dri2_load_driver(_EGLDisplay *disp)
590 {
591 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
592
593 dri2_dpy->kopper = disp->Options.Zink && !debug_get_bool_option("LIBGL_KOPPER_DISABLE", false);
594 dri2_dpy->kopper_without_modifiers = dri2_dpy->kopper && debug_get_bool_option("LIBGL_KOPPER_DRI2", false);
595 dri2_dpy->swrast = (disp->Options.ForceSoftware && !dri2_dpy->kopper) ||
596 !dri2_dpy->driver_name || strstr(dri2_dpy->driver_name, "swrast");
597 dri2_dpy->swrast_not_kms = dri2_dpy->swrast && (!dri2_dpy->driver_name || strcmp(dri2_dpy->driver_name, "kms_swrast"));
598
599 return EGL_TRUE;
600 }
601
602 static const char *
dri2_query_driver_name(_EGLDisplay *disp)
604 {
605 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
606 return dri2_dpy->driver_name;
607 }
608
609 static char *
dri2_query_driver_config(_EGLDisplay *disp)
611 {
612 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
613 char *ret;
614
615 ret = pipe_loader_get_driinfo_xml(dri2_dpy->driver_name);
616
617 mtx_unlock(&dri2_dpy->lock);
618
619 return ret;
620 }
621
622 void
dri2_setup_screen(_EGLDisplay *disp)
624 {
625 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
626 struct dri_screen *screen = dri_screen(dri2_dpy->dri_screen_render_gpu);
627 struct pipe_screen *pscreen = screen->base.screen;
628 unsigned int api_mask = screen->api_mask;
629
630 int caps = dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_DMABUF);
/* set if both import and export are supported */
632 if (dri2_dpy->multibuffers_available) {
633 dri2_dpy->has_dmabuf_import = (caps & DRM_PRIME_CAP_IMPORT) > 0;
634 dri2_dpy->has_dmabuf_export = (caps & DRM_PRIME_CAP_EXPORT) > 0;
635 }
636 #ifdef HAVE_ANDROID_PLATFORM
637 dri2_dpy->has_native_fence_fd = dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_NATIVE_FENCE_FD);
638 #endif
639 dri2_dpy->has_compression_modifiers = pscreen->query_compression_rates && pscreen->query_compression_modifiers;
640
/*
 * The EGL 1.5 specification defines the default swap interval as 1.
 * Moreover, eglSwapInterval() is required to clamp the requested value to
 * the supported range. Since the default value is implicitly assumed to be
 * supported, use it as both minimum and maximum for the platforms that do
 * not allow changing the interval. Platforms that do allow it (e.g. X11,
 * Wayland) override these values.
 */
649 dri2_dpy->min_swap_interval = 1;
650 dri2_dpy->max_swap_interval = 1;
651 dri2_dpy->default_swap_interval = 1;
652
653 disp->ClientAPIs = 0;
654 if ((api_mask & (1 << __DRI_API_OPENGL)) && _eglIsApiValid(EGL_OPENGL_API))
655 disp->ClientAPIs |= EGL_OPENGL_BIT;
656 if ((api_mask & (1 << __DRI_API_GLES)) && _eglIsApiValid(EGL_OPENGL_ES_API))
657 disp->ClientAPIs |= EGL_OPENGL_ES_BIT;
658 if ((api_mask & (1 << __DRI_API_GLES2)) && _eglIsApiValid(EGL_OPENGL_ES_API))
659 disp->ClientAPIs |= EGL_OPENGL_ES2_BIT;
660 if ((api_mask & (1 << __DRI_API_GLES3)) && _eglIsApiValid(EGL_OPENGL_ES_API))
661 disp->ClientAPIs |= EGL_OPENGL_ES3_BIT_KHR;
662
663 disp->Extensions.KHR_create_context = EGL_TRUE;
664 disp->Extensions.KHR_create_context_no_error = EGL_TRUE;
665 disp->Extensions.KHR_no_config_context = EGL_TRUE;
666 disp->Extensions.KHR_surfaceless_context = EGL_TRUE;
667
668 disp->Extensions.MESA_gl_interop = EGL_TRUE;
669
670 disp->Extensions.MESA_query_driver = EGL_TRUE;
671
672 /* Report back to EGL the bitmask of priorities supported */
673 disp->Extensions.IMG_context_priority =
674 dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_CONTEXT_PRIORITY_MASK);
675
676 disp->Extensions.EXT_pixel_format_float = EGL_TRUE;
677
678 if (pscreen->is_format_supported(pscreen, PIPE_FORMAT_B8G8R8A8_SRGB,
679 PIPE_TEXTURE_2D, 0, 0,
680 PIPE_BIND_RENDER_TARGET)) {
681 disp->Extensions.KHR_gl_colorspace = EGL_TRUE;
682 }
683
684 disp->Extensions.EXT_config_select_group = EGL_TRUE;
685
686 disp->Extensions.EXT_create_context_robustness =
687 dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_DEVICE_RESET_STATUS_QUERY);
688 disp->RobustBufferAccess =
689 dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_ROBUST_BUFFER_ACCESS_BEHAVIOR);
690
691 /* EXT_query_reset_notification_strategy complements and requires
692 * EXT_create_context_robustness. */
693 disp->Extensions.EXT_query_reset_notification_strategy =
694 disp->Extensions.EXT_create_context_robustness;
695
696 disp->Extensions.KHR_fence_sync = EGL_TRUE;
697 disp->Extensions.KHR_wait_sync = EGL_TRUE;
698 disp->Extensions.KHR_cl_event2 = EGL_TRUE;
699 if (dri_fence_get_caps(dri2_dpy->dri_screen_render_gpu)
700 & __DRI_FENCE_CAP_NATIVE_FD)
701 disp->Extensions.ANDROID_native_fence_sync = EGL_TRUE;
702
703 if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->get_disk_shader_cache)
704 disp->Extensions.ANDROID_blob_cache = EGL_TRUE;
705
706 disp->Extensions.KHR_reusable_sync = EGL_TRUE;
707
708 int capabilities;
709 capabilities = dri2_get_capabilities(dri2_dpy->dri_screen_render_gpu);
710 disp->Extensions.MESA_drm_image = (capabilities & __DRI_IMAGE_CAP_GLOBAL_NAMES) != 0;
711
712 #ifdef HAVE_LIBDRM
713 if (dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_DMABUF) & DRM_PRIME_CAP_EXPORT)
714 disp->Extensions.MESA_image_dma_buf_export = true;
715
716 if (dri2_dpy->has_dmabuf_import) {
717 disp->Extensions.EXT_image_dma_buf_import = EGL_TRUE;
718 disp->Extensions.EXT_image_dma_buf_import_modifiers = EGL_TRUE;
719 }
720 #endif
721 disp->Extensions.MESA_x11_native_visual_id = EGL_TRUE;
722 disp->Extensions.EXT_surface_compression = EGL_TRUE;
723 disp->Extensions.KHR_image_base = EGL_TRUE;
724 disp->Extensions.KHR_gl_renderbuffer_image = EGL_TRUE;
725 disp->Extensions.KHR_gl_texture_2D_image = EGL_TRUE;
726 disp->Extensions.KHR_gl_texture_cubemap_image = EGL_TRUE;
727
728 if (dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_MAX_TEXTURE_3D_LEVELS) != 0)
729 disp->Extensions.KHR_gl_texture_3D_image = EGL_TRUE;
730
731 disp->Extensions.KHR_context_flush_control = EGL_TRUE;
732
733 if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->set_damage_region)
734 disp->Extensions.KHR_partial_update = EGL_TRUE;
735
736 disp->Extensions.EXT_protected_surface =
737 dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_DEVICE_PROTECTED_SURFACE) != 0;
738 disp->Extensions.EXT_protected_content =
739 dri_get_screen_param(dri2_dpy->dri_screen_render_gpu, PIPE_CAP_DEVICE_PROTECTED_CONTEXT) != 0;
740 }
741
742 void
dri2_setup_swap_interval(_EGLDisplay *disp, int max_swap_interval)
744 {
745 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
746 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
747
/* Allow driconf to override the application-requested swap interval. */
749 dri2GalliumConfigQueryi(dri2_dpy->dri_screen_render_gpu, "vblank_mode", &vblank_mode);
750
751 switch (vblank_mode) {
752 case DRI_CONF_VBLANK_NEVER:
753 dri2_dpy->min_swap_interval = 0;
754 dri2_dpy->max_swap_interval = 0;
755 dri2_dpy->default_swap_interval = 0;
756 break;
757 case DRI_CONF_VBLANK_ALWAYS_SYNC:
758 dri2_dpy->min_swap_interval = 1;
759 dri2_dpy->max_swap_interval = max_swap_interval;
760 dri2_dpy->default_swap_interval = 1;
761 break;
762 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
763 dri2_dpy->min_swap_interval = 0;
764 dri2_dpy->max_swap_interval = max_swap_interval;
765 dri2_dpy->default_swap_interval = 0;
766 break;
767 default:
768 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
769 dri2_dpy->min_swap_interval = 0;
770 dri2_dpy->max_swap_interval = max_swap_interval;
771 dri2_dpy->default_swap_interval = 1;
772 break;
773 }
774 }
775
776 /* All platforms but DRM call this function to create the screen and populate
777 * the driver_configs. DRM inherits that information from its display - GBM.
778 */
779 EGLBoolean
dri2_create_screen(_EGLDisplay *disp)
781 {
782 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
783 char *driver_name_display_gpu;
784 enum dri_screen_type type = DRI_SCREEN_DRI3;
785
786 if (dri2_dpy->kopper)
787 type = DRI_SCREEN_KOPPER;
788 else if (dri2_dpy->swrast_not_kms)
789 type = DRI_SCREEN_SWRAST;
790 else if (dri2_dpy->swrast)
791 type = DRI_SCREEN_KMS_SWRAST;
792
793 if (dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu) {
794 driver_name_display_gpu =
795 loader_get_driver_for_fd(dri2_dpy->fd_display_gpu);
796 if (driver_name_display_gpu) {
/* Check that the driver names match so that non-Mesa drivers
 * do not crash.
 */
800 if (strcmp(dri2_dpy->driver_name, driver_name_display_gpu) == 0) {
801 dri2_dpy->dri_screen_display_gpu = driCreateNewScreen3(
802 0, dri2_dpy->fd_display_gpu, dri2_dpy->loader_extensions,
803 type, &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);
804 }
805 free(driver_name_display_gpu);
806 }
807 }
808
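/* Pure software rendering does not need a DRM device, so pass -1 there;
 * kms_swrast and hardware drivers keep the real fd.
 */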
809 int screen_fd = dri2_dpy->swrast_not_kms ? -1 : dri2_dpy->fd_render_gpu;
810 dri2_dpy->dri_screen_render_gpu = driCreateNewScreen3(
811 0, screen_fd, dri2_dpy->loader_extensions, type,
812 &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);
813
814 if (dri2_dpy->dri_screen_render_gpu == NULL) {
815 _eglLog(_EGL_WARNING, "egl: failed to create dri2 screen");
816 return EGL_FALSE;
817 }
818
819 if (dri2_dpy->fd_render_gpu == dri2_dpy->fd_display_gpu)
820 dri2_dpy->dri_screen_display_gpu = dri2_dpy->dri_screen_render_gpu;
821
822 dri2_dpy->own_dri_screen = true;
823 return EGL_TRUE;
824 }
825
826 EGLBoolean
dri2_setup_device(_EGLDisplay *disp, EGLBoolean software)
828 {
829 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
830 _EGLDevice *dev;
831 int render_fd;
832
833 /* If we're not software, we need a DRM node FD */
834 assert(software || dri2_dpy->fd_render_gpu >= 0);
835
/* fd_render_gpu is what we got from WSI, so it might actually be a lie
 * and not a render node... */
838 if (software) {
839 render_fd = -1;
840 } else if (loader_is_device_render_capable(dri2_dpy->fd_render_gpu)) {
841 render_fd = dri2_dpy->fd_render_gpu;
842 } else {
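/* The fd we have is not render-capable (e.g. a display-only node), so
 * try to find a compatible render-capable device for the EGLDevice.
 */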
843 render_fd = dri_query_compatible_render_only_device_fd(
844 dri2_dpy->fd_render_gpu);
845 if (render_fd < 0)
846 return EGL_FALSE;
847 }
848
849 dev = _eglFindDevice(render_fd, software);
850
851 if (render_fd >= 0 && render_fd != dri2_dpy->fd_render_gpu)
852 close(render_fd);
853
854 if (!dev)
855 return EGL_FALSE;
856
857 disp->Device = dev;
858 return EGL_TRUE;
859 }
860
861 /**
862 * Called via eglInitialize(), drv->Initialize().
863 *
864 * This must be guaranteed to be called exactly once, even if eglInitialize is
865 * called many times (without a eglTerminate in between).
866 */
867 static EGLBoolean
dri2_initialize(_EGLDisplay *disp)
869 {
870 EGLBoolean ret = EGL_FALSE;
871 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
872
873 /* In the case where the application calls eglMakeCurrent(context1),
874 * eglTerminate, then eglInitialize again (without a call to eglReleaseThread
875 * or eglMakeCurrent(NULL) before that), dri2_dpy structure is still
876 * initialized, as we need it to be able to free context1 correctly.
877 *
878 * It would probably be safest to forcibly release the display with
879 * dri2_display_release, to make sure the display is reinitialized correctly.
880 * However, the EGL spec states that we need to keep a reference to the
881 * current context (so we cannot call dri2_make_current(NULL)), and therefore
882 * we would leak context1 as we would be missing the old display connection
883 * to free it up correctly.
884 */
885 if (dri2_dpy) {
886 p_atomic_inc(&dri2_dpy->ref_count);
887 return EGL_TRUE;
888 }
889
890 loader_set_logger(_eglLog);
891
892 switch (disp->Platform) {
893 case _EGL_PLATFORM_SURFACELESS:
894 ret = dri2_initialize_surfaceless(disp);
895 break;
896 case _EGL_PLATFORM_DEVICE:
897 ret = dri2_initialize_device(disp);
898 break;
899 case _EGL_PLATFORM_X11:
900 case _EGL_PLATFORM_XCB:
901 ret = dri2_initialize_x11(disp);
902 break;
903 case _EGL_PLATFORM_DRM:
904 ret = dri2_initialize_drm(disp);
905 break;
906 case _EGL_PLATFORM_WAYLAND:
907 ret = dri2_initialize_wayland(disp);
908 break;
909 case _EGL_PLATFORM_ANDROID:
910 ret = dri2_initialize_android(disp);
911 break;
912 default:
913 unreachable("Callers ensure we cannot get here.");
914 return EGL_FALSE;
915 }
916
917 if (!ret)
918 return EGL_FALSE;
919
920 if (_eglGetArraySize(disp->Configs) == 0) {
921 _eglError(EGL_NOT_INITIALIZED, "failed to add any EGLConfigs");
922 dri2_display_destroy(disp);
923 return EGL_FALSE;
924 }
925
926 dri2_dpy = dri2_egl_display(disp);
927 p_atomic_inc(&dri2_dpy->ref_count);
928
929 mtx_init(&dri2_dpy->lock, mtx_plain);
930
931 return EGL_TRUE;
932 }
933
934 /**
935 * Decrement display reference count, and free up display if necessary.
936 */
937 static void
dri2_display_release(_EGLDisplay *disp)
939 {
940 struct dri2_egl_display *dri2_dpy;
941
942 if (!disp)
943 return;
944
945 dri2_dpy = dri2_egl_display(disp);
946
947 assert(dri2_dpy->ref_count > 0);
948
949 if (!p_atomic_dec_zero(&dri2_dpy->ref_count))
950 return;
951
952 _eglCleanupDisplay(disp);
953 dri2_display_destroy(disp);
954 }
955
956 void
dri2_display_destroy(_EGLDisplay *disp)
958 {
959 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
960
961 if (dri2_dpy->own_dri_screen) {
962 if (dri2_dpy->vtbl && dri2_dpy->vtbl->close_screen_notify)
963 dri2_dpy->vtbl->close_screen_notify(disp);
964
965 driDestroyScreen(dri2_dpy->dri_screen_render_gpu);
966
967 if (dri2_dpy->dri_screen_display_gpu &&
968 dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
969 driDestroyScreen(dri2_dpy->dri_screen_display_gpu);
970 }
971 if (dri2_dpy->fd_display_gpu >= 0 &&
972 dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
973 close(dri2_dpy->fd_display_gpu);
974 if (dri2_dpy->fd_render_gpu >= 0)
975 close(dri2_dpy->fd_render_gpu);
976
977 free(dri2_dpy->driver_name);
978
979 #ifdef HAVE_WAYLAND_PLATFORM
980 free(dri2_dpy->device_name);
981 #endif
982
983 switch (disp->Platform) {
984 case _EGL_PLATFORM_X11:
985 case _EGL_PLATFORM_XCB:
986 dri2_teardown_x11(dri2_dpy);
987 break;
988 case _EGL_PLATFORM_DRM:
989 dri2_teardown_drm(dri2_dpy);
990 break;
991 case _EGL_PLATFORM_WAYLAND:
992 dri2_teardown_wayland(dri2_dpy);
993 break;
994 case _EGL_PLATFORM_ANDROID:
995 #ifdef HAVE_ANDROID_PLATFORM
996 u_gralloc_destroy(&dri2_dpy->gralloc);
997 #endif
998 break;
999 case _EGL_PLATFORM_SURFACELESS:
1000 break;
1001 case _EGL_PLATFORM_DEVICE:
1002 break;
1003 default:
1004 unreachable("Platform teardown is not properly hooked.");
1005 break;
1006 }
1007
1008 /* The drm platform does not create the screen/driver_configs but reuses
1009 * the ones from the gbm device. As such the gbm itself is responsible
1010 * for the cleanup.
1011 */
1012 if (disp->Platform != _EGL_PLATFORM_DRM && dri2_dpy->driver_configs) {
1013 for (unsigned i = 0; dri2_dpy->driver_configs[i]; i++)
1014 free((__DRIconfig *)dri2_dpy->driver_configs[i]);
1015 free(dri2_dpy->driver_configs);
1016 }
1017 free(dri2_dpy);
1018 disp->DriverData = NULL;
1019 }
1020
1021 struct dri2_egl_display *
dri2_display_create(void)
1023 {
1024 struct dri2_egl_display *dri2_dpy = calloc(1, sizeof *dri2_dpy);
1025 if (!dri2_dpy) {
1026 _eglError(EGL_BAD_ALLOC, "eglInitialize");
1027 return NULL;
1028 }
1029
1030 dri2_dpy->fd_render_gpu = -1;
1031 dri2_dpy->fd_display_gpu = -1;
1032 dri2_dpy->multibuffers_available = true;
1033
1034 return dri2_dpy;
1035 }
1036
1037 /**
1038 * Called via eglTerminate(), drv->Terminate().
1039 *
1040 * This must be guaranteed to be called exactly once, even if eglTerminate is
1041 * called many times (without a eglInitialize in between).
1042 */
1043 static EGLBoolean
dri2_terminate(_EGLDisplay *disp)
1045 {
1046 /* Release all non-current Context/Surfaces. */
1047 _eglReleaseDisplayResources(disp);
1048
1049 dri2_display_release(disp);
1050
1051 return EGL_TRUE;
1052 }
1053
1054 /**
1055 * Set the error code after a call to
1056 * dri2_egl_display::dri2::createContextAttribs.
1057 */
1058 static void
dri2_create_context_attribs_error(int dri_error)
1060 {
1061 EGLint egl_error;
1062
1063 switch (dri_error) {
1064 case __DRI_CTX_ERROR_SUCCESS:
1065 return;
1066
1067 case __DRI_CTX_ERROR_NO_MEMORY:
1068 egl_error = EGL_BAD_ALLOC;
1069 break;
1070
1071 /* From the EGL_KHR_create_context spec, section "Errors":
1072 *
1073 * * If <config> does not support a client API context compatible
1074 * with the requested API major and minor version, [...] context
1075 * flags, and context reset notification behavior (for client API types
1076 * where these attributes are supported), then an EGL_BAD_MATCH error is
1077 * generated.
1078 *
1079 * * If an OpenGL ES context is requested and the values for
1080 * attributes EGL_CONTEXT_MAJOR_VERSION_KHR and
1081 * EGL_CONTEXT_MINOR_VERSION_KHR specify an OpenGL ES version that
* is not defined, then an EGL_BAD_MATCH error is generated.
1083 *
1084 * * If an OpenGL context is requested, the requested version is
1085 * greater than 3.2, and the value for attribute
1086 * EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR has no bits set; has any
1087 * bits set other than EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR and
1088 * EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR; has more than
1089 * one of these bits set; or if the implementation does not support
1090 * the requested profile, then an EGL_BAD_MATCH error is generated.
1091 */
1092 case __DRI_CTX_ERROR_BAD_API:
1093 case __DRI_CTX_ERROR_BAD_VERSION:
1094 case __DRI_CTX_ERROR_BAD_FLAG:
1095 egl_error = EGL_BAD_MATCH;
1096 break;
1097
1098 /* From the EGL_KHR_create_context spec, section "Errors":
1099 *
1100 * * If an attribute name or attribute value in <attrib_list> is not
1101 * recognized (including unrecognized bits in bitmask attributes),
1102 * then an EGL_BAD_ATTRIBUTE error is generated."
1103 */
1104 case __DRI_CTX_ERROR_UNKNOWN_ATTRIBUTE:
1105 case __DRI_CTX_ERROR_UNKNOWN_FLAG:
1106 egl_error = EGL_BAD_ATTRIBUTE;
1107 break;
1108
1109 default:
1110 assert(!"unknown dri_error code");
1111 egl_error = EGL_BAD_MATCH;
1112 break;
1113 }
1114
1115 _eglError(egl_error, "dri2_create_context");
1116 }
1117
1118 static bool
dri2_fill_context_attribs(struct dri2_egl_context *dri2_ctx,
1120 struct dri2_egl_display *dri2_dpy,
1121 uint32_t *ctx_attribs, unsigned *num_attribs)
1122 {
1123 int pos = 0;
1124
1125 assert(*num_attribs >= NUM_ATTRIBS);
1126
1127 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MAJOR_VERSION;
1128 ctx_attribs[pos++] = dri2_ctx->base.ClientMajorVersion;
1129 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MINOR_VERSION;
1130 ctx_attribs[pos++] = dri2_ctx->base.ClientMinorVersion;
1131
1132 if (dri2_ctx->base.Flags != 0) {
1133 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_FLAGS;
1134 ctx_attribs[pos++] = dri2_ctx->base.Flags;
1135 }
1136
1137 if (dri2_ctx->base.ResetNotificationStrategy !=
1138 EGL_NO_RESET_NOTIFICATION_KHR) {
1139 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RESET_STRATEGY;
1140 ctx_attribs[pos++] = __DRI_CTX_RESET_LOSE_CONTEXT;
1141 }
1142
1143 if (dri2_ctx->base.ContextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG) {
1144 unsigned val;
1145
1146 switch (dri2_ctx->base.ContextPriority) {
1147 case EGL_CONTEXT_PRIORITY_HIGH_IMG:
1148 val = __DRI_CTX_PRIORITY_HIGH;
1149 break;
1150 case EGL_CONTEXT_PRIORITY_MEDIUM_IMG:
1151 val = __DRI_CTX_PRIORITY_MEDIUM;
1152 break;
1153 case EGL_CONTEXT_PRIORITY_LOW_IMG:
1154 val = __DRI_CTX_PRIORITY_LOW;
1155 break;
1156 default:
1157 _eglError(EGL_BAD_CONFIG, "eglCreateContext");
1158 return false;
1159 }
1160
1161 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PRIORITY;
1162 ctx_attribs[pos++] = val;
1163 }
1164
1165 if (dri2_ctx->base.ReleaseBehavior ==
1166 EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR) {
1167 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RELEASE_BEHAVIOR;
1168 ctx_attribs[pos++] = __DRI_CTX_RELEASE_BEHAVIOR_NONE;
1169 }
1170
1171 if (dri2_ctx->base.NoError) {
1172 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_NO_ERROR;
1173 ctx_attribs[pos++] = true;
1174 }
1175
1176 if (dri2_ctx->base.Protected) {
1177 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PROTECTED;
1178 ctx_attribs[pos++] = true;
1179 }
1180
1181 *num_attribs = pos;
1182
1183 return true;
1184 }
1185
1186 /**
1187 * Called via eglCreateContext(), drv->CreateContext().
1188 */
1189 static _EGLContext *
dri2_create_context(_EGLDisplay *disp, _EGLConfig *conf,
1191 _EGLContext *share_list, const EGLint *attrib_list)
1192 {
1193 struct dri2_egl_context *dri2_ctx;
1194 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1195 struct dri2_egl_context *dri2_ctx_shared = dri2_egl_context(share_list);
1196 __DRIcontext *shared = dri2_ctx_shared ? dri2_ctx_shared->dri_context : NULL;
1197 struct dri2_egl_config *dri2_config = dri2_egl_config(conf);
1198 const __DRIconfig *dri_config;
1199 int api;
1200 unsigned error;
1201 unsigned num_attribs = NUM_ATTRIBS;
1202 uint32_t ctx_attribs[NUM_ATTRIBS];
1203
1204 dri2_ctx = malloc(sizeof *dri2_ctx);
1205 if (!dri2_ctx) {
1206 dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC, "eglCreateContext");
1207 return NULL;
1208 }
1209
1210 if (!_eglInitContext(&dri2_ctx->base, disp, conf, share_list, attrib_list))
1211 goto cleanup;
1212
1213 switch (dri2_ctx->base.ClientAPI) {
1214 case EGL_OPENGL_ES_API:
1215 switch (dri2_ctx->base.ClientMajorVersion) {
1216 case 1:
1217 api = __DRI_API_GLES;
1218 break;
1219 case 2:
1220 api = __DRI_API_GLES2;
1221 break;
1222 case 3:
1223 api = __DRI_API_GLES3;
1224 break;
1225 default:
1226 _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1227 goto cleanup;
1228 }
1229 break;
1230 case EGL_OPENGL_API:
1231 if ((dri2_ctx->base.ClientMajorVersion >= 4 ||
1232 (dri2_ctx->base.ClientMajorVersion == 3 &&
1233 dri2_ctx->base.ClientMinorVersion >= 2)) &&
1234 dri2_ctx->base.Profile == EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR)
1235 api = __DRI_API_OPENGL_CORE;
1236 else if (dri2_ctx->base.ClientMajorVersion == 3 &&
1237 dri2_ctx->base.ClientMinorVersion == 1)
1238 api = __DRI_API_OPENGL_CORE;
1239 else
1240 api = __DRI_API_OPENGL;
1241 break;
1242 default:
1243 _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1244 goto cleanup;
1245 }
1246
1247 if (conf != NULL) {
/* The config chosen here isn't necessarily the one used for surfaces
 * later; a pixmap surface will use the single-buffered config. This only
 * works because the doubleBufferMode check in
 * src/mesa/main/context.c:check_compatible() is disabled.
 */
1255 if (dri2_config->dri_config[1][0])
1256 dri_config = dri2_config->dri_config[1][0];
1257 else
1258 dri_config = dri2_config->dri_config[0][0];
1259 } else
1260 dri_config = NULL;
1261
1262 if (!dri2_fill_context_attribs(dri2_ctx, dri2_dpy, ctx_attribs,
1263 &num_attribs))
1264 goto cleanup;
1265
1266 dri2_ctx->dri_context = driCreateContextAttribs(
1267 dri2_dpy->dri_screen_render_gpu, api, dri_config, shared, num_attribs / 2,
1268 ctx_attribs, &error, dri2_ctx);
1269 dri2_create_context_attribs_error(error);
1270
1271 if (!dri2_ctx->dri_context)
1272 goto cleanup;
1273
1274 mtx_unlock(&dri2_dpy->lock);
1275
1276 return &dri2_ctx->base;
1277
1278 cleanup:
1279 mtx_unlock(&dri2_dpy->lock);
1280 free(dri2_ctx);
1281 return NULL;
1282 }
1283
1284 /**
1285 * Called via eglDestroyContext(), drv->DestroyContext().
1286 */
1287 static EGLBoolean
dri2_destroy_context(_EGLDisplay *disp, _EGLContext *ctx)
1289 {
1290 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1291
1292 if (_eglPutContext(ctx)) {
1293 driDestroyContext(dri2_ctx->dri_context);
1294 free(dri2_ctx);
1295 }
1296
1297 return EGL_TRUE;
1298 }
1299
1300 EGLBoolean
dri2_init_surface(_EGLSurface *surf, _EGLDisplay *disp, EGLint type,
1302 _EGLConfig *conf, const EGLint *attrib_list,
1303 EGLBoolean enable_out_fence, void *native_surface)
1304 {
1305 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1306
1307 dri2_surf->out_fence_fd = -1;
1308 dri2_surf->enable_out_fence = false;
1309 if (disp->Extensions.ANDROID_native_fence_sync) {
1310 dri2_surf->enable_out_fence = enable_out_fence;
1311 }
1312
1313 return _eglInitSurface(surf, disp, type, conf, attrib_list, native_surface);
1314 }
1315
1316 static void
dri2_surface_set_out_fence_fd(_EGLSurface *surf, int fence_fd)
1318 {
1319 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1320
1321 if (dri2_surf->out_fence_fd >= 0)
1322 close(dri2_surf->out_fence_fd);
1323
1324 dri2_surf->out_fence_fd = fence_fd;
1325 }
1326
1327 void
dri2_fini_surface(_EGLSurface *surf)
1329 {
1330 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1331
1332 dri2_surface_set_out_fence_fd(surf, -1);
1333 dri2_surf->enable_out_fence = false;
1334 }
1335
1336 static EGLBoolean
dri2_destroy_surface(_EGLDisplay *disp, _EGLSurface *surf)
1338 {
1339 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1340 EGLBoolean ret = EGL_TRUE;
1341
1342 if (_eglPutSurface(surf))
1343 ret = dri2_dpy->vtbl->destroy_surface(disp, surf);
1344
1345 return ret;
1346 }
1347
1348 static void
dri2_surf_update_fence_fd(_EGLContext *ctx, _EGLDisplay *disp,
1350 _EGLSurface *surf)
1351 {
1352 __DRIcontext *dri_ctx = dri2_egl_context(ctx)->dri_context;
1353 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1354 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1355 int fence_fd = -1;
1356 void *fence;
1357
1358 if (!dri2_surf->enable_out_fence)
1359 return;
1360
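/* Create a fence for the work submitted so far and extract a native fd,
 * which is then stored as the surface's out-fence below.
 */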
1361 fence = dri_create_fence_fd(dri_ctx, -1);
1362 if (fence) {
1363 fence_fd = dri_get_fence_fd(dri2_dpy->dri_screen_render_gpu, fence);
1364 dri_destroy_fence(dri2_dpy->dri_screen_render_gpu, fence);
1365 }
1366 dri2_surface_set_out_fence_fd(surf, fence_fd);
1367 }
1368
1369 EGLBoolean
dri2_create_drawable(struct dri2_egl_display *dri2_dpy,
1371 const __DRIconfig *config,
1372 struct dri2_egl_surface *dri2_surf, void *loaderPrivate)
1373 {
1374 bool is_pixmap = dri2_surf->base.Type == EGL_PBUFFER_BIT ||
1375 dri2_surf->base.Type == EGL_PIXMAP_BIT;
1376 dri2_surf->dri_drawable = dri_create_drawable(dri2_dpy->dri_screen_render_gpu, config, is_pixmap, loaderPrivate);
1377 if (dri2_surf->dri_drawable == NULL)
1378 return _eglError(EGL_BAD_ALLOC, "createNewDrawable");
1379
1380 return EGL_TRUE;
1381 }
1382
1383 /**
1384 * Called via eglMakeCurrent(), drv->MakeCurrent().
1385 */
1386 static EGLBoolean
dri2_make_current(_EGLDisplay *disp, _EGLSurface *dsurf, _EGLSurface *rsurf,
1388 _EGLContext *ctx)
1389 {
1390 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1391 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1392 _EGLDisplay *old_disp = NULL;
1393 struct dri2_egl_display *old_dri2_dpy = NULL;
1394 _EGLContext *old_ctx;
1395 _EGLSurface *old_dsurf, *old_rsurf;
1396 _EGLSurface *tmp_dsurf, *tmp_rsurf;
1397 __DRIdrawable *ddraw, *rdraw;
1398 __DRIcontext *cctx;
1399 EGLint egl_error = EGL_SUCCESS;
1400
1401 if (!dri2_dpy)
1402 return _eglError(EGL_NOT_INITIALIZED, "eglMakeCurrent");
1403
1404 /* make new bindings, set the EGL error otherwise */
1405 if (!_eglBindContext(ctx, dsurf, rsurf, &old_ctx, &old_dsurf, &old_rsurf))
1406 return EGL_FALSE;
1407
1408 if (old_ctx == ctx && old_dsurf == dsurf && old_rsurf == rsurf) {
1409 _eglPutSurface(old_dsurf);
1410 _eglPutSurface(old_rsurf);
1411 _eglPutContext(old_ctx);
1412 return EGL_TRUE;
1413 }
1414
1415 if (old_ctx) {
1416 __DRIcontext *old_cctx = dri2_egl_context(old_ctx)->dri_context;
1417 old_disp = old_ctx->Resource.Display;
1418 old_dri2_dpy = dri2_egl_display(old_disp);
1419
1420 /* Disable shared buffer mode */
1421 if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1422 old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1423 old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf, false);
1424 }
1425
1426 driUnbindContext(old_cctx);
1427
1428 if (old_dsurf)
1429 dri2_surf_update_fence_fd(old_ctx, old_disp, old_dsurf);
1430 }
1431
1432 ddraw = (dsurf) ? dri2_dpy->vtbl->get_dri_drawable(dsurf) : NULL;
1433 rdraw = (rsurf) ? dri2_dpy->vtbl->get_dri_drawable(rsurf) : NULL;
1434 cctx = (dri2_ctx) ? dri2_ctx->dri_context : NULL;
1435
1436 if (cctx) {
1437 if (!driBindContext(cctx, ddraw, rdraw)) {
1438 _EGLContext *tmp_ctx;
1439
1440 /* driBindContext failed. We cannot tell for sure why, but
1441 * setting the error to EGL_BAD_MATCH is surely better than leaving it
1442 * as EGL_SUCCESS.
1443 */
1444 egl_error = EGL_BAD_MATCH;
1445
1446 /* undo the previous _eglBindContext */
1447 _eglBindContext(old_ctx, old_dsurf, old_rsurf, &ctx, &tmp_dsurf,
1448 &tmp_rsurf);
1449 assert(&dri2_ctx->base == ctx && tmp_dsurf == dsurf &&
1450 tmp_rsurf == rsurf);
1451
1452 _eglPutSurface(dsurf);
1453 _eglPutSurface(rsurf);
1454 _eglPutContext(ctx);
1455
1456 _eglPutSurface(old_dsurf);
1457 _eglPutSurface(old_rsurf);
1458 _eglPutContext(old_ctx);
1459
1460 ddraw =
1461 (old_dsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_dsurf) : NULL;
1462 rdraw =
1463 (old_rsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_rsurf) : NULL;
1464 cctx = (old_ctx) ? dri2_egl_context(old_ctx)->dri_context : NULL;
1465
1466 /* undo the previous driUnbindContext */
1467 if (driBindContext(cctx, ddraw, rdraw)) {
1468 if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1469 old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1470 old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf,
1471 true);
1472 }
1473
1474 return _eglError(egl_error, "eglMakeCurrent");
1475 }
1476
1477 /* We cannot restore the same state as it was before calling
1478 * eglMakeCurrent() and the spec isn't clear about what to do. We
1479 * can prevent EGL from calling into the DRI driver with no DRI
1480 * context bound.
1481 */
1482 dsurf = rsurf = NULL;
1483 ctx = NULL;
1484
1485 _eglBindContext(ctx, dsurf, rsurf, &tmp_ctx, &tmp_dsurf, &tmp_rsurf);
1486 assert(tmp_ctx == old_ctx && tmp_dsurf == old_dsurf &&
1487 tmp_rsurf == old_rsurf);
1488
1489 _eglLog(_EGL_WARNING, "DRI2: failed to rebind the previous context");
1490 } else {
1491 /* driBindContext succeeded, so take a reference on the
1492 * dri2_dpy. This prevents dri2_dpy from being reinitialized when a
1493 * EGLDisplay is terminated and then initialized again while a
1494 * context is still bound. See dri2_initialize() for a more in depth
1495 * explanation. */
1496 p_atomic_inc(&dri2_dpy->ref_count);
1497 }
1498 }
1499
1500 dri2_destroy_surface(disp, old_dsurf);
1501 dri2_destroy_surface(disp, old_rsurf);
1502
1503 if (old_ctx) {
1504 dri2_destroy_context(disp, old_ctx);
1505 dri2_display_release(old_disp);
1506 }
1507
1508 if (egl_error != EGL_SUCCESS)
1509 return _eglError(egl_error, "eglMakeCurrent");
1510
1511 if (dsurf && _eglSurfaceHasMutableRenderBuffer(dsurf) &&
1512 dri2_dpy->vtbl->set_shared_buffer_mode) {
1513 /* Always update the shared buffer mode. This is obviously needed when
1514 * the active EGL_RENDER_BUFFER is EGL_SINGLE_BUFFER. When
1515 * EGL_RENDER_BUFFER is EGL_BACK_BUFFER, the update protects us in the
* case where an external non-EGL API may have changed the window's shared
* buffer mode since we last saw it.
1518 */
1519 bool mode = (dsurf->ActiveRenderBuffer == EGL_SINGLE_BUFFER);
1520 dri2_dpy->vtbl->set_shared_buffer_mode(disp, dsurf, mode);
1521 }
1522
1523 return EGL_TRUE;
1524 }
1525
1526 __DRIdrawable *
dri2_surface_get_dri_drawable(_EGLSurface *surf)
1528 {
1529 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1530
1531 return dri2_surf->dri_drawable;
1532 }
1533
1534 static _EGLSurface *
dri2_create_window_surface(_EGLDisplay *disp, _EGLConfig *conf,
1536 void *native_window, const EGLint *attrib_list)
1537 {
1538 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1539 _EGLSurface *ret = dri2_dpy->vtbl->create_window_surface(
1540 disp, conf, native_window, attrib_list);
1541 mtx_unlock(&dri2_dpy->lock);
1542 return ret;
1543 }
1544
1545 static _EGLSurface *
dri2_create_pixmap_surface(_EGLDisplay *disp, _EGLConfig *conf,
1547 void *native_pixmap, const EGLint *attrib_list)
1548 {
1549 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1550 _EGLSurface *ret = NULL;
1551
1552 if (dri2_dpy->vtbl->create_pixmap_surface)
1553 ret = dri2_dpy->vtbl->create_pixmap_surface(disp, conf, native_pixmap,
1554 attrib_list);
1555
1556 mtx_unlock(&dri2_dpy->lock);
1557
1558 return ret;
1559 }
1560
1561 static _EGLSurface *
dri2_create_pbuffer_surface(_EGLDisplay *disp, _EGLConfig *conf,
1563 const EGLint *attrib_list)
1564 {
1565 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1566 _EGLSurface *ret = NULL;
1567
1568 if (dri2_dpy->vtbl->create_pbuffer_surface)
1569 ret = dri2_dpy->vtbl->create_pbuffer_surface(disp, conf, attrib_list);
1570
1571 mtx_unlock(&dri2_dpy->lock);
1572
1573 return ret;
1574 }
1575
1576 static EGLBoolean
dri2_swap_interval(_EGLDisplay *disp, _EGLSurface *surf, EGLint interval)
1578 {
1579 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1580 EGLBoolean ret = EGL_TRUE;
1581
1582 if (dri2_dpy->vtbl->swap_interval)
1583 ret = dri2_dpy->vtbl->swap_interval(disp, surf, interval);
1584
1585 mtx_unlock(&dri2_dpy->lock);
1586
1587 return ret;
1588 }
1589
1590 /**
1591 * Asks the client API to flush any rendering to the drawable so that we can
1592 * do our swapbuffers.
1593 */
1594 void
dri2_flush_drawable_for_swapbuffers_flags(
1596 _EGLDisplay *disp, _EGLSurface *draw,
1597 enum __DRI2throttleReason throttle_reason)
1598 {
1599 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1600 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(draw);
1601
1602 /* flush not available for swrast */
1603 if (dri2_dpy->swrast_not_kms)
1604 return;
1605
1606 /* We know there's a current context because:
1607 *
1608 * "If surface is not bound to the calling thread’s current
1609 * context, an EGL_BAD_SURFACE error is generated."
1610 */
1611 _EGLContext *ctx = _eglGetCurrentContext();
1612 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1613
1614 /* From the EGL 1.4 spec (page 52):
1615 *
1616 * "The contents of ancillary buffers are always undefined
1617 * after calling eglSwapBuffers."
1618 */
1619 dri_flush(dri2_ctx->dri_context, dri_drawable,
1620 __DRI2_FLUSH_DRAWABLE | __DRI2_FLUSH_INVALIDATE_ANCILLARY,
1621 throttle_reason);
1622 }
1623
1624 void
dri2_flush_drawable_for_swapbuffers(_EGLDisplay *disp, _EGLSurface *draw)
1626 {
1627 dri2_flush_drawable_for_swapbuffers_flags(disp, draw,
1628 __DRI2_THROTTLE_SWAPBUFFER);
1629 }
1630
1631 static EGLBoolean
dri2_swap_buffers(_EGLDisplay *disp, _EGLSurface *surf)
1633 {
1634 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1635 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1636 _EGLContext *ctx = _eglGetCurrentContext();
1637 EGLBoolean ret;
1638
1639 if (ctx && surf)
1640 dri2_surf_update_fence_fd(ctx, disp, surf);
1641 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1642
1643 /* SwapBuffers marks the end of the frame; reset the damage region for
1644 * use again next time.
1645 */
1646 if (ret && disp->Extensions.KHR_partial_update)
1647 dri_set_damage_region(dri_drawable, 0, NULL);
1648
1649 return ret;
1650 }
1651
1652 static EGLBoolean
dri2_swap_buffers_with_damage(_EGLDisplay *disp, _EGLSurface *surf,
1654 const EGLint *rects, EGLint n_rects)
1655 {
1656 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1657 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1658 _EGLContext *ctx = _eglGetCurrentContext();
1659 EGLBoolean ret;
1660
1661 if (ctx && surf)
1662 dri2_surf_update_fence_fd(ctx, disp, surf);
1663 if (dri2_dpy->vtbl->swap_buffers_with_damage)
1664 ret =
1665 dri2_dpy->vtbl->swap_buffers_with_damage(disp, surf, rects, n_rects);
1666 else
1667 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1668
1669 /* SwapBuffers marks the end of the frame; reset the damage region for
1670 * use again next time.
1671 */
1672 if (ret && disp->Extensions.KHR_partial_update)
1673 dri_set_damage_region(dri_drawable, 0, NULL);
1674
1675 return ret;
1676 }
1677
1678 static EGLBoolean
dri2_swap_buffers_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint numRects,
1680 const EGLint *rects)
1681 {
1682 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1683 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1684 EGLBoolean ret;
1685
1686 if (!dri2_dpy->vtbl->swap_buffers_region)
1687 return EGL_FALSE;
1688 ret = dri2_dpy->vtbl->swap_buffers_region(disp, surf, numRects, rects);
1689
1690 /* SwapBuffers marks the end of the frame; reset the damage region for
1691 * use again next time.
1692 */
1693 if (ret && disp->Extensions.KHR_partial_update)
1694 dri_set_damage_region(dri_drawable, 0, NULL);
1695
1696 return ret;
1697 }
1698
1699 static EGLBoolean
1700 dri2_set_damage_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint *rects,
1701 EGLint n_rects)
1702 {
1703 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1704 __DRIdrawable *drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1705
1706 if (!disp->Extensions.KHR_partial_update) {
1707 mtx_unlock(&dri2_dpy->lock);
1708 return EGL_FALSE;
1709 }
1710
1711 dri_set_damage_region(drawable, n_rects, rects);
1712 mtx_unlock(&dri2_dpy->lock);
1713 return EGL_TRUE;
1714 }
1715
1716 static EGLBoolean
1717 dri2_post_sub_buffer(_EGLDisplay *disp, _EGLSurface *surf, EGLint x, EGLint y,
1718 EGLint width, EGLint height)
1719 {
1720 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1721 EGLBoolean ret = EGL_FALSE;
1722
1723 if (dri2_dpy->vtbl->post_sub_buffer)
1724 ret = dri2_dpy->vtbl->post_sub_buffer(disp, surf, x, y, width, height);
1725
1726 mtx_unlock(&dri2_dpy->lock);
1727
1728 return ret;
1729 }
1730
1731 static EGLBoolean
1732 dri2_copy_buffers(_EGLDisplay *disp, _EGLSurface *surf,
1733 void *native_pixmap_target)
1734 {
1735 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1736 if (!dri2_dpy->vtbl->copy_buffers)
1737 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_NATIVE_PIXMAP,
1738 "no support for native pixmaps");
1739 EGLBoolean ret =
1740 dri2_dpy->vtbl->copy_buffers(disp, surf, native_pixmap_target);
1741 mtx_unlock(&dri2_dpy->lock);
1742 return ret;
1743 }
1744
1745 static EGLint
1746 dri2_query_buffer_age(_EGLDisplay *disp, _EGLSurface *surf)
1747 {
1748 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1749 if (!dri2_dpy->vtbl->query_buffer_age)
1750 return 0;
1751 return dri2_dpy->vtbl->query_buffer_age(disp, surf);
1752 }
1753
1754 static EGLBoolean
1755 dri2_wait_client(_EGLDisplay *disp, _EGLContext *ctx)
1756 {
1757 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1758 _EGLSurface *surf = ctx->DrawSurface;
1759 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1760
1761 /* FIXME: If EGL allows frontbuffer rendering for window surfaces,
1762 * we need to copy fake to real here. */
1763
1764 if (!dri2_dpy->swrast_not_kms)
1765 dri_flush_drawable(dri_drawable);
1766
1767 return EGL_TRUE;
1768 }
1769
1770 static EGLBoolean
1771 dri2_wait_native(EGLint engine)
1772 {
1773 if (engine != EGL_CORE_NATIVE_ENGINE)
1774 return _eglError(EGL_BAD_PARAMETER, "eglWaitNative");
1775 /* glXWaitX(); */
1776
1777 return EGL_TRUE;
1778 }
1779
1780 static EGLBoolean
1781 dri2_bind_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
1782 {
1783 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1784 struct dri2_egl_context *dri2_ctx;
1785 _EGLContext *ctx;
1786 GLint format, target;
1787 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1788
1789 ctx = _eglGetCurrentContext();
1790 dri2_ctx = dri2_egl_context(ctx);
1791
1792 if (!_eglBindTexImage(disp, surf, buffer)) {
1793 mtx_unlock(&dri2_dpy->lock);
1794 return EGL_FALSE;
1795 }
1796
1797 switch (surf->TextureFormat) {
1798 case EGL_TEXTURE_RGB:
1799 format = __DRI_TEXTURE_FORMAT_RGB;
1800 break;
1801 case EGL_TEXTURE_RGBA:
1802 format = __DRI_TEXTURE_FORMAT_RGBA;
1803 break;
1804 default:
1805 assert(!"Unexpected texture format in dri2_bind_tex_image()");
1806 format = __DRI_TEXTURE_FORMAT_RGBA;
1807 }
1808
1809 switch (surf->TextureTarget) {
1810 case EGL_TEXTURE_2D:
1811 target = GL_TEXTURE_2D;
1812 break;
1813 default:
1814 target = GL_TEXTURE_2D;
1815 assert(!"Unexpected texture target in dri2_bind_tex_image()");
1816 }
1817
1818 dri_set_tex_buffer2(dri2_ctx->dri_context, target, format, dri_drawable);
1819
1820 mtx_unlock(&dri2_dpy->lock);
1821
1822 return EGL_TRUE;
1823 }
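/* Illustrative only: a hedged sketch of the client-side render-to-texture
 * sequence this implements, binding a pbuffer's color buffer to the currently
 * bound GL texture. "dpy", "pbuffer" and "tex" are placeholder handles, and
 * the pbuffer is assumed to have been created with EGL_TEXTURE_FORMAT and
 * EGL_TEXTURE_TARGET attributes:
 *
 *    glBindTexture(GL_TEXTURE_2D, tex);
 *    eglBindTexImage(dpy, pbuffer, EGL_BACK_BUFFER);
 *    // ... draw sampling from "tex" ...
 *    eglReleaseTexImage(dpy, pbuffer, EGL_BACK_BUFFER);
 */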
1824
1825 static EGLBoolean
1826 dri2_release_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
1827 {
1828 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1829
1830 if (!_eglReleaseTexImage(disp, surf, buffer)) {
1831 mtx_unlock(&dri2_dpy->lock);
1832 return EGL_FALSE;
1833 }
1834
1835 mtx_unlock(&dri2_dpy->lock);
1836
1837 return EGL_TRUE;
1838 }
1839
1840 static _EGLImage *
1841 dri2_create_image(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
1842 EGLClientBuffer buffer, const EGLint *attr_list)
1843 {
1844 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1845 _EGLImage *ret =
1846 dri2_dpy->vtbl->create_image(disp, ctx, target, buffer, attr_list);
1847 mtx_unlock(&dri2_dpy->lock);
1848 return ret;
1849 }
1850
1851 _EGLImage *
1852 dri2_create_image_from_dri(_EGLDisplay *disp, __DRIimage *dri_image)
1853 {
1854 struct dri2_egl_image *dri2_img;
1855
1856 if (dri_image == NULL) {
1857 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1858 return NULL;
1859 }
1860
1861 dri2_img = malloc(sizeof *dri2_img);
1862 if (!dri2_img) {
1863 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1864 return NULL;
1865 }
1866
1867 _eglInitImage(&dri2_img->base, disp);
1868
1869 dri2_img->dri_image = dri_image;
1870
1871 return &dri2_img->base;
1872 }
1873
1874 /**
1875 * Translate a DRI Image extension error code into an EGL error code.
1876 */
1877 static EGLint
1878 egl_error_from_dri_image_error(int dri_error)
1879 {
1880 switch (dri_error) {
1881 case __DRI_IMAGE_ERROR_SUCCESS:
1882 return EGL_SUCCESS;
1883 case __DRI_IMAGE_ERROR_BAD_ALLOC:
1884 return EGL_BAD_ALLOC;
1885 case __DRI_IMAGE_ERROR_BAD_MATCH:
1886 return EGL_BAD_MATCH;
1887 case __DRI_IMAGE_ERROR_BAD_PARAMETER:
1888 return EGL_BAD_PARAMETER;
1889 case __DRI_IMAGE_ERROR_BAD_ACCESS:
1890 return EGL_BAD_ACCESS;
1891 default:
1892 assert(!"unknown dri_error code");
1893 return EGL_BAD_ALLOC;
1894 }
1895 }
1896
1897 static _EGLImage *
1898 dri2_create_image_khr_renderbuffer(_EGLDisplay *disp, _EGLContext *ctx,
1899 EGLClientBuffer buffer,
1900 const EGLint *attr_list)
1901 {
1902 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1903 GLuint renderbuffer = (GLuint)(uintptr_t)buffer;
1904 __DRIimage *dri_image;
1905
1906 if (renderbuffer == 0) {
1907 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1908 return EGL_NO_IMAGE_KHR;
1909 }
1910
1911 if (!disp->Extensions.KHR_gl_renderbuffer_image) {
1912 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1913 return EGL_NO_IMAGE_KHR;
1914 }
1915
1916 unsigned error = ~0;
1917 dri_image = dri_create_image_from_renderbuffer(
1918 dri2_ctx->dri_context, renderbuffer, NULL, &error);
1919
1920 assert(!!dri_image == (error == __DRI_IMAGE_ERROR_SUCCESS));
1921
1922 if (!dri_image) {
1923 _eglError(egl_error_from_dri_image_error(error), "dri2_create_image_khr");
1924 return EGL_NO_IMAGE_KHR;
1925 }
1926
1927 return dri2_create_image_from_dri(disp, dri_image);
1928 }
1929
1930 #ifdef HAVE_WAYLAND_PLATFORM
1931
1932 /* This structure describes how a wl_buffer maps to one or more
1933 * __DRIimages. A wl_drm_buffer stores the wl_drm format code and the
1934 * offsets and strides of the planes in the buffer. This table maps a
1935 * wl_drm format code to a description of the planes in the buffer
1936 * that lets us create a __DRIimage for each of the planes. */
1937
1938 static const struct wl_drm_components_descriptor {
1939 uint32_t dri_components;
1940 EGLint components;
1941 int nplanes;
1942 } wl_drm_components[] = {
1943 {__DRI_IMAGE_COMPONENTS_RGB, EGL_TEXTURE_RGB, 1},
1944 {__DRI_IMAGE_COMPONENTS_RGBA, EGL_TEXTURE_RGBA, 1},
1945 {__DRI_IMAGE_COMPONENTS_Y_U_V, EGL_TEXTURE_Y_U_V_WL, 3},
1946 {__DRI_IMAGE_COMPONENTS_Y_UV, EGL_TEXTURE_Y_UV_WL, 2},
1947 {__DRI_IMAGE_COMPONENTS_Y_XUXV, EGL_TEXTURE_Y_XUXV_WL, 2},
1948 };
1949
1950 static _EGLImage *
1951 dri2_create_image_wayland_wl_buffer(_EGLDisplay *disp, _EGLContext *ctx,
1952 EGLClientBuffer _buffer,
1953 const EGLint *attr_list)
1954 {
1955 struct wl_drm_buffer *buffer;
1956 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1957 const struct wl_drm_components_descriptor *f;
1958 __DRIimage *dri_image;
1959 _EGLImageAttribs attrs;
1960 int32_t plane;
1961
1962 buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm,
1963 (struct wl_resource *)_buffer);
1964 if (!buffer)
1965 return NULL;
1966
1967 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
1968 return NULL;
1969
1970 plane = attrs.PlaneWL;
1971 f = buffer->driver_format;
1972 if (plane < 0 || plane >= f->nplanes) {
1973 _eglError(EGL_BAD_PARAMETER,
1974 "dri2_create_image_wayland_wl_buffer (plane out of bounds)");
1975 return NULL;
1976 }
1977
1978 dri_image = dri2_from_planar(buffer->driver_buffer, plane, NULL);
1979 if (dri_image == NULL && plane == 0)
1980 dri_image = dri2_dup_image(buffer->driver_buffer, NULL);
1981 if (dri_image == NULL) {
1982 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_wayland_wl_buffer");
1983 return NULL;
1984 }
1985
1986 return dri2_create_image_from_dri(disp, dri_image);
1987 }
1988 #endif
1989
1990 static EGLBoolean
1991 dri2_get_sync_values_chromium(_EGLDisplay *disp, _EGLSurface *surf,
1992 EGLuint64KHR *ust, EGLuint64KHR *msc,
1993 EGLuint64KHR *sbc)
1994 {
1995 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1996 EGLBoolean ret = EGL_FALSE;
1997
1998 if (dri2_dpy->vtbl->get_sync_values)
1999 ret = dri2_dpy->vtbl->get_sync_values(disp, surf, ust, msc, sbc);
2000
2001 return ret;
2002 }
2003
2004 static EGLBoolean
2005 dri2_get_msc_rate_angle(_EGLDisplay *disp, _EGLSurface *surf, EGLint *numerator,
2006 EGLint *denominator)
2007 {
2008 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2009 if (!dri2_dpy->vtbl->get_msc_rate)
2010 return EGL_FALSE;
2011 return dri2_dpy->vtbl->get_msc_rate(disp, surf, numerator, denominator);
2012 }
2013
2014 /**
2015 * Set the error code after a call to
2016 * dri2_create_from_texture().
2017 */
2018 static void
2019 dri2_create_image_khr_texture_error(int dri_error)
2020 {
2021 EGLint egl_error = egl_error_from_dri_image_error(dri_error);
2022
2023 if (egl_error != EGL_SUCCESS)
2024 _eglError(egl_error, "dri2_create_image_khr_texture");
2025 }
2026
2027 static _EGLImage *
2028 dri2_create_image_khr_texture(_EGLDisplay *disp, _EGLContext *ctx,
2029 EGLenum target, EGLClientBuffer buffer,
2030 const EGLint *attr_list)
2031 {
2032 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
2033 struct dri2_egl_image *dri2_img;
2034 GLuint texture = (GLuint)(uintptr_t)buffer;
2035 _EGLImageAttribs attrs;
2036 GLuint depth;
2037 GLenum gl_target;
2038 unsigned error = __DRI_IMAGE_ERROR_SUCCESS;
2039
2040 if (texture == 0) {
2041 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2042 return EGL_NO_IMAGE_KHR;
2043 }
2044
2045 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2046 return EGL_NO_IMAGE_KHR;
2047
2048 switch (target) {
2049 case EGL_GL_TEXTURE_2D_KHR:
2050 if (!disp->Extensions.KHR_gl_texture_2D_image) {
2051 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2052 return EGL_NO_IMAGE_KHR;
2053 }
2054 depth = 0;
2055 gl_target = GL_TEXTURE_2D;
2056 break;
2057 case EGL_GL_TEXTURE_3D_KHR:
2058 if (!disp->Extensions.KHR_gl_texture_3D_image) {
2059 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2060 return EGL_NO_IMAGE_KHR;
2061 }
2062
2063 depth = attrs.GLTextureZOffset;
2064 gl_target = GL_TEXTURE_3D;
2065 break;
2066 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
2067 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
2068 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
2069 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
2070 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
2071 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
2072 if (!disp->Extensions.KHR_gl_texture_cubemap_image) {
2073 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2074 return EGL_NO_IMAGE_KHR;
2075 }
2076
2077 depth = target - EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR;
2078 gl_target = GL_TEXTURE_CUBE_MAP;
2079 break;
2080 default:
2081 unreachable("Unexpected target in dri2_create_image_khr_texture()");
2082 return EGL_NO_IMAGE_KHR;
2083 }
2084
2085 dri2_img = malloc(sizeof *dri2_img);
2086 if (!dri2_img) {
2087 _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2088 return EGL_NO_IMAGE_KHR;
2089 }
2090
2091 _eglInitImage(&dri2_img->base, disp);
2092
2093 dri2_img->dri_image = dri2_create_from_texture(
2094 dri2_ctx->dri_context, gl_target, texture, depth, attrs.GLTextureLevel,
2095 &error, NULL);
2096 dri2_create_image_khr_texture_error(error);
2097
2098 if (!dri2_img->dri_image) {
2099 free(dri2_img);
2100 return EGL_NO_IMAGE_KHR;
2101 }
2102 return &dri2_img->base;
2103 }
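/* Illustrative only: a minimal sketch of how a client reaches the texture
 * path above, creating an EGLImage from level 0 of a GL 2D texture. "dpy",
 * "ctx" and "tex" are placeholders, and the texture is assumed to already be
 * allocated and complete:
 *
 *    const EGLint attrs[] = { EGL_GL_TEXTURE_LEVEL_KHR, 0, EGL_NONE };
 *    EGLImageKHR img = eglCreateImageKHR(dpy, ctx, EGL_GL_TEXTURE_2D_KHR,
 *                                        (EGLClientBuffer)(uintptr_t)tex,
 *                                        attrs);
 */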
2104
2105 static EGLBoolean
2106 dri2_query_surface(_EGLDisplay *disp, _EGLSurface *surf, EGLint attribute,
2107 EGLint *value)
2108 {
2109 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2110 EGLBoolean ret;
2111
2112 if (!dri2_dpy->vtbl->query_surface) {
2113 ret = _eglQuerySurface(disp, surf, attribute, value);
2114 } else {
2115 ret = dri2_dpy->vtbl->query_surface(disp, surf, attribute, value);
2116 }
2117
2118 return ret;
2119 }
2120
2121 static struct wl_buffer *
2122 dri2_create_wayland_buffer_from_image(_EGLDisplay *disp, _EGLImage *img)
2123 {
2124 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2125 struct wl_buffer *ret = NULL;
2126
2127 if (dri2_dpy->vtbl->create_wayland_buffer_from_image)
2128 ret = dri2_dpy->vtbl->create_wayland_buffer_from_image(disp, img);
2129
2130 mtx_unlock(&dri2_dpy->lock);
2131
2132 return ret;
2133 }
2134
2135 #ifdef HAVE_LIBDRM
2136 static _EGLImage *
2137 dri2_create_image_mesa_drm_buffer(_EGLDisplay *disp, _EGLContext *ctx,
2138 EGLClientBuffer buffer,
2139 const EGLint *attr_list)
2140 {
2141 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2142 EGLint name, pitch;
2143 uint32_t fourcc;
2144 _EGLImageAttribs attrs;
2145 __DRIimage *dri_image;
2146
2147 name = (EGLint)(uintptr_t)buffer;
2148
2149 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2150 return NULL;
2151
2152 if (attrs.Width <= 0 || attrs.Height <= 0 ||
2153 attrs.DRMBufferStrideMESA <= 0) {
2154 _eglError(EGL_BAD_PARAMETER, "bad width, height or stride");
2155 return NULL;
2156 }
2157
2158 switch (attrs.DRMBufferFormatMESA) {
2159 case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2160 fourcc = DRM_FORMAT_ARGB8888;
2161 pitch = attrs.DRMBufferStrideMESA * 4;
2162 break;
2163 default:
2164 _eglError(EGL_BAD_PARAMETER,
2165 "dri2_create_image_khr: unsupported pixmap depth");
2166 return NULL;
2167 }
2168
2169 dri_image = dri2_from_names(
2170 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height, fourcc,
2171 (int *) &name, 1, (int *) &pitch, 0, NULL);
2172
2173 return dri2_create_image_from_dri(disp, dri_image);
2174 }
2175
2176 static EGLBoolean
2177 dri2_check_dma_buf_attribs(const _EGLImageAttribs *attrs)
2178 {
2179 /**
2180 * The spec says:
2181 *
2182 * "Required attributes and their values are as follows:
2183 *
2184 * * EGL_WIDTH & EGL_HEIGHT: The logical dimensions of the buffer in pixels
2185 *
2186 * * EGL_LINUX_DRM_FOURCC_EXT: The pixel format of the buffer, as specified
2187 * by drm_fourcc.h and used as the pixel_format parameter of the
2188 * drm_mode_fb_cmd2 ioctl."
2189 *
2190 * and
2191 *
2192 * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2193 * incomplete, EGL_BAD_PARAMETER is generated."
2194 */
2195 if (attrs->Width <= 0 || attrs->Height <= 0 ||
2196 !attrs->DMABufFourCC.IsPresent)
2197 return _eglError(EGL_BAD_PARAMETER, "attribute(s) missing");
2198
2199 /**
2200 * Also:
2201 *
2202 * "If <target> is EGL_LINUX_DMA_BUF_EXT and one or more of the values
2203 * specified for a plane's pitch or offset isn't supported by EGL,
2204 * EGL_BAD_ACCESS is generated."
2205 */
2206 for (unsigned i = 0; i < ARRAY_SIZE(attrs->DMABufPlanePitches); ++i) {
2207 if (attrs->DMABufPlanePitches[i].IsPresent &&
2208 attrs->DMABufPlanePitches[i].Value <= 0)
2209 return _eglError(EGL_BAD_ACCESS, "invalid pitch");
2210 }
2211
2212 /**
2213 * If <target> is EGL_LINUX_DMA_BUF_EXT, both or neither of the following
2214 * attribute values may be given.
2215 *
2216 * This is referring to EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT and
2217 * EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, and the same for other planes.
2218 */
2219 for (unsigned i = 0; i < DMA_BUF_MAX_PLANES; ++i) {
2220 if (attrs->DMABufPlaneModifiersLo[i].IsPresent !=
2221 attrs->DMABufPlaneModifiersHi[i].IsPresent)
2222 return _eglError(EGL_BAD_PARAMETER,
2223 "modifier attribute lo or hi missing");
2224 }
2225
2226 /* Although the EGL_EXT_image_dma_buf_import_modifiers spec doesn't
2227 * mandate it, we only accept the same modifier across all planes. */
2228 for (unsigned i = 1; i < DMA_BUF_MAX_PLANES; ++i) {
2229 if (attrs->DMABufPlaneFds[i].IsPresent) {
2230 if ((attrs->DMABufPlaneModifiersLo[0].IsPresent !=
2231 attrs->DMABufPlaneModifiersLo[i].IsPresent) ||
2232 (attrs->DMABufPlaneModifiersLo[0].Value !=
2233 attrs->DMABufPlaneModifiersLo[i].Value) ||
2234 (attrs->DMABufPlaneModifiersHi[0].Value !=
2235 attrs->DMABufPlaneModifiersHi[i].Value))
2236 return _eglError(EGL_BAD_PARAMETER,
2237 "modifier attributes not equal");
2238 }
2239 }
2240
2241 return EGL_TRUE;
2242 }
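/* Illustrative only: a hedged example of an attribute list that satisfies the
 * checks above for a single-plane XRGB8888 dma-buf import. "width", "height",
 * "dmabuf_fd", "stride" and "modifier" are placeholders supplied by the
 * exporting device; note that the modifier LO/HI pair is given for the plane,
 * matching the "both or neither" rule:
 *
 *    const EGLint attrs[] = {
 *       EGL_WIDTH,                          width,
 *       EGL_HEIGHT,                         height,
 *       EGL_LINUX_DRM_FOURCC_EXT,           DRM_FORMAT_XRGB8888,
 *       EGL_DMA_BUF_PLANE0_FD_EXT,          dmabuf_fd,
 *       EGL_DMA_BUF_PLANE0_OFFSET_EXT,      0,
 *       EGL_DMA_BUF_PLANE0_PITCH_EXT,       stride,
 *       EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, (EGLint)(modifier & 0xffffffff),
 *       EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, (EGLint)(modifier >> 32),
 *       EGL_NONE,
 *    };
 */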
2243
2244 /* Returns the total number of planes for the format or zero if it isn't a
2245 * valid fourcc format.
2246 */
2247 static unsigned
2248 dri2_num_fourcc_format_planes(EGLint format)
2249 {
2250 switch (format) {
2251 case DRM_FORMAT_R8:
2252 case DRM_FORMAT_RG88:
2253 case DRM_FORMAT_GR88:
2254 case DRM_FORMAT_R16:
2255 case DRM_FORMAT_GR1616:
2256 case DRM_FORMAT_RGB332:
2257 case DRM_FORMAT_BGR233:
2258 case DRM_FORMAT_XRGB4444:
2259 case DRM_FORMAT_XBGR4444:
2260 case DRM_FORMAT_RGBX4444:
2261 case DRM_FORMAT_BGRX4444:
2262 case DRM_FORMAT_ARGB4444:
2263 case DRM_FORMAT_ABGR4444:
2264 case DRM_FORMAT_RGBA4444:
2265 case DRM_FORMAT_BGRA4444:
2266 case DRM_FORMAT_XRGB1555:
2267 case DRM_FORMAT_XBGR1555:
2268 case DRM_FORMAT_RGBX5551:
2269 case DRM_FORMAT_BGRX5551:
2270 case DRM_FORMAT_ARGB1555:
2271 case DRM_FORMAT_ABGR1555:
2272 case DRM_FORMAT_RGBA5551:
2273 case DRM_FORMAT_BGRA5551:
2274 case DRM_FORMAT_RGB565:
2275 case DRM_FORMAT_BGR565:
2276 case DRM_FORMAT_RGB888:
2277 case DRM_FORMAT_BGR888:
2278 case DRM_FORMAT_XRGB8888:
2279 case DRM_FORMAT_XBGR8888:
2280 case DRM_FORMAT_RGBX8888:
2281 case DRM_FORMAT_BGRX8888:
2282 case DRM_FORMAT_ARGB8888:
2283 case DRM_FORMAT_ABGR8888:
2284 case DRM_FORMAT_RGBA8888:
2285 case DRM_FORMAT_BGRA8888:
2286 case DRM_FORMAT_XRGB2101010:
2287 case DRM_FORMAT_XBGR2101010:
2288 case DRM_FORMAT_RGBX1010102:
2289 case DRM_FORMAT_BGRX1010102:
2290 case DRM_FORMAT_ARGB2101010:
2291 case DRM_FORMAT_ABGR2101010:
2292 case DRM_FORMAT_RGBA1010102:
2293 case DRM_FORMAT_BGRA1010102:
2294 case DRM_FORMAT_ABGR16161616:
2295 case DRM_FORMAT_XBGR16161616:
2296 case DRM_FORMAT_XBGR16161616F:
2297 case DRM_FORMAT_ABGR16161616F:
2298 case DRM_FORMAT_YUYV:
2299 case DRM_FORMAT_YVYU:
2300 case DRM_FORMAT_UYVY:
2301 case DRM_FORMAT_VYUY:
2302 case DRM_FORMAT_AYUV:
2303 case DRM_FORMAT_XYUV8888:
2304 case DRM_FORMAT_Y210:
2305 case DRM_FORMAT_Y212:
2306 case DRM_FORMAT_Y216:
2307 case DRM_FORMAT_Y410:
2308 case DRM_FORMAT_Y412:
2309 case DRM_FORMAT_Y416:
2310 return 1;
2311
2312 case DRM_FORMAT_NV12:
2313 case DRM_FORMAT_NV21:
2314 case DRM_FORMAT_NV16:
2315 case DRM_FORMAT_NV61:
2316 case DRM_FORMAT_P010:
2317 case DRM_FORMAT_P012:
2318 case DRM_FORMAT_P016:
2319 case DRM_FORMAT_P030:
2320 return 2;
2321
2322 case DRM_FORMAT_YUV410:
2323 case DRM_FORMAT_YVU410:
2324 case DRM_FORMAT_YUV411:
2325 case DRM_FORMAT_YVU411:
2326 case DRM_FORMAT_YUV420:
2327 case DRM_FORMAT_YVU420:
2328 case DRM_FORMAT_YUV422:
2329 case DRM_FORMAT_YVU422:
2330 case DRM_FORMAT_YUV444:
2331 case DRM_FORMAT_YVU444:
2332 return 3;
2333
2334 default:
2335 return 0;
2336 }
2337 }
2338
2339 /* Returns the total number of file descriptors. Zero indicates an error. */
2340 static unsigned
2341 dri2_check_dma_buf_format(const _EGLImageAttribs *attrs)
2342 {
2343 unsigned plane_n = dri2_num_fourcc_format_planes(attrs->DMABufFourCC.Value);
2344 if (plane_n == 0) {
2345 _eglError(EGL_BAD_MATCH, "unknown drm fourcc format");
2346 return 0;
2347 }
2348
2349 for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; i++) {
2350 /**
2351 * The modifiers extension spec says:
2352 *
2353 * "Modifiers may modify any attribute of a buffer import, including
2354 * but not limited to adding extra planes to a format which
2355 * otherwise does not have those planes. As an example, a modifier
2356 * may add a plane for an external compression buffer to a
2357 * single-plane format. The exact meaning and effect of any
2358 * modifier is canonically defined by drm_fourcc.h, not as part of
2359 * this extension."
2360 */
2361 if (attrs->DMABufPlaneModifiersLo[i].IsPresent &&
2362 attrs->DMABufPlaneModifiersHi[i].IsPresent) {
2363 plane_n = i + 1;
2364 }
2365 }
2366
2367 /**
2368 * The spec says:
2369 *
2370 * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2371 * incomplete, EGL_BAD_PARAMETER is generated."
2372 */
2373 for (unsigned i = 0; i < plane_n; ++i) {
2374 if (!attrs->DMABufPlaneFds[i].IsPresent ||
2375 !attrs->DMABufPlaneOffsets[i].IsPresent ||
2376 !attrs->DMABufPlanePitches[i].IsPresent) {
2377 _eglError(EGL_BAD_PARAMETER, "plane attribute(s) missing");
2378 return 0;
2379 }
2380 }
2381
2382 /**
2383 * The spec also says:
2384 *
2385 * "If <target> is EGL_LINUX_DMA_BUF_EXT, and the EGL_LINUX_DRM_FOURCC_EXT
2386 * attribute indicates a single-plane format, EGL_BAD_ATTRIBUTE is
2387 * generated if any of the EGL_DMA_BUF_PLANE1_* or EGL_DMA_BUF_PLANE2_*
2388 * or EGL_DMA_BUF_PLANE3_* attributes are specified."
2389 */
2390 for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; ++i) {
2391 if (attrs->DMABufPlaneFds[i].IsPresent ||
2392 attrs->DMABufPlaneOffsets[i].IsPresent ||
2393 attrs->DMABufPlanePitches[i].IsPresent) {
2394 _eglError(EGL_BAD_ATTRIBUTE, "too many plane attributes");
2395 return 0;
2396 }
2397 }
2398
2399 return plane_n;
2400 }
2401
2402 static EGLBoolean
2403 dri2_query_dma_buf_formats(_EGLDisplay *disp, EGLint max, EGLint *formats,
2404 EGLint *count)
2405 {
2406 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2407 if (max < 0 || (max > 0 && formats == NULL)) {
2408 _eglError(EGL_BAD_PARAMETER, "invalid value for max count of formats");
2409 goto fail;
2410 }
2411
2412 if (!dri2_dpy->has_dmabuf_import)
2413 goto fail;
2414
2415 if (!dri_query_dma_buf_formats(dri2_dpy->dri_screen_render_gpu,
2416 max, formats, count))
2417 goto fail;
2418
2419 if (max > 0) {
2420 /* Assert that all of the formats returned are actually fourcc formats.
2421 * Some day, if we want the internal interface function to be able to
2422 * return the fake fourcc formats defined in mesa_interface.h, we'll have
2423 * to do something more clever here to pare the list down to just real
2424 * fourcc formats so that we don't leak the fake internal ones.
2425 */
2426 for (int i = 0; i < *count; i++) {
2427 assert(dri2_num_fourcc_format_planes(formats[i]) > 0);
2428 }
2429 }
2430
2431 mtx_unlock(&dri2_dpy->lock);
2432
2433 return EGL_TRUE;
2434
2435 fail:
2436 mtx_unlock(&dri2_dpy->lock);
2437 return EGL_FALSE;
2438 }
2439
2440 static EGLBoolean
2441 dri2_query_dma_buf_modifiers(_EGLDisplay *disp, EGLint format, EGLint max,
2442 EGLuint64KHR *modifiers, EGLBoolean *external_only,
2443 EGLint *count)
2444 {
2445 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2446
2447 if (dri2_num_fourcc_format_planes(format) == 0)
2448 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2449 "invalid fourcc format");
2450
2451 if (max < 0)
2452 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2453 "invalid value for max count of formats");
2454
2455 if (max > 0 && modifiers == NULL)
2456 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2457 "invalid modifiers array");
2458
2459 if (!dri2_dpy->has_dmabuf_import) {
2460 mtx_unlock(&dri2_dpy->lock);
2461 return EGL_FALSE;
2462 }
2463
2464 if (dri_query_dma_buf_modifiers(
2465 dri2_dpy->dri_screen_render_gpu, format, max, modifiers,
2466 (unsigned int *)external_only, count) == false)
2467 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2468 "invalid format");
2469
2470 mtx_unlock(&dri2_dpy->lock);
2471
2472 return EGL_TRUE;
2473 }
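/* Illustrative only: a hedged sketch of the usual two-call pattern a client
 * drives against the entry point above, first querying the count and then
 * fetching the modifier list. "dpy" is a placeholder display handle and error
 * checking is omitted:
 *
 *    EGLint n = 0;
 *    eglQueryDmaBufModifiersEXT(dpy, DRM_FORMAT_XRGB8888, 0, NULL, NULL, &n);
 *    EGLuint64KHR *mods = malloc(n * sizeof(*mods));
 *    eglQueryDmaBufModifiersEXT(dpy, DRM_FORMAT_XRGB8888, n, mods, NULL, &n);
 */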
2474
2475 /**
2476 * The spec says:
2477 *
2478 * "If eglCreateImageKHR is successful for a EGL_LINUX_DMA_BUF_EXT target, the
2479 * EGL will take a reference to the dma_buf(s) which it will release at any
2480 * time while the EGLDisplay is initialized. It is the responsibility of the
2481 * application to close the dma_buf file descriptors."
2482 *
2483 * Therefore we must never close or otherwise modify the file descriptors.
2484 */
2485 _EGLImage *
2486 dri2_create_image_dma_buf(_EGLDisplay *disp, _EGLContext *ctx,
2487 EGLClientBuffer buffer, const EGLint *attr_list)
2488 {
2489 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2490 _EGLImage *res;
2491 _EGLImageAttribs attrs;
2492 __DRIimage *dri_image;
2493 unsigned num_fds;
2494 int fds[DMA_BUF_MAX_PLANES];
2495 int pitches[DMA_BUF_MAX_PLANES];
2496 int offsets[DMA_BUF_MAX_PLANES];
2497 uint64_t modifier;
2498 unsigned error = __DRI_IMAGE_ERROR_SUCCESS;
2499 EGLint egl_error;
2500
2501 /**
2502 * The spec says:
2503 *
2504 * ""* If <target> is EGL_LINUX_DMA_BUF_EXT and <buffer> is not NULL, the
2505 * error EGL_BAD_PARAMETER is generated."
2506 */
2507 if (buffer != NULL) {
2508 _eglError(EGL_BAD_PARAMETER, "buffer not NULL");
2509 return NULL;
2510 }
2511
2512 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2513 return NULL;
2514
2515 if (!dri2_check_dma_buf_attribs(&attrs))
2516 return NULL;
2517
2518 num_fds = dri2_check_dma_buf_format(&attrs);
2519 if (!num_fds)
2520 return NULL;
2521
2522 for (unsigned i = 0; i < num_fds; ++i) {
2523 fds[i] = attrs.DMABufPlaneFds[i].Value;
2524 pitches[i] = attrs.DMABufPlanePitches[i].Value;
2525 offsets[i] = attrs.DMABufPlaneOffsets[i].Value;
2526 }
2527
2528 /* dri2_check_dma_buf_attribs ensures that the modifier, if available,
2529 * will be present in attrs.DMABufPlaneModifiersLo[0] and
2530 * attrs.DMABufPlaneModifiersHi[0] */
2531 if (attrs.DMABufPlaneModifiersLo[0].IsPresent) {
2532 modifier = combine_u32_into_u64(attrs.DMABufPlaneModifiersHi[0].Value,
2533 attrs.DMABufPlaneModifiersLo[0].Value);
2534 } else {
2535 modifier = DRM_FORMAT_MOD_INVALID;
2536 }
2537
2538 uint32_t flags = 0;
2539 if (attrs.ProtectedContent)
2540 flags |= __DRI_IMAGE_PROTECTED_CONTENT_FLAG;
2541
2542 dri_image = dri2_from_dma_bufs(
2543 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
2544 attrs.DMABufFourCC.Value, modifier, fds, num_fds, pitches, offsets,
2545 attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
2546 attrs.DMABufChromaHorizontalSiting.Value,
2547 attrs.DMABufChromaVerticalSiting.Value,
2548 flags, &error, NULL);
2549
2550 egl_error = egl_error_from_dri_image_error(error);
2551 if (egl_error != EGL_SUCCESS)
2552 _eglError(egl_error, "createImageFromDmaBufs failed");
2553
2554 if (!dri_image)
2555 return EGL_NO_IMAGE_KHR;
2556
2557 res = dri2_create_image_from_dri(disp, dri_image);
2558
2559 return res;
2560 }
2561
2562 static _EGLImage *
2563 dri2_create_drm_image_mesa(_EGLDisplay *disp, const EGLint *attr_list)
2564 {
2565 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2566 struct dri2_egl_image *dri2_img;
2567 _EGLImageAttribs attrs;
2568 unsigned int dri_use, valid_mask;
2569 int format;
2570
2571 if (!attr_list) {
2572 _eglError(EGL_BAD_PARAMETER, __func__);
2573 goto fail;
2574 }
2575
2576 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2577 goto fail;
2578
2579 if (attrs.Width <= 0 || attrs.Height <= 0) {
2580 _eglError(EGL_BAD_PARAMETER, __func__);
2581 goto fail;
2582 }
2583
2584 switch (attrs.DRMBufferFormatMESA) {
2585 case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2586 format = PIPE_FORMAT_BGRA8888_UNORM;
2587 break;
2588 default:
2589 _eglError(EGL_BAD_PARAMETER, __func__);
2590 goto fail;
2591 }
2592
2593 valid_mask = EGL_DRM_BUFFER_USE_SCANOUT_MESA |
2594 EGL_DRM_BUFFER_USE_SHARE_MESA | EGL_DRM_BUFFER_USE_CURSOR_MESA;
2595 if (attrs.DRMBufferUseMESA & ~valid_mask) {
2596 _eglError(EGL_BAD_PARAMETER, __func__);
2597 goto fail;
2598 }
2599
2600 dri_use = 0;
2601 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SHARE_MESA)
2602 dri_use |= __DRI_IMAGE_USE_SHARE;
2603 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SCANOUT_MESA)
2604 dri_use |= __DRI_IMAGE_USE_SCANOUT;
2605 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_CURSOR_MESA)
2606 dri_use |= __DRI_IMAGE_USE_CURSOR;
2607
2608 dri2_img = malloc(sizeof *dri2_img);
2609 if (!dri2_img) {
2610 _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2611 goto fail;
2612 }
2613
2614 _eglInitImage(&dri2_img->base, disp);
2615
2616 dri2_img->dri_image =
2617 dri_create_image(dri2_dpy->dri_screen_render_gpu, attrs.Width,
2618 attrs.Height, format, NULL, 0, dri_use, dri2_img);
2619 if (dri2_img->dri_image == NULL) {
2620 free(dri2_img);
2621 _eglError(EGL_BAD_ALLOC, "dri2_create_drm_image_mesa");
2622 goto fail;
2623 }
2624
2625 mtx_unlock(&dri2_dpy->lock);
2626
2627 return &dri2_img->base;
2628
2629 fail:
2630 mtx_unlock(&dri2_dpy->lock);
2631 return EGL_NO_IMAGE_KHR;
2632 }
2633
2634 static EGLBoolean
2635 dri2_export_drm_image_mesa(_EGLDisplay *disp, _EGLImage *img, EGLint *name,
2636 EGLint *handle, EGLint *stride)
2637 {
2638 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2639 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2640
2641 if (name && !dri2_query_image(dri2_img->dri_image,
2642 __DRI_IMAGE_ATTRIB_NAME, name))
2643 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC,
2644 "dri2_export_drm_image_mesa");
2645
2646 if (handle)
2647 dri2_query_image(dri2_img->dri_image,
2648 __DRI_IMAGE_ATTRIB_HANDLE, handle);
2649
2650 if (stride)
2651 dri2_query_image(dri2_img->dri_image,
2652 __DRI_IMAGE_ATTRIB_STRIDE, stride);
2653
2654 mtx_unlock(&dri2_dpy->lock);
2655
2656 return EGL_TRUE;
2657 }
2658
2659 /**
2660 * Checks if we can support EGL_MESA_image_dma_buf_export on this image.
2661 *
2662 * The spec provides a boolean return for the driver to reject exporting for
2663 * basically any reason, but doesn't specify any particular error cases. For
2664 * now, we just fail if we don't have a DRM fourcc for the format.
2665 */
2666 static bool
2667 dri2_can_export_dma_buf_image(_EGLDisplay *disp, _EGLImage *img)
2668 {
2669 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2670 EGLint fourcc;
2671
2672 if (!dri2_query_image(dri2_img->dri_image,
2673 __DRI_IMAGE_ATTRIB_FOURCC, &fourcc)) {
2674 return false;
2675 }
2676
2677 return true;
2678 }
2679
2680 static EGLBoolean
2681 dri2_export_dma_buf_image_query_mesa(_EGLDisplay *disp, _EGLImage *img,
2682 EGLint *fourcc, EGLint *nplanes,
2683 EGLuint64KHR *modifiers)
2684 {
2685 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2686 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2687 int num_planes;
2688
2689 if (!dri2_can_export_dma_buf_image(disp, img)) {
2690 mtx_unlock(&dri2_dpy->lock);
2691 return EGL_FALSE;
2692 }
2693
2694 dri2_query_image(dri2_img->dri_image,
2695 __DRI_IMAGE_ATTRIB_NUM_PLANES, &num_planes);
2696 if (nplanes)
2697 *nplanes = num_planes;
2698
2699 if (fourcc)
2700 dri2_query_image(dri2_img->dri_image,
2701 __DRI_IMAGE_ATTRIB_FOURCC, fourcc);
2702
2703 if (modifiers) {
2704 int mod_hi, mod_lo;
2705 uint64_t modifier = DRM_FORMAT_MOD_INVALID;
2706 bool query;
2707
2708 query = dri2_query_image(
2709 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod_hi);
2710 query &= dri2_query_image(
2711 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod_lo);
2712 if (query)
2713 modifier = combine_u32_into_u64(mod_hi, mod_lo);
2714
2715 for (int i = 0; i < num_planes; i++)
2716 modifiers[i] = modifier;
2717 }
2718
2719 mtx_unlock(&dri2_dpy->lock);
2720
2721 return EGL_TRUE;
2722 }
2723
2724 static EGLBoolean
2725 dri2_export_dma_buf_image_mesa(_EGLDisplay *disp, _EGLImage *img, int *fds,
2726 EGLint *strides, EGLint *offsets)
2727 {
2728 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2729 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2730 EGLint nplanes;
2731
2732 if (!dri2_can_export_dma_buf_image(disp, img)) {
2733 mtx_unlock(&dri2_dpy->lock);
2734 return EGL_FALSE;
2735 }
2736
2737 /* EGL_MESA_image_dma_buf_export spec says:
2738 * "If the number of fds is less than the number of planes, then
2739 * subsequent fd slots should contain -1."
2740 */
2741 if (fds) {
2742 /* Query nplanes so that we know how big the given array is. */
2743 dri2_query_image(dri2_img->dri_image,
2744 __DRI_IMAGE_ATTRIB_NUM_PLANES, &nplanes);
2745 memset(fds, -1, nplanes * sizeof(int));
2746 }
2747
2748 /* rework later to provide multiple fds/strides/offsets */
2749 if (fds)
2750 dri2_query_image(dri2_img->dri_image, __DRI_IMAGE_ATTRIB_FD,
2751 fds);
2752
2753 if (strides)
2754 dri2_query_image(dri2_img->dri_image,
2755 __DRI_IMAGE_ATTRIB_STRIDE, strides);
2756
2757 if (offsets) {
2758 int img_offset;
2759 bool ret = dri2_query_image(
2760 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_OFFSET, &img_offset);
2761 if (ret)
2762 offsets[0] = img_offset;
2763 else
2764 offsets[0] = 0;
2765 }
2766
2767 mtx_unlock(&dri2_dpy->lock);
2768
2769 return EGL_TRUE;
2770 }
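/* Illustrative only: the usual client-side sequence for the two
 * EGL_MESA_image_dma_buf_export entry points backed by the functions above.
 * "dpy" and "img" are placeholder handles and error checking is omitted:
 *
 *    int fourcc, nplanes;
 *    eglExportDMABUFImageQueryMESA(dpy, img, &fourcc, &nplanes, NULL);
 *    int *fds = calloc(nplanes, sizeof(int));
 *    EGLint *strides = calloc(nplanes, sizeof(EGLint));
 *    EGLint *offsets = calloc(nplanes, sizeof(EGLint));
 *    eglExportDMABUFImageMESA(dpy, img, fds, strides, offsets);
 */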
2771
2772 #endif
2773
2774 _EGLImage *
2775 dri2_create_image_khr(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
2776 EGLClientBuffer buffer, const EGLint *attr_list)
2777 {
2778 switch (target) {
2779 case EGL_GL_TEXTURE_2D_KHR:
2780 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
2781 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
2782 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
2783 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
2784 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
2785 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
2786 case EGL_GL_TEXTURE_3D_KHR:
2787 return dri2_create_image_khr_texture(disp, ctx, target, buffer,
2788 attr_list);
2789 case EGL_GL_RENDERBUFFER_KHR:
2790 return dri2_create_image_khr_renderbuffer(disp, ctx, buffer, attr_list);
2791 #ifdef HAVE_LIBDRM
2792 case EGL_DRM_BUFFER_MESA:
2793 return dri2_create_image_mesa_drm_buffer(disp, ctx, buffer, attr_list);
2794 case EGL_LINUX_DMA_BUF_EXT:
2795 return dri2_create_image_dma_buf(disp, ctx, buffer, attr_list);
2796 #endif
2797 #ifdef HAVE_WAYLAND_PLATFORM
2798 case EGL_WAYLAND_BUFFER_WL:
2799 return dri2_create_image_wayland_wl_buffer(disp, ctx, buffer, attr_list);
2800 #endif
2801 default:
2802 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2803 return EGL_NO_IMAGE_KHR;
2804 }
2805 }
2806
2807 static EGLBoolean
2808 dri2_destroy_image_khr(_EGLDisplay *disp, _EGLImage *image)
2809 {
2810 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2811 struct dri2_egl_image *dri2_img = dri2_egl_image(image);
2812
2813 dri2_destroy_image(dri2_img->dri_image);
2814 free(dri2_img);
2815
2816 mtx_unlock(&dri2_dpy->lock);
2817
2818 return EGL_TRUE;
2819 }
2820
2821 #ifdef HAVE_WAYLAND_PLATFORM
2822
2823 static void
2824 dri2_wl_reference_buffer(void *user_data, uint32_t name, int fd,
2825 struct wl_drm_buffer *buffer)
2826 {
2827 _EGLDisplay *disp = user_data;
2828 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2829 __DRIimage *img;
2830 int dri_components = 0;
2831
2832 if (fd == -1)
2833 img = dri2_from_names(
2834 dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
2835 buffer->format, (int *)&name, 1, buffer->stride, buffer->offset, NULL);
2836 else
2837 img = dri2_from_dma_bufs(
2838 dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
2839 buffer->format, DRM_FORMAT_MOD_INVALID, &fd, 1, buffer->stride,
2840 buffer->offset, 0, 0, 0, 0, 0, NULL, NULL);
2841
2842 if (img == NULL)
2843 return;
2844
2845 dri2_query_image(img, __DRI_IMAGE_ATTRIB_COMPONENTS,
2846 &dri_components);
2847
2848 buffer->driver_format = NULL;
2849 for (int i = 0; i < ARRAY_SIZE(wl_drm_components); i++)
2850 if (wl_drm_components[i].dri_components == dri_components)
2851 buffer->driver_format = &wl_drm_components[i];
2852
2853 if (buffer->driver_format == NULL)
2854 dri2_destroy_image(img);
2855 else
2856 buffer->driver_buffer = img;
2857 }
2858
2859 static void
2860 dri2_wl_release_buffer(void *user_data, struct wl_drm_buffer *buffer)
2861 {
2862 dri2_destroy_image(buffer->driver_buffer);
2863 }
2864
2865 static EGLBoolean
2866 dri2_bind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
2867 {
2868 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2869 const struct wayland_drm_callbacks wl_drm_callbacks = {
2870 .authenticate = (int (*)(void *, uint32_t))dri2_dpy->vtbl->authenticate,
2871 .reference_buffer = dri2_wl_reference_buffer,
2872 .release_buffer = dri2_wl_release_buffer,
2873 .is_format_supported = dri2_wl_is_format_supported,
2874 };
2875 int flags = 0;
2876 char *device_name;
2877
2878 if (dri2_dpy->wl_server_drm)
2879 goto fail;
2880
2881 device_name = drmGetRenderDeviceNameFromFd(dri2_dpy->fd_render_gpu);
2882 if (!device_name)
2883 device_name = strdup(dri2_dpy->device_name);
2884 if (!device_name)
2885 goto fail;
2886
2887 if (dri2_dpy->has_dmabuf_import && dri2_dpy->has_dmabuf_export)
2888 flags |= WAYLAND_DRM_PRIME;
2889
2890 dri2_dpy->wl_server_drm =
2891 wayland_drm_init(wl_dpy, device_name, &wl_drm_callbacks, disp, flags);
2892
2893 free(device_name);
2894
2895 if (!dri2_dpy->wl_server_drm)
2896 goto fail;
2897
2898 #ifdef HAVE_DRM_PLATFORM
2899 /* We have to share the wl_drm instance with gbm, so gbm can convert
2900 * wl_buffers to gbm bos. */
2901 if (dri2_dpy->gbm_dri)
2902 dri2_dpy->gbm_dri->wl_drm = dri2_dpy->wl_server_drm;
2903 #endif
2904
2905 mtx_unlock(&dri2_dpy->lock);
2906 return EGL_TRUE;
2907
2908 fail:
2909 mtx_unlock(&dri2_dpy->lock);
2910 return EGL_FALSE;
2911 }
2912
2913 static EGLBoolean
2914 dri2_unbind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
2915 {
2916 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2917
2918 if (!dri2_dpy->wl_server_drm)
2919 return EGL_FALSE;
2920
2921 wayland_drm_uninit(dri2_dpy->wl_server_drm);
2922 dri2_dpy->wl_server_drm = NULL;
2923
2924 return EGL_TRUE;
2925 }
2926
2927 static EGLBoolean
2928 dri2_query_wayland_buffer_wl(_EGLDisplay *disp,
2929 struct wl_resource *buffer_resource,
2930 EGLint attribute, EGLint *value)
2931 {
2932 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2933 struct wl_drm_buffer *buffer;
2934 const struct wl_drm_components_descriptor *format;
2935
2936 buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm, buffer_resource);
2937 if (!buffer)
2938 return EGL_FALSE;
2939
2940 format = buffer->driver_format;
2941 switch (attribute) {
2942 case EGL_TEXTURE_FORMAT:
2943 *value = format->components;
2944 return EGL_TRUE;
2945 case EGL_WIDTH:
2946 *value = buffer->width;
2947 return EGL_TRUE;
2948 case EGL_HEIGHT:
2949 *value = buffer->height;
2950 return EGL_TRUE;
2951 }
2952
2953 return EGL_FALSE;
2954 }
2955 #endif
2956
2957 static void
2958 dri2_egl_ref_sync(struct dri2_egl_sync *sync)
2959 {
2960 p_atomic_inc(&sync->refcount);
2961 }
2962
2963 static void
2964 dri2_egl_unref_sync(struct dri2_egl_display *dri2_dpy,
2965 struct dri2_egl_sync *dri2_sync)
2966 {
2967 if (p_atomic_dec_zero(&dri2_sync->refcount)) {
2968 switch (dri2_sync->base.Type) {
2969 case EGL_SYNC_REUSABLE_KHR:
2970 cnd_destroy(&dri2_sync->cond);
2971 break;
2972 case EGL_SYNC_NATIVE_FENCE_ANDROID:
2973 if (dri2_sync->base.SyncFd != EGL_NO_NATIVE_FENCE_FD_ANDROID)
2974 close(dri2_sync->base.SyncFd);
2975 break;
2976 default:
2977 break;
2978 }
2979
2980 if (dri2_sync->fence)
2981 dri_destroy_fence(dri2_dpy->dri_screen_render_gpu,
2982 dri2_sync->fence);
2983
2984 free(dri2_sync);
2985 }
2986 }
2987
2988 static _EGLSync *
2989 dri2_create_sync(_EGLDisplay *disp, EGLenum type, const EGLAttrib *attrib_list)
2990 {
2991 _EGLContext *ctx = _eglGetCurrentContext();
2992 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2993 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
2994 struct dri2_egl_sync *dri2_sync;
2995 EGLint ret;
2996 pthread_condattr_t attr;
2997
2998 dri2_sync = calloc(1, sizeof(struct dri2_egl_sync));
2999 if (!dri2_sync) {
3000 _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3001 goto fail;
3002 }
3003
3004 if (!_eglInitSync(&dri2_sync->base, disp, type, attrib_list)) {
3005 goto fail;
3006 }
3007
3008 switch (type) {
3009 case EGL_SYNC_FENCE_KHR:
3010 dri2_sync->fence = dri_create_fence(dri2_ctx->dri_context);
3011 if (!dri2_sync->fence) {
3012 /* Why did it fail? DRI doesn't return an error code, so we emit
3013 * a generic EGL error that doesn't communicate user error.
3014 */
3015 _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3016 goto fail;
3017 }
3018 break;
3019
3020 case EGL_SYNC_CL_EVENT_KHR:
3021 dri2_sync->fence = dri_get_fence_from_cl_event(
3022 dri2_dpy->dri_screen_render_gpu, dri2_sync->base.CLEvent);
3023 /* this can only happen if the cl_event passed in is invalid. */
3024 if (!dri2_sync->fence) {
3025 _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3026 goto fail;
3027 }
3028
3029 /* the initial status must be "signaled" if the cl_event is signaled */
3030 if (dri_client_wait_sync(dri2_ctx->dri_context,
3031 dri2_sync->fence, 0, 0))
3032 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3033 break;
3034
3035 case EGL_SYNC_REUSABLE_KHR:
3036 /* initialize attr */
3037 ret = pthread_condattr_init(&attr);
3038
3039 if (ret) {
3040 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3041 goto fail;
3042 }
3043
3044 #if !defined(__APPLE__) && !defined(__MACOSX)
3045 /* change clock attribute to CLOCK_MONOTONIC */
3046 ret = pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
3047
3048 if (ret) {
3049 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3050 goto fail;
3051 }
3052 #endif
3053
3054 ret = pthread_cond_init(&dri2_sync->cond, &attr);
3055
3056 if (ret) {
3057 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3058 goto fail;
3059 }
3060
3061 /* initial status of reusable sync must be "unsignaled" */
3062 dri2_sync->base.SyncStatus = EGL_UNSIGNALED_KHR;
3063 break;
3064
3065 case EGL_SYNC_NATIVE_FENCE_ANDROID:
3066 dri2_sync->fence = dri_create_fence_fd(
3067 dri2_ctx->dri_context, dri2_sync->base.SyncFd);
3068 if (!dri2_sync->fence) {
3069 _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3070 goto fail;
3071 }
3072 break;
3073 }
3074
3075 p_atomic_set(&dri2_sync->refcount, 1);
3076 mtx_unlock(&dri2_dpy->lock);
3077
3078 return &dri2_sync->base;
3079
3080 fail:
3081 free(dri2_sync);
3082 mtx_unlock(&dri2_dpy->lock);
3083 return NULL;
3084 }
3085
3086 static EGLBoolean
3087 dri2_destroy_sync(_EGLDisplay *disp, _EGLSync *sync)
3088 {
3089 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3090 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3091 EGLint ret = EGL_TRUE;
3092 EGLint err;
3093
3094 /* if type of sync is EGL_SYNC_REUSABLE_KHR and it is not signaled yet,
3095 * then unlock all threads possibly blocked by the reusable sync before
3096 * destroying it.
3097 */
3098 if (dri2_sync->base.Type == EGL_SYNC_REUSABLE_KHR &&
3099 dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3100 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3101 /* unblock all threads currently blocked by sync */
3102 err = cnd_broadcast(&dri2_sync->cond);
3103
3104 if (err) {
3105 _eglError(EGL_BAD_ACCESS, "eglDestroySyncKHR");
3106 ret = EGL_FALSE;
3107 }
3108 }
3109
3110 dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3111
3112 mtx_unlock(&dri2_dpy->lock);
3113
3114 return ret;
3115 }
3116
3117 static EGLint
3118 dri2_dup_native_fence_fd(_EGLDisplay *disp, _EGLSync *sync)
3119 {
3120 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3121 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3122
3123 assert(sync->Type == EGL_SYNC_NATIVE_FENCE_ANDROID);
3124
3125 if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3126 /* try to retrieve the actual native fence fd.. if rendering is
3127 * not flushed this will just return -1, aka NO_NATIVE_FENCE_FD:
3128 */
3129 sync->SyncFd = dri_get_fence_fd(
3130 dri2_dpy->dri_screen_render_gpu, dri2_sync->fence);
3131 }
3132
3133 mtx_unlock(&dri2_dpy->lock);
3134
3135 if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3136 /* if native fence fd still not created, return an error: */
3137 _eglError(EGL_BAD_PARAMETER, "eglDupNativeFenceFDANDROID");
3138 return EGL_NO_NATIVE_FENCE_FD_ANDROID;
3139 }
3140
3141 assert(sync_valid_fd(sync->SyncFd));
3142
3143 return os_dupfd_cloexec(sync->SyncFd);
3144 }
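/* Illustrative only: a hedged sketch of the EGL_ANDROID_native_fence_sync
 * pattern served by the function above. "dpy" is a placeholder display
 * handle; the flush is what makes a real fence fd available to duplicate:
 *
 *    EGLSyncKHR sync =
 *       eglCreateSyncKHR(dpy, EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
 *    glFlush();
 *    int fence_fd = eglDupNativeFenceFDANDROID(dpy, sync);  // caller owns fd
 */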
3145
3146 static void
3147 dri2_set_blob_cache_funcs(_EGLDisplay *disp, EGLSetBlobFuncANDROID set,
3148 EGLGetBlobFuncANDROID get)
3149 {
3150 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3151 dri_set_blob_cache_funcs(dri2_dpy->dri_screen_render_gpu, set, get);
3152 mtx_unlock(&dri2_dpy->lock);
3153 }
3154
3155 static EGLint
3156 dri2_client_wait_sync(_EGLDisplay *disp, _EGLSync *sync, EGLint flags,
3157 EGLTime timeout)
3158 {
3159 _EGLContext *ctx = _eglGetCurrentContext();
3160 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3161 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3162 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3163 unsigned wait_flags = 0;
3164
3165 EGLint ret = EGL_CONDITION_SATISFIED_KHR;
3166
3167 /* The EGL_KHR_fence_sync spec states:
3168 *
3169 * "If no context is current for the bound API,
3170 * the EGL_SYNC_FLUSH_COMMANDS_BIT_KHR bit is ignored."
3171 */
3172 if (dri2_ctx && flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)
3173 wait_flags |= __DRI2_FENCE_FLAG_FLUSH_COMMANDS;
3174
3175 /* the sync object should take a reference while waiting */
3176 dri2_egl_ref_sync(dri2_sync);
3177
3178 switch (sync->Type) {
3179 case EGL_SYNC_FENCE_KHR:
3180 case EGL_SYNC_NATIVE_FENCE_ANDROID:
3181 case EGL_SYNC_CL_EVENT_KHR:
3182 if (dri_client_wait_sync(
3183 dri2_ctx ? dri2_ctx->dri_context : NULL, dri2_sync->fence,
3184 wait_flags, timeout))
3185 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3186 else
3187 ret = EGL_TIMEOUT_EXPIRED_KHR;
3188 break;
3189
3190 case EGL_SYNC_REUSABLE_KHR:
3191 if (dri2_ctx && dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR &&
3192 (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)) {
3193 /* flush context if EGL_SYNC_FLUSH_COMMANDS_BIT_KHR is set */
3194 dri2_gl_flush();
3195 }
3196
3197 /* if timeout is EGL_FOREVER_KHR, it should wait without any timeout.*/
3198 if (timeout == EGL_FOREVER_KHR) {
3199 mtx_lock(&dri2_sync->mutex);
3200 cnd_wait(&dri2_sync->cond, &dri2_sync->mutex);
3201 mtx_unlock(&dri2_sync->mutex);
3202 } else {
3203 /* if reusable sync has not been yet signaled */
3204 if (dri2_sync->base.SyncStatus != EGL_SIGNALED_KHR) {
3205 /* timespecs for cnd_timedwait */
3206 struct timespec current;
3207 struct timespec expire;
3208
3209 /* We override the clock to monotonic when creating the condition
3210 * variable. */
3211 clock_gettime(CLOCK_MONOTONIC, &current);
3212
3213 /* calculating when to expire */
3214 expire.tv_nsec = timeout % 1000000000L;
3215 expire.tv_sec = timeout / 1000000000L;
3216
3217 expire.tv_nsec += current.tv_nsec;
3218 expire.tv_sec += current.tv_sec;
3219
3220 /* expire.tv_nsec now is a number between 0 and 1999999998 */
3221 if (expire.tv_nsec > 999999999L) {
3222 expire.tv_sec++;
3223 expire.tv_nsec -= 1000000000L;
3224 }
3225
3226 mtx_lock(&dri2_sync->mutex);
3227 ret = cnd_timedwait(&dri2_sync->cond, &dri2_sync->mutex, &expire);
3228 mtx_unlock(&dri2_sync->mutex);
3229
3230 if (ret == thrd_timedout) {
3231 if (dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3232 ret = EGL_TIMEOUT_EXPIRED_KHR;
3233 } else {
3234 _eglError(EGL_BAD_ACCESS, "eglClientWaitSyncKHR");
3235 ret = EGL_FALSE;
3236 }
3237 }
3238 }
3239 }
3240 break;
3241 }
3242
3243 dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3244
3245 return ret;
3246 }
3247
3248 static EGLBoolean
3249 dri2_signal_sync(_EGLDisplay *disp, _EGLSync *sync, EGLenum mode)
3250 {
3251 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3252 EGLint ret;
3253
3254 if (sync->Type != EGL_SYNC_REUSABLE_KHR)
3255 return _eglError(EGL_BAD_MATCH, "eglSignalSyncKHR");
3256
3257 if (mode != EGL_SIGNALED_KHR && mode != EGL_UNSIGNALED_KHR)
3258 return _eglError(EGL_BAD_ATTRIBUTE, "eglSignalSyncKHR");
3259
3260 dri2_sync->base.SyncStatus = mode;
3261
3262 if (mode == EGL_SIGNALED_KHR) {
3263 ret = cnd_broadcast(&dri2_sync->cond);
3264
3265 /* fail to broadcast */
3266 if (ret)
3267 return _eglError(EGL_BAD_ACCESS, "eglSignalSyncKHR");
3268 }
3269
3270 return EGL_TRUE;
3271 }
3272
3273 static EGLint
3274 dri2_server_wait_sync(_EGLDisplay *disp, _EGLSync *sync)
3275 {
3276 _EGLContext *ctx = _eglGetCurrentContext();
3277 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3278 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3279
3280 dri_server_wait_sync(dri2_ctx->dri_context, dri2_sync->fence,
3281 0);
3282 return EGL_TRUE;
3283 }
3284
3285 static int
3286 dri2_interop_query_device_info(_EGLDisplay *disp, _EGLContext *ctx,
3287 struct mesa_glinterop_device_info *out)
3288 {
3289 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3290
3291 return dri_interop_query_device_info(dri2_ctx->dri_context, out);
3292 }
3293
3294 static int
3295 dri2_interop_export_object(_EGLDisplay *disp, _EGLContext *ctx,
3296 struct mesa_glinterop_export_in *in,
3297 struct mesa_glinterop_export_out *out)
3298 {
3299 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3300
3301 return dri_interop_export_object(dri2_ctx->dri_context, in, out);
3302 }
3303
3304 static int
3305 dri2_interop_flush_objects(_EGLDisplay *disp, _EGLContext *ctx, unsigned count,
3306 struct mesa_glinterop_export_in *objects,
3307 struct mesa_glinterop_flush_out *out)
3308 {
3309 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3310
3311 return dri_interop_flush_objects(dri2_ctx->dri_context, count,
3312 objects, out);
3313 }
3314
3315 static EGLBoolean
3316 dri2_query_supported_compression_rates(_EGLDisplay *disp, _EGLConfig *config,
3317 const EGLAttrib *attr_list,
3318 EGLint *rates, EGLint rate_size,
3319 EGLint *num_rate)
3320 {
3321 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3322 struct dri2_egl_config *conf = dri2_egl_config(config);
3323 enum __DRIFixedRateCompression dri_rates[rate_size];
3324
3325 if (dri2_dpy->has_compression_modifiers) {
3326 const __DRIconfig *dri_conf =
3327 dri2_get_dri_config(conf, EGL_WINDOW_BIT, EGL_GL_COLORSPACE_LINEAR);
3328 if (!dri2_query_compression_rates(
3329 dri2_dpy->dri_screen_render_gpu, dri_conf, rate_size, dri_rates,
3330 num_rate))
3331 return EGL_FALSE;
3332
3333 for (int i = 0; i < *num_rate && i < rate_size; ++i)
3334 rates[i] = dri_rates[i];
3335 return EGL_TRUE;
3336 }
3337 *num_rate = 0;
3338 return EGL_TRUE;
3339 }
3340
3341 const _EGLDriver _eglDriver = {
3342 .Initialize = dri2_initialize,
3343 .Terminate = dri2_terminate,
3344 .CreateContext = dri2_create_context,
3345 .DestroyContext = dri2_destroy_context,
3346 .MakeCurrent = dri2_make_current,
3347 .CreateWindowSurface = dri2_create_window_surface,
3348 .CreatePixmapSurface = dri2_create_pixmap_surface,
3349 .CreatePbufferSurface = dri2_create_pbuffer_surface,
3350 .DestroySurface = dri2_destroy_surface,
3351 .WaitClient = dri2_wait_client,
3352 .WaitNative = dri2_wait_native,
3353 .BindTexImage = dri2_bind_tex_image,
3354 .ReleaseTexImage = dri2_release_tex_image,
3355 .SwapInterval = dri2_swap_interval,
3356 .SwapBuffers = dri2_swap_buffers,
3357 .SwapBuffersWithDamageEXT = dri2_swap_buffers_with_damage,
3358 .SwapBuffersRegionNOK = dri2_swap_buffers_region,
3359 .SetDamageRegion = dri2_set_damage_region,
3360 .PostSubBufferNV = dri2_post_sub_buffer,
3361 .CopyBuffers = dri2_copy_buffers,
3362 .QueryBufferAge = dri2_query_buffer_age,
3363 .CreateImageKHR = dri2_create_image,
3364 .DestroyImageKHR = dri2_destroy_image_khr,
3365 .CreateWaylandBufferFromImageWL = dri2_create_wayland_buffer_from_image,
3366 .QuerySurface = dri2_query_surface,
3367 .QueryDriverName = dri2_query_driver_name,
3368 .QueryDriverConfig = dri2_query_driver_config,
3369 #ifdef HAVE_LIBDRM
3370 .CreateDRMImageMESA = dri2_create_drm_image_mesa,
3371 .ExportDRMImageMESA = dri2_export_drm_image_mesa,
3372 .ExportDMABUFImageQueryMESA = dri2_export_dma_buf_image_query_mesa,
3373 .ExportDMABUFImageMESA = dri2_export_dma_buf_image_mesa,
3374 .QueryDmaBufFormatsEXT = dri2_query_dma_buf_formats,
3375 .QueryDmaBufModifiersEXT = dri2_query_dma_buf_modifiers,
3376 #endif
3377 #ifdef HAVE_WAYLAND_PLATFORM
3378 .BindWaylandDisplayWL = dri2_bind_wayland_display_wl,
3379 .UnbindWaylandDisplayWL = dri2_unbind_wayland_display_wl,
3380 .QueryWaylandBufferWL = dri2_query_wayland_buffer_wl,
3381 #endif
3382 .GetSyncValuesCHROMIUM = dri2_get_sync_values_chromium,
3383 .GetMscRateANGLE = dri2_get_msc_rate_angle,
3384 .CreateSyncKHR = dri2_create_sync,
3385 .ClientWaitSyncKHR = dri2_client_wait_sync,
3386 .SignalSyncKHR = dri2_signal_sync,
3387 .WaitSyncKHR = dri2_server_wait_sync,
3388 .DestroySyncKHR = dri2_destroy_sync,
3389 .GLInteropQueryDeviceInfo = dri2_interop_query_device_info,
3390 .GLInteropExportObject = dri2_interop_export_object,
3391 .GLInteropFlushObjects = dri2_interop_flush_objects,
3392 .DupNativeFenceFDANDROID = dri2_dup_native_fence_fd,
3393 .SetBlobCacheFuncsANDROID = dri2_set_blob_cache_funcs,
3394 .QuerySupportedCompressionRatesEXT = dri2_query_supported_compression_rates,
3395 };
3396