use crate::compiler::nir::*;
use crate::pipe::fence::*;
use crate::pipe::resource::*;
use crate::pipe::screen::*;
use crate::pipe::transfer::*;

use mesa_rust_gen::pipe_fd_type::*;
use mesa_rust_gen::*;
use mesa_rust_util::has_required_feature;

use std::mem::size_of;
use std::os::raw::*;
use std::ptr;
use std::ptr::*;
use std::sync::Arc;

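/// Wrapper around a driver-provided Gallium `pipe_context`. Outstanding work is
/// flushed and the context destroyed when this struct is dropped.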
pub struct PipeContext {
    pipe: NonNull<pipe_context>,
    screen: Arc<PipeScreen>,
}

unsafe impl Send for PipeContext {}
unsafe impl Sync for PipeContext {}

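/// Read/write flags for resource mappings, a typed subset of `pipe_map_flags`.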
#[derive(Clone, Copy)]
#[repr(u32)]
pub enum RWFlags {
    RD = pipe_map_flags::PIPE_MAP_READ.0,
    WR = pipe_map_flags::PIPE_MAP_WRITE.0,
    RW = pipe_map_flags::PIPE_MAP_READ_WRITE.0,
}

impl From<RWFlags> for pipe_map_flags {
    fn from(rw: RWFlags) -> Self {
        pipe_map_flags(rw as u32)
    }
}

impl PipeContext {
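    /// Wraps a raw `pipe_context`. Returns `None` if `context` is null or
    /// lacks any of the callbacks rusticl requires (see `has_required_cbs`).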
    pub(super) fn new(context: *mut pipe_context, screen: &Arc<PipeScreen>) -> Option<Self> {
        let s = Self {
            pipe: NonNull::new(context)?,
            screen: screen.clone(),
        };

        if !has_required_cbs(unsafe { s.pipe.as_ref() }) {
            // Panic in debug builds; fail gracefully with None in release builds.
            debug_assert!(false, "Context missing features. This should never happen!");
            return None;
        }

        Some(s)
    }

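    /// Writes `size` bytes from `data` into the buffer `res` at `offset`.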
    pub fn buffer_subdata(
        &self,
        res: &PipeResource,
        offset: c_uint,
        data: *const c_void,
        size: c_uint,
    ) {
        unsafe {
            self.pipe.as_ref().buffer_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                offset,
                size,
                data,
            )
        }
    }

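    /// Writes pixel data into the region `bx` of `res`. `stride` and
    /// `layer_stride` give the row and slice pitch of `data` in bytes.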
    pub fn texture_subdata(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        data: *const c_void,
        stride: u32,
        layer_stride: usize,
    ) {
        unsafe {
            self.pipe.as_ref().texture_subdata.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                pipe_map_flags::PIPE_MAP_WRITE.0, // TODO PIPE_MAP_x
                bx,
                data,
                stride,
                layer_stride,
            )
        }
    }

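    /// Fills `size` bytes of `res` starting at `offset` with the repeating
    /// `pattern`, using `u_default_clear_buffer` when the driver does not
    /// implement `clear_buffer` itself.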
    pub fn clear_buffer(&self, res: &PipeResource, pattern: &[u8], offset: u32, size: u32) {
        unsafe {
            self.pipe
                .as_ref()
                .clear_buffer
                .unwrap_or(u_default_clear_buffer)(
                self.pipe.as_ptr(),
                res.pipe(),
                offset,
                size,
                pattern.as_ptr().cast(),
                pattern.len() as i32,
            )
        }
    }

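    /// Clears an image stored in a buffer by issuing one `clear_buffer` call
    /// per row of the region. `origin` and `region` are in pixels; `strides`
    /// is `(row_pitch, slice_pitch)` in bytes.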
    pub fn clear_image_buffer(
        &self,
        res: &PipeResource,
        pattern: &[u32],
        origin: &[usize; 3],
        region: &[usize; 3],
        strides: (usize, usize),
        pixel_size: usize,
    ) {
        let (row_pitch, slice_pitch) = strides;
        let pitch = [pixel_size, row_pitch, slice_pitch];
        for z in 0..region[2] {
            for y in 0..region[1] {
                // Byte offset of the row: dot(origin + [0, y, z], pitch)
                let offset = (0..3)
                    .map(|i| ((origin[i] + [0, y, z][i]) * pitch[i]) as u32)
                    .sum();

                unsafe {
                    self.pipe.as_ref().clear_buffer.unwrap()(
                        self.pipe.as_ptr(),
                        res.pipe(),
                        offset,
                        (region[0] * pixel_size) as u32,
                        pattern.as_ptr().cast(),
                        pixel_size as i32,
                    )
                };
            }
        }
    }

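    /// Clears the region `bx` of `res` to `pattern`, using
    /// `u_default_clear_texture` when the driver does not implement
    /// `clear_texture` itself.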
    pub fn clear_texture(&self, res: &PipeResource, pattern: &[u32], bx: &pipe_box) {
        unsafe {
            let clear_texture = self
                .pipe
                .as_ref()
                .clear_texture
                .unwrap_or(u_default_clear_texture);
            clear_texture(
                self.pipe.as_ptr(),
                res.pipe(),
                0,
                bx,
                pattern.as_ptr().cast(),
            )
        }
    }

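    /// Copies the region `bx` of `src` to `dst_offset` within `dst`.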
    pub fn resource_copy_region(
        &self,
        src: &PipeResource,
        dst: &PipeResource,
        dst_offset: &[u32; 3],
        bx: &pipe_box,
    ) {
        unsafe {
            self.pipe.as_ref().resource_copy_region.unwrap()(
                self.pipe.as_ptr(),
                dst.pipe(),
                0,
                dst_offset[0],
                dst_offset[1],
                dst_offset[2],
                src.pipe(),
                0,
                bx,
            )
        }
    }

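    /// Common helper behind [`Self::buffer_map`] and [`Self::texture_map`].
    /// Returns `None` when the driver fails to map `res`.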
    fn resource_map(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        flags: pipe_map_flags,
        is_buffer: bool,
    ) -> Option<PipeTransfer> {
        let mut out: *mut pipe_transfer = ptr::null_mut();

        let ptr = unsafe {
            let func = if is_buffer {
                self.pipe.as_ref().buffer_map
            } else {
                self.pipe.as_ref().texture_map
            };

            func.unwrap()(self.pipe.as_ptr(), res.pipe(), 0, flags.0, bx, &mut out)
        };

        if ptr.is_null() {
            None
        } else {
            Some(PipeTransfer::new(self, is_buffer, out, ptr))
        }
    }

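    /// Maps `size` bytes of `res` starting at `offset` for CPU access. The
    /// returned [`PipeTransfer`] is expected to release the mapping again via
    /// [`Self::buffer_unmap`].
    ///
    /// A minimal usage sketch (assumes a `ctx: &PipeContext` and a suitable
    /// `res`; illustrative only):
    ///
    /// ```ignore
    /// if let Some(tx) = ctx.buffer_map(res, 0, 64, RWFlags::RD) {
    ///     // read the first 64 bytes of the buffer through `tx` ...
    /// }
    /// ```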
    pub fn buffer_map(
        &self,
        res: &PipeResource,
        offset: i32,
        size: i32,
        rw: RWFlags,
    ) -> Option<PipeTransfer> {
        let b = pipe_box {
            x: offset,
            width: size,
            height: 1,
            depth: 1,
            ..Default::default()
        };

        self.resource_map(res, &b, rw.into(), true)
    }

    pub(super) fn buffer_unmap(&self, tx: *mut pipe_transfer) {
        unsafe { self.pipe.as_ref().buffer_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }

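    /// Maps the region `bx` of `res` for CPU access; the texture analogue of
    /// [`Self::buffer_map`].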
    pub fn texture_map(
        &self,
        res: &PipeResource,
        bx: &pipe_box,
        rw: RWFlags,
    ) -> Option<PipeTransfer> {
        self.resource_map(res, bx, rw.into(), false)
    }

    pub(super) fn texture_unmap(&self, tx: *mut pipe_transfer) {
        unsafe { self.pipe.as_ref().texture_unmap.unwrap()(self.pipe.as_ptr(), tx) };
    }

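    /// Creates a compute state object from `nir`. The shader is duplicated for
    /// the driver, so the caller keeps ownership of `nir`. `static_local_mem`
    /// is the shader's statically known shared memory usage in bytes.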
    pub fn create_compute_state(&self, nir: &NirShader, static_local_mem: u32) -> *mut c_void {
        let state = pipe_compute_state {
            ir_type: pipe_shader_ir::PIPE_SHADER_IR_NIR,
            prog: nir.dup_for_driver().cast(),
            req_input_mem: 0,
            static_shared_mem: static_local_mem,
        };
        unsafe { self.pipe.as_ref().create_compute_state.unwrap()(self.pipe.as_ptr(), &state) }
    }

    pub fn bind_compute_state(&self, state: *mut c_void) {
        unsafe { self.pipe.as_ref().bind_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }

    pub fn delete_compute_state(&self, state: *mut c_void) {
        unsafe { self.pipe.as_ref().delete_compute_state.unwrap()(self.pipe.as_ptr(), state) }
    }

    pub fn compute_state_info(&self, state: *mut c_void) -> pipe_compute_state_object_info {
        let mut info = pipe_compute_state_object_info::default();
        unsafe {
            self.pipe.as_ref().get_compute_state_info.unwrap()(self.pipe.as_ptr(), state, &mut info)
        }
        info
    }

    pub fn compute_state_subgroup_size(&self, state: *mut c_void, block: &[u32; 3]) -> u32 {
        unsafe {
            if let Some(cb) = self.pipe.as_ref().get_compute_state_subgroup_size {
                cb(self.pipe.as_ptr(), state, block)
            } else {
                0
            }
        }
    }

    pub fn is_create_fence_fd_supported(&self) -> bool {
        unsafe { self.pipe.as_ref().create_fence_fd.is_some() }
    }

    pub fn create_sampler_state(&self, state: &pipe_sampler_state) -> *mut c_void {
        unsafe { self.pipe.as_ref().create_sampler_state.unwrap()(self.pipe.as_ptr(), state) }
    }

    pub fn bind_sampler_states(&self, samplers: &[*mut c_void]) {
        // The C entry point takes a mutable pointer, so work on a copy.
        let mut samplers = samplers.to_owned();
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                samplers.len() as u32,
                samplers.as_mut_ptr(),
            )
        }
    }

    pub fn clear_sampler_states(&self, count: u32) {
        let mut samplers = vec![ptr::null_mut(); count as usize];
        unsafe {
            self.pipe.as_ref().bind_sampler_states.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                samplers.as_mut_ptr(),
            )
        }
    }

    pub fn delete_sampler_state(&self, ptr: *mut c_void) {
        unsafe { self.pipe.as_ref().delete_sampler_state.unwrap()(self.pipe.as_ptr(), ptr) }
    }

    pub fn bind_constant_buffer(&self, idx: u32, res: &PipeResource) {
        let cb = pipe_constant_buffer {
            buffer: res.pipe(),
            buffer_offset: 0,
            buffer_size: res.width(),
            user_buffer: ptr::null(),
        };
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                &cb,
            )
        }
    }

    pub fn set_constant_buffer(&self, idx: u32, data: &[u8]) {
        let cb = pipe_constant_buffer {
            buffer: ptr::null_mut(),
            buffer_offset: 0,
            buffer_size: data.len() as u32,
            user_buffer: data.as_ptr().cast(),
        };
        unsafe {
            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                false,
                if data.is_empty() { ptr::null() } else { &cb },
            )
        }
    }

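    /// Uploads `data` through the context's stream uploader and binds the
    /// resulting GPU buffer as constant buffer `idx`.
    ///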
    /// Returns `false` when it fails to allocate GPU memory.
    #[must_use]
    pub fn set_constant_buffer_stream(&self, idx: u32, data: &[u8]) -> bool {
        let mut cb = pipe_constant_buffer {
            buffer: ptr::null_mut(),
            buffer_offset: 0,
            buffer_size: data.len() as u32,
            user_buffer: ptr::null_mut(),
        };

        unsafe {
            let stream = self.pipe.as_ref().stream_uploader;
            u_upload_data(
                stream,
                0,
                data.len() as u32,
                size_of::<[u64; 16]>() as u32,
                data.as_ptr().cast(),
                &mut cb.buffer_offset,
                &mut cb.buffer,
            );
            u_upload_unmap(stream);

            if cb.buffer.is_null() {
                return false;
            }

            self.pipe.as_ref().set_constant_buffer.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                idx,
                true,
                &cb,
            );

            true
        }
    }

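    /// Launches a compute grid of `grid` blocks of `block` threads each.
    /// `variable_local_mem` is the variable shared memory requested on top of
    /// the shader's static allocation.
    ///
    /// A minimal dispatch sketch (assumes `ctx` with a bound compute state;
    /// illustrative only):
    ///
    /// ```ignore
    /// // 16 blocks of 64 threads on the x dimension:
    /// ctx.launch_grid(1, [64, 1, 1], [16, 1, 1], 0);
    /// ```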
    pub fn launch_grid(
        &self,
        work_dim: u32,
        block: [u32; 3],
        grid: [u32; 3],
        variable_local_mem: u32,
    ) {
        let info = pipe_grid_info {
            pc: 0,
            input: ptr::null(),
            variable_shared_mem: variable_local_mem,
            work_dim,
            block,
            last_block: [0; 3],
            grid,
            grid_base: [0; 3],
            indirect: ptr::null_mut(),
            indirect_offset: 0,
            indirect_stride: 0,
            draw_count: 0,
            indirect_draw_count_offset: 0,
            indirect_draw_count: ptr::null_mut(),
        };
        unsafe { self.pipe.as_ref().launch_grid.unwrap()(self.pipe.as_ptr(), &info) }
    }

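    /// Binds `res` as the global buffers starting at slot 0 and has the driver
    /// report the resulting buffer addresses through the pointers in `out`.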
    pub fn set_global_binding(&self, res: &[&PipeResource], out: &mut [*mut u32]) {
        let mut res: Vec<_> = res.iter().copied().map(PipeResource::pipe).collect();
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                res.len() as u32,
                res.as_mut_ptr(),
                out.as_mut_ptr(),
            )
        }
    }

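    /// Creates a sampler view over `res` using `format`, with the view
    /// template derived from `res` and the optional `app_img_info`.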
    pub fn create_sampler_view(
        &self,
        res: &PipeResource,
        format: pipe_format,
        app_img_info: Option<&AppImgInfo>,
    ) -> *mut pipe_sampler_view {
        let template = res.pipe_sampler_view_template(format, app_img_info);

        unsafe {
            self.pipe.as_ref().create_sampler_view.unwrap()(
                self.pipe.as_ptr(),
                res.pipe(),
                &template,
            )
        }
    }

    pub fn clear_global_binding(&self, count: u32) {
        unsafe {
            self.pipe.as_ref().set_global_binding.unwrap()(
                self.pipe.as_ptr(),
                0,
                count,
                ptr::null_mut(),
                ptr::null_mut(),
            )
        }
    }

    pub fn set_sampler_views(&self, views: &mut [*mut pipe_sampler_view]) {
        unsafe {
            self.pipe.as_ref().set_sampler_views.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                views.len() as u32,
                0,
                false,
                views.as_mut_ptr(),
            )
        }
    }

    pub fn clear_sampler_views(&self, count: u32) {
        let mut samplers = vec![ptr::null_mut(); count as usize];
        unsafe {
            self.pipe.as_ref().set_sampler_views.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                0,
                false,
                samplers.as_mut_ptr(),
            )
        }
    }

    pub fn sampler_view_destroy(&self, view: *mut pipe_sampler_view) {
        unsafe { self.pipe.as_ref().sampler_view_destroy.unwrap()(self.pipe.as_ptr(), view) }
    }

    pub fn set_shader_images(&self, images: &[PipeImageView]) {
        let images = PipeImageView::slice_to_pipe(images);
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                images.len() as u32,
                0,
                images.as_ptr(),
            )
        }
    }

    pub fn clear_shader_images(&self, count: u32) {
        unsafe {
            self.pipe.as_ref().set_shader_images.unwrap()(
                self.pipe.as_ptr(),
                pipe_shader_type::PIPE_SHADER_COMPUTE,
                0,
                count,
                0,
                ptr::null_mut(),
            )
        }
    }

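    /// Creates a driver query object; `query_type` is one of the
    /// `PIPE_QUERY_*` values.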
    pub(crate) fn create_query(&self, query_type: c_uint, index: c_uint) -> *mut pipe_query {
        unsafe { self.pipe.as_ref().create_query.unwrap()(self.pipe.as_ptr(), query_type, index) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, no concurrent access.
    pub(crate) unsafe fn end_query(&self, pq: *mut pipe_query) -> bool {
        unsafe { self.pipe.as_ref().end_query.unwrap()(self.pipe.as_ptr(), pq) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, no concurrent access.
    pub(crate) unsafe fn get_query_result(
        &self,
        pq: *mut pipe_query,
        wait: bool,
        pqr: *mut pipe_query_result,
    ) -> bool {
        unsafe { self.pipe.as_ref().get_query_result.unwrap()(self.pipe.as_ptr(), pq, wait, pqr) }
    }

    /// # Safety
    ///
    /// The usual rules on raw mutable pointers apply; in particular, no concurrent access.
    pub(crate) unsafe fn destroy_query(&self, pq: *mut pipe_query) {
        unsafe { self.pipe.as_ref().destroy_query.unwrap()(self.pipe.as_ptr(), pq) }
    }

    pub fn memory_barrier(&self, barriers: u32) {
        unsafe { self.pipe.as_ref().memory_barrier.unwrap()(self.pipe.as_ptr(), barriers) }
    }

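    /// Flushes all queued work to the GPU and returns a fence signalling its
    /// completion.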
    pub fn flush(&self) -> PipeFence {
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().flush.unwrap()(self.pipe.as_ptr(), &mut fence, 0);
            PipeFence::new(fence, &self.screen)
        }
    }

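    /// Imports a native sync fd as a [`PipeFence`]. Callers should check
    /// [`Self::is_create_fence_fd_supported`] first, as the driver callback is
    /// optional.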
    pub fn import_fence(&self, fence_fd: &FenceFd) -> PipeFence {
        unsafe {
            let mut fence = ptr::null_mut();
            self.pipe.as_ref().create_fence_fd.unwrap()(
                self.pipe.as_ptr(),
                &mut fence,
                fence_fd.fd,
                PIPE_FD_TYPE_NATIVE_SYNC,
            );
            PipeFence::new(fence, &self.screen)
        }
    }

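    /// Asks the driver to migrate the given SVM allocations between host and
    /// device. A no-op when the driver does not implement `svm_migrate`.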
    pub fn svm_migrate(
        &self,
        ptrs: &[usize],
        sizes: &[usize],
        to_device: bool,
        content_undefined: bool,
    ) {
        assert_eq!(ptrs.len(), sizes.len());
        unsafe {
            if let Some(cb) = self.pipe.as_ref().svm_migrate {
                cb(
                    self.pipe.as_ptr(),
                    ptrs.len() as u32,
                    ptrs.as_ptr().cast(),
                    sizes.as_ptr(),
                    to_device,
                    content_undefined,
                );
            }
        }
    }
}

impl Drop for PipeContext {
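    /// Flushes outstanding work and waits for it to finish before asking the
    /// driver to destroy the context.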
    fn drop(&mut self) {
        self.flush().wait();
        unsafe {
            self.pipe.as_ref().destroy.unwrap()(self.pipe.as_ptr());
        }
    }
}

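/// Checks that the driver implements every context callback rusticl calls
/// unconditionally; optional callbacks are checked at their call sites.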
fn has_required_cbs(context: &pipe_context) -> bool {
    // Use the non-short-circuiting '&' so that every callback is checked and
    // all missing ones get reported, instead of stopping at the first.
    has_required_feature!(context, destroy)
        & has_required_feature!(context, bind_compute_state)
        & has_required_feature!(context, bind_sampler_states)
        & has_required_feature!(context, buffer_map)
        & has_required_feature!(context, buffer_subdata)
        & has_required_feature!(context, buffer_unmap)
        & has_required_feature!(context, create_compute_state)
        & has_required_feature!(context, create_query)
        & has_required_feature!(context, delete_compute_state)
        & has_required_feature!(context, delete_sampler_state)
        & has_required_feature!(context, destroy_query)
        & has_required_feature!(context, end_query)
        & has_required_feature!(context, flush)
        & has_required_feature!(context, get_compute_state_info)
        & has_required_feature!(context, launch_grid)
        & has_required_feature!(context, memory_barrier)
        & has_required_feature!(context, resource_copy_region)
        & has_required_feature!(context, sampler_view_destroy)
        & has_required_feature!(context, set_constant_buffer)
        & has_required_feature!(context, set_global_binding)
        & has_required_feature!(context, set_sampler_views)
        & has_required_feature!(context, set_shader_images)
        & has_required_feature!(context, texture_map)
        & has_required_feature!(context, texture_subdata)
        & has_required_feature!(context, texture_unmap)
}