1 // Copyright 2024 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 extern crate rutabaga_gfx;
6
7 mod virtgpu;
8
9 use std::boxed::Box;
10 use std::ffi::CStr;
11 use std::os::raw::c_char;
12 use std::os::raw::c_void;
13 use std::panic::catch_unwind;
14 use std::panic::AssertUnwindSafe;
15 use std::ptr::null_mut;
16 use std::slice::from_raw_parts;
17 use std::slice::from_raw_parts_mut;
18 use std::sync::Mutex;
19
20 use libc::EINVAL;
21 use libc::ESRCH;
22 use log::error;
23 use rutabaga_gfx::kumquat_support::RUTABAGA_DEFAULT_RAW_DESCRIPTOR;
24 use rutabaga_gfx::RutabagaDescriptor;
25 use rutabaga_gfx::RutabagaFromRawDescriptor;
26 use rutabaga_gfx::RutabagaHandle;
27 use rutabaga_gfx::RutabagaIntoRawDescriptor;
28 use rutabaga_gfx::RutabagaRawDescriptor;
29 use rutabaga_gfx::RutabagaResult;
30 use virtgpu::defines::*;
31 use virtgpu::VirtGpuKumquat;
32
33 const NO_ERROR: i32 = 0;
34
return_result<T>(result: RutabagaResult<T>) -> i3235 fn return_result<T>(result: RutabagaResult<T>) -> i32 {
36 if let Err(e) = result {
37 error!("An error occurred: {}", e);
38 -EINVAL
39 } else {
40 NO_ERROR
41 }
42 }
43
// Unwraps a `Result` inside an FFI entry point: evaluates to the `Ok` value,
// or logs the error and makes the *enclosing function* return `-EINVAL`.
// (Unlike `return_result`, this is a macro so it can early-return for the
// caller and yield the success value inline.)
macro_rules! return_on_error {
    ($result:expr) => {
        match $result {
            Ok(t) => t,
            Err(e) => {
                error!("An error occurred: {}", e);
                return -EINVAL;
            }
        }
    };
}
55
// Opaque device handle handed out to C callers: the `VirtGpuKumquat` state is
// wrapped in a `Mutex` so the FFI entry points may be invoked from multiple
// threads.
#[allow(non_camel_case_types)]
type virtgpu_kumquat = Mutex<VirtGpuKumquat>;

// The following structs (in define.rs) must be ABI-compatible with FFI header
// (virtgpu_kumquat_ffi.h). The aliases below give them the C-style names used
// in that header.

#[allow(non_camel_case_types)]
type drm_kumquat_getparam = VirtGpuParam;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_unref = VirtGpuResourceUnref;

#[allow(non_camel_case_types)]
type drm_kumquat_get_caps = VirtGpuGetCaps;

#[allow(non_camel_case_types)]
type drm_kumquat_context_init = VirtGpuContextInit;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_create_3d = VirtGpuResourceCreate3D;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_create_blob = VirtGpuResourceCreateBlob;

#[allow(non_camel_case_types)]
type drm_kumquat_transfer_to_host = VirtGpuTransfer;

#[allow(non_camel_case_types)]
type drm_kumquat_transfer_from_host = VirtGpuTransfer;

#[allow(non_camel_case_types)]
type drm_kumquat_execbuffer = VirtGpuExecBuffer;

#[allow(non_camel_case_types)]
type drm_kumquat_wait = VirtGpuWait;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_map = VirtGpuResourceMap;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_export = VirtGpuResourceExport;

#[allow(non_camel_case_types)]
type drm_kumquat_resource_import = VirtGpuResourceImport;
100
#[no_mangle]
// Creates a new kumquat device instance and stores an owning raw pointer in
// `*ptr`. Returns 0 on success, `-EINVAL` on connection/encoding failure, or
// `-ESRCH` if a panic was caught.
//
// The pointer written to `*ptr` must eventually be released with
// `virtgpu_kumquat_finish`.
pub unsafe extern "C" fn virtgpu_kumquat_init(
    ptr: &mut *mut virtgpu_kumquat,
    gpu_socket: Option<&c_char>,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        // Use the caller-supplied socket path when present, otherwise fall
        // back to the default kumquat GPU socket.
        let gpu_socket_str = match gpu_socket {
            Some(value) => {
                // SAFETY: the caller must pass a valid, NUL-terminated C
                // string (a NULL pointer maps to `None` via the FFI-safe
                // `Option<&c_char>` representation).
                let c_str_slice = CStr::from_ptr(value);
                // Reject paths that are not valid UTF-8.
                let result = c_str_slice.to_str();
                return_on_error!(result)
            }
            None => "/tmp/kumquat-gpu-0",
        };

        let result = VirtGpuKumquat::new(gpu_socket_str);
        let kmqt = return_on_error!(result);
        // Ownership of the boxed, mutex-wrapped instance is transferred to
        // the caller; `virtgpu_kumquat_finish` reclaims it.
        *ptr = Box::into_raw(Box::new(Mutex::new(kmqt))) as _;
        NO_ERROR
    }))
    .unwrap_or(-ESRCH)
}
123
124 #[no_mangle]
virtgpu_kumquat_finish(ptr: &mut *mut virtgpu_kumquat) -> i32125 pub extern "C" fn virtgpu_kumquat_finish(ptr: &mut *mut virtgpu_kumquat) -> i32 {
126 catch_unwind(AssertUnwindSafe(|| {
127 let _ = unsafe { Box::from_raw(*ptr) };
128 *ptr = null_mut();
129 NO_ERROR
130 }))
131 .unwrap_or(-ESRCH)
132 }
133
134 #[no_mangle]
virtgpu_kumquat_get_param( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_getparam, ) -> i32135 pub unsafe extern "C" fn virtgpu_kumquat_get_param(
136 ptr: &mut virtgpu_kumquat,
137 cmd: &mut drm_kumquat_getparam,
138 ) -> i32 {
139 catch_unwind(AssertUnwindSafe(|| {
140 let result = ptr.lock().unwrap().get_param(cmd);
141 return_result(result)
142 }))
143 .unwrap_or(-ESRCH)
144 }
145
#[no_mangle]
// Copies the capability set identified by `cmd.cap_set_id` into the
// caller-provided buffer described by `cmd.addr`/`cmd.size`.
pub unsafe extern "C" fn virtgpu_kumquat_get_caps(
    ptr: &mut virtgpu_kumquat,
    cmd: &drm_kumquat_get_caps,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        // SAFETY: the caller must supply `cmd.addr`/`cmd.size` describing a
        // valid, writable buffer that lives for the duration of this call.
        // NOTE(review): `addr` is presumed non-null even when `size` is 0 —
        // `from_raw_parts_mut` requires a non-null pointer; confirm callers.
        let caps_slice = from_raw_parts_mut(cmd.addr as *mut u8, cmd.size as usize);
        let result = ptr.lock().unwrap().get_caps(cmd.cap_set_id, caps_slice);
        return_result(result)
    }))
    .unwrap_or(-ESRCH)
}
158
159 #[no_mangle]
virtgpu_kumquat_context_init( ptr: &mut virtgpu_kumquat, cmd: &drm_kumquat_context_init, ) -> i32160 pub unsafe extern "C" fn virtgpu_kumquat_context_init(
161 ptr: &mut virtgpu_kumquat,
162 cmd: &drm_kumquat_context_init,
163 ) -> i32 {
164 catch_unwind(AssertUnwindSafe(|| {
165 let context_params: &[VirtGpuParam] = from_raw_parts(
166 cmd.ctx_set_params as *const VirtGpuParam,
167 cmd.num_params as usize,
168 );
169
170 let mut capset_id: u64 = 0;
171
172 for param in context_params {
173 match param.param {
174 VIRTGPU_KUMQUAT_CONTEXT_PARAM_CAPSET_ID => {
175 capset_id = param.value;
176 }
177 _ => (),
178 }
179 }
180
181 let result = ptr.lock().unwrap().context_create(capset_id, "");
182 return_result(result)
183 }))
184 .unwrap_or(-ESRCH)
185 }
186
187 #[no_mangle]
virtgpu_kumquat_resource_create_3d( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_resource_create_3d, ) -> i32188 pub unsafe extern "C" fn virtgpu_kumquat_resource_create_3d(
189 ptr: &mut virtgpu_kumquat,
190 cmd: &mut drm_kumquat_resource_create_3d,
191 ) -> i32 {
192 catch_unwind(AssertUnwindSafe(|| {
193 let result = ptr.lock().unwrap().resource_create_3d(cmd);
194 return_result(result)
195 }))
196 .unwrap_or(-ESRCH)
197 }
198
#[no_mangle]
// Creates a blob resource; the optional creation command stream is passed via
// `cmd.cmd`/`cmd.cmd_size` and the results are written back into `cmd`.
pub unsafe extern "C" fn virtgpu_kumquat_resource_create_blob(
    ptr: &mut virtgpu_kumquat,
    cmd: &mut drm_kumquat_resource_create_blob,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        // SAFETY: the caller must supply `cmd.cmd`/`cmd.cmd_size` describing
        // a valid, readable buffer for this call's duration.
        // NOTE(review): `cmd.cmd` is presumed non-null even when `cmd_size`
        // is 0 — `from_raw_parts` requires a non-null pointer; confirm.
        let blob_cmd = from_raw_parts(cmd.cmd as *const u8, cmd.cmd_size as usize);
        let result = ptr.lock().unwrap().resource_create_blob(cmd, blob_cmd);
        return_result(result)
    }))
    .unwrap_or(-ESRCH)
}
211
212 #[no_mangle]
virtgpu_kumquat_resource_unref( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_resource_unref, ) -> i32213 pub unsafe extern "C" fn virtgpu_kumquat_resource_unref(
214 ptr: &mut virtgpu_kumquat,
215 cmd: &mut drm_kumquat_resource_unref,
216 ) -> i32 {
217 catch_unwind(AssertUnwindSafe(|| {
218 let result = ptr.lock().unwrap().resource_unref(cmd.bo_handle);
219 return_result(result)
220 }))
221 .unwrap_or(-ESRCH)
222 }
223
224 #[no_mangle]
virtgpu_kumquat_resource_map( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_resource_map, ) -> i32225 pub unsafe extern "C" fn virtgpu_kumquat_resource_map(
226 ptr: &mut virtgpu_kumquat,
227 cmd: &mut drm_kumquat_resource_map,
228 ) -> i32 {
229 catch_unwind(AssertUnwindSafe(|| {
230 let result = ptr.lock().unwrap().map(cmd.bo_handle);
231 let internal_map = return_on_error!(result);
232 (*cmd).ptr = internal_map.ptr as *mut c_void;
233 (*cmd).size = internal_map.size;
234 NO_ERROR
235 }))
236 .unwrap_or(-ESRCH)
237 }
238
239 #[no_mangle]
virtgpu_kumquat_resource_unmap( ptr: &mut virtgpu_kumquat, bo_handle: u32, ) -> i32240 pub unsafe extern "C" fn virtgpu_kumquat_resource_unmap(
241 ptr: &mut virtgpu_kumquat,
242 bo_handle: u32,
243 ) -> i32 {
244 catch_unwind(AssertUnwindSafe(|| {
245 let result = ptr.lock().unwrap().unmap(bo_handle);
246 return_result(result)
247 }))
248 .unwrap_or(-ESRCH)
249 }
250
251 #[no_mangle]
virtgpu_kumquat_transfer_to_host( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_transfer_to_host, ) -> i32252 pub unsafe extern "C" fn virtgpu_kumquat_transfer_to_host(
253 ptr: &mut virtgpu_kumquat,
254 cmd: &mut drm_kumquat_transfer_to_host,
255 ) -> i32 {
256 catch_unwind(AssertUnwindSafe(|| {
257 let result = ptr.lock().unwrap().transfer_to_host(cmd);
258 return_result(result)
259 }))
260 .unwrap_or(-ESRCH)
261 }
262
263 #[no_mangle]
virtgpu_kumquat_transfer_from_host( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_transfer_from_host, ) -> i32264 pub unsafe extern "C" fn virtgpu_kumquat_transfer_from_host(
265 ptr: &mut virtgpu_kumquat,
266 cmd: &mut drm_kumquat_transfer_from_host,
267 ) -> i32 {
268 catch_unwind(AssertUnwindSafe(|| {
269 let result = ptr.lock().unwrap().transfer_from_host(cmd);
270 return_result(result)
271 }))
272 .unwrap_or(-ESRCH)
273 }
274
#[no_mangle]
// Submits a command buffer for execution. On return, `cmd.fence_handle`
// carries a raw fence descriptor produced by the submission.
pub unsafe extern "C" fn virtgpu_kumquat_execbuffer(
    ptr: &mut virtgpu_kumquat,
    cmd: &mut drm_kumquat_execbuffer,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        // SAFETY: the caller must provide valid arrays for
        // `bo_handles`/`num_bo_handles` and `command`/`size` that remain
        // live and unaliased for the duration of this call.
        let bo_handles = from_raw_parts(cmd.bo_handles as *const u32, cmd.num_bo_handles as usize);
        let cmd_buf = from_raw_parts(cmd.command as *const u8, cmd.size as usize);

        // TODO: in-fences are not yet plumbed through from the caller; an
        // empty slice is passed for now.
        let in_fences: &[u64] = &[0; 0];

        let mut descriptor: RutabagaRawDescriptor = RUTABAGA_DEFAULT_RAW_DESCRIPTOR;
        let result = ptr.lock().unwrap().submit_command(
            cmd.flags,
            bo_handles,
            cmd_buf,
            cmd.ring_idx,
            in_fences,
            &mut descriptor,
        );

        // NOTE(review): `fence_handle` is written even when submission
        // failed; in that case it holds the default raw descriptor value.
        cmd.fence_handle = descriptor as i64;
        return_result(result)
    }))
    .unwrap_or(-ESRCH)
}
302
303 #[no_mangle]
virtgpu_kumquat_wait( ptr: &mut virtgpu_kumquat, cmd: &mut drm_kumquat_wait, ) -> i32304 pub unsafe extern "C" fn virtgpu_kumquat_wait(
305 ptr: &mut virtgpu_kumquat,
306 cmd: &mut drm_kumquat_wait,
307 ) -> i32 {
308 catch_unwind(AssertUnwindSafe(|| {
309 let result = ptr.lock().unwrap().wait(cmd.bo_handle);
310 return_result(result)
311 }))
312 .unwrap_or(-ESRCH)
313 }
314
#[no_mangle]
// Exports a resource as an OS handle. On success, `cmd.handle_type` and
// `cmd.os_handle` receive the exported handle; ownership of the raw
// descriptor is transferred to the caller.
//
// NOTE(review): unlike the sibling entry points this one is not declared
// `unsafe` — it only uses safe references — but confirm that is intentional
// for API consistency with the FFI header.
pub extern "C" fn virtgpu_kumquat_resource_export(
    ptr: &mut virtgpu_kumquat,
    cmd: &mut drm_kumquat_resource_export,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        let result = ptr
            .lock()
            .unwrap()
            .resource_export(cmd.bo_handle, cmd.flags);
        let hnd = return_on_error!(result);

        // Hand the raw descriptor to the caller; Rust no longer closes it.
        (*cmd).handle_type = hnd.handle_type;
        (*cmd).os_handle = hnd.os_handle.into_raw_descriptor() as i64;
        NO_ERROR
    }))
    .unwrap_or(-ESRCH)
}
333
#[no_mangle]
// Imports an externally created resource from an OS handle. The resulting
// bo handle, resource handle and size are written back into `cmd`.
pub unsafe extern "C" fn virtgpu_kumquat_resource_import(
    ptr: &mut virtgpu_kumquat,
    cmd: &mut drm_kumquat_resource_import,
) -> i32 {
    catch_unwind(AssertUnwindSafe(|| {
        // Reconstruct an owned handle from the caller's raw descriptor.
        // NOTE(review): this takes ownership of `cmd.os_handle` — the
        // descriptor is closed when the handle is dropped, so the caller
        // must not reuse it after this call; confirm against the header.
        let handle = RutabagaHandle {
            os_handle: RutabagaDescriptor::from_raw_descriptor(
                (*cmd).os_handle.into_raw_descriptor(),
            ),
            handle_type: (*cmd).handle_type,
        };

        let result = ptr.lock().unwrap().resource_import(
            handle,
            &mut cmd.bo_handle,
            &mut cmd.res_handle,
            &mut cmd.size,
        );

        return_result(result)
    }))
    .unwrap_or(-ESRCH)
}
358
359 #[no_mangle]
virtgpu_kumquat_snapshot_save(ptr: &mut virtgpu_kumquat) -> i32360 pub unsafe extern "C" fn virtgpu_kumquat_snapshot_save(ptr: &mut virtgpu_kumquat) -> i32 {
361 catch_unwind(AssertUnwindSafe(|| {
362 let result = ptr.lock().unwrap().snapshot();
363 return_result(result)
364 }))
365 .unwrap_or(-ESRCH)
366 }
367
368 #[no_mangle]
virtgpu_kumquat_snapshot_restore(ptr: &mut virtgpu_kumquat) -> i32369 pub unsafe extern "C" fn virtgpu_kumquat_snapshot_restore(ptr: &mut virtgpu_kumquat) -> i32 {
370 catch_unwind(AssertUnwindSafe(|| {
371 let result = ptr.lock().unwrap().restore();
372 return_result(result)
373 }))
374 .unwrap_or(-ESRCH)
375 }
376