// Copyright 2023 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
mod aarch64;

mod gunyah_sys;
use std::cmp::Reverse;
use std::collections::BTreeMap;
use std::collections::BinaryHeap;
use std::collections::HashSet;
use std::ffi::CString;
use std::fs::File;
use std::mem::size_of;
use std::os::raw::c_ulong;
use std::os::unix::prelude::OsStrExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use base::errno_result;
use base::info;
use base::ioctl;
use base::ioctl_with_ref;
use base::ioctl_with_val;
use base::pagesize;
use base::warn;
use base::Error;
use base::FromRawDescriptor;
use base::MemoryMapping;
use base::MemoryMappingBuilder;
use base::MmapError;
use base::RawDescriptor;
use gunyah_sys::*;
use libc::open;
use libc::EFAULT;
use libc::EINVAL;
use libc::EIO;
use libc::ENOENT;
use libc::ENOSPC;
use libc::ENOTSUP;
use libc::EOVERFLOW;
use libc::O_CLOEXEC;
use libc::O_RDWR;
use sync::Mutex;
use vm_memory::MemoryRegionPurpose;

use crate::*;

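/// Wrapper around the Gunyah device file descriptor (normally `/dev/gunyah`), used to query
/// hypervisor capabilities and to create Gunyah virtual machines.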
pub struct Gunyah {
    gunyah: SafeDescriptor,
}

impl AsRawDescriptor for Gunyah {
    fn as_raw_descriptor(&self) -> RawDescriptor {
        self.gunyah.as_raw_descriptor()
    }
}

impl Gunyah {
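    /// Opens the Gunyah device at `device_path` and wraps the resulting descriptor.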
    pub fn new_with_path(device_path: &Path) -> Result<Gunyah> {
        let c_path = CString::new(device_path.as_os_str().as_bytes()).unwrap();
        // SAFETY:
        // Open calls are safe because we give a nul-terminated string and verify the result.
        let ret = unsafe { open(c_path.as_ptr(), O_RDWR | O_CLOEXEC) };
        if ret < 0 {
            return errno_result();
        }
        Ok(Gunyah {
            // SAFETY:
            // Safe because we verify that ret is valid and we own the fd.
            gunyah: unsafe { SafeDescriptor::from_raw_descriptor(ret) },
        })
    }

    pub fn new() -> Result<Gunyah> {
        Gunyah::new_with_path(&PathBuf::from("/dev/gunyah"))
    }
}

impl Hypervisor for Gunyah {
    fn try_clone(&self) -> Result<Self>
    where
        Self: Sized,
    {
        Ok(Gunyah {
            gunyah: self.gunyah.try_clone()?,
        })
    }

    fn check_capability(&self, cap: HypervisorCap) -> bool {
        match cap {
            HypervisorCap::UserMemory => true,
            HypervisorCap::ArmPmuV3 => false,
            HypervisorCap::ImmediateExit => true,
            HypervisorCap::StaticSwiotlbAllocationRequired => true,
            HypervisorCap::HypervisorInitializedBootContext => true,
            HypervisorCap::S390UserSigp | HypervisorCap::TscDeadlineTimer => false,
            #[cfg(target_arch = "x86_64")]
            HypervisorCap::Xcrs | HypervisorCap::CalibratedTscLeafRequired => false,
        }
    }
}

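// Wrapper around the GH_VM_ANDROID_LEND_USER_MEM ioctl, which lends a range of host user pages to
// the guest at the given guest physical address (used in this module for memory that is not
// shared with the host when the VM's memory is isolated).
//
// SAFETY:
// Safe when the guest regions are guaranteed not to overlap.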
unsafe fn android_lend_user_memory_region(
    vm: &SafeDescriptor,
    slot: MemSlot,
    read_only: bool,
    guest_addr: u64,
    memory_size: u64,
    userspace_addr: *mut u8,
) -> Result<()> {
    let mut flags = 0;

    flags |= GH_MEM_ALLOW_READ | GH_MEM_ALLOW_EXEC;
    if !read_only {
        flags |= GH_MEM_ALLOW_WRITE;
    }

    let region = gh_userspace_memory_region {
        label: slot,
        flags,
        guest_phys_addr: guest_addr,
        memory_size,
        userspace_addr: userspace_addr as u64,
    };

    let ret = ioctl_with_ref(vm, GH_VM_ANDROID_LEND_USER_MEM, &region);
    if ret == 0 {
        Ok(())
    } else {
        errno_result()
    }
}

// Wrapper around the GH_VM_SET_USER_MEM_REGION ioctl, which creates, modifies, or deletes a
// mapping from guest physical to host user pages.
//
// SAFETY:
// Safe when the guest regions are guaranteed not to overlap.
unsafe fn set_user_memory_region(
    vm: &SafeDescriptor,
    slot: MemSlot,
    read_only: bool,
    guest_addr: u64,
    memory_size: u64,
    userspace_addr: *mut u8,
) -> Result<()> {
    let mut flags = 0;

    flags |= GH_MEM_ALLOW_READ | GH_MEM_ALLOW_EXEC;
    if !read_only {
        flags |= GH_MEM_ALLOW_WRITE;
    }

    let region = gh_userspace_memory_region {
        label: slot,
        flags,
        guest_phys_addr: guest_addr,
        memory_size,
        userspace_addr: userspace_addr as u64,
    };

    let ret = ioctl_with_ref(vm, GH_VM_SET_USER_MEM_REGION, &region);
    if ret == 0 {
        Ok(())
    } else {
        errno_result()
    }
}

#[derive(PartialEq, Eq, Hash)]
pub struct GunyahIrqRoute {
    irq: u32,
    level: bool,
}

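/// A wrapper around creating and using a Gunyah VM, including its guest memory, registered
/// interrupt routes, and the memory slot bookkeeping shared between clones of the VM handle.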
pub struct GunyahVm {
    gh: Gunyah,
    vm: SafeDescriptor,
    guest_mem: GuestMemory,
    mem_regions: Arc<Mutex<BTreeMap<MemSlot, (Box<dyn MappedRegion>, GuestAddress)>>>,
    /// A min heap of MemSlot numbers that were used and then removed and can now be re-used
    mem_slot_gaps: Arc<Mutex<BinaryHeap<Reverse<MemSlot>>>>,
    routes: Arc<Mutex<HashSet<GunyahIrqRoute>>>,
    hv_cfg: crate::Config,
}

impl AsRawDescriptor for GunyahVm {
    fn as_raw_descriptor(&self) -> RawDescriptor {
        self.vm.as_raw_descriptor()
    }
}

impl GunyahVm {
    pub fn new(gh: &Gunyah, guest_mem: GuestMemory, cfg: Config) -> Result<GunyahVm> {
        // SAFETY:
        // Safe because we know gunyah is a real gunyah fd as this module is the only one that can
        // make Gunyah objects.
        let ret = unsafe { ioctl_with_val(gh, GH_CREATE_VM, 0 as c_ulong) };
        if ret < 0 {
            return errno_result();
        }

        // SAFETY:
        // Safe because we verify that ret is valid and we own the fd.
        let vm_descriptor = unsafe { SafeDescriptor::from_raw_descriptor(ret) };
        for region in guest_mem.regions() {
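            // Decide whether this region is lent to the guest with GH_VM_ANDROID_LEND_USER_MEM
            // (private memory of a VM whose protection type isolates memory) or shared with it
            // via GH_VM_SET_USER_MEM_REGION (everything else, including the static swiotlb
            // region).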
            let lend = if cfg.protection_type.isolates_memory() {
                match region.options.purpose {
                    MemoryRegionPurpose::GuestMemoryRegion => true,
                    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
                    MemoryRegionPurpose::ProtectedFirmwareRegion => true,
                    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
                    MemoryRegionPurpose::StaticSwiotlbRegion => false,
                }
            } else {
                false
            };
            if lend {
                // SAFETY:
                // Safe because the guest regions are guaranteed not to overlap.
                unsafe {
                    android_lend_user_memory_region(
                        &vm_descriptor,
                        region.index as MemSlot,
                        false,
                        region.guest_addr.offset(),
                        region.size.try_into().unwrap(),
                        region.host_addr as *mut u8,
                    )?;
                }
            } else {
                // SAFETY:
                // Safe because the guest regions are guaranteed not to overlap.
                unsafe {
                    set_user_memory_region(
                        &vm_descriptor,
                        region.index as MemSlot,
                        false,
                        region.guest_addr.offset(),
                        region.size.try_into().unwrap(),
                        region.host_addr as *mut u8,
                    )?;
                }
            }
        }

        Ok(GunyahVm {
            gh: gh.try_clone()?,
            vm: vm_descriptor,
            guest_mem,
            mem_regions: Arc::new(Mutex::new(BTreeMap::new())),
            mem_slot_gaps: Arc::new(Mutex::new(BinaryHeap::new())),
            routes: Arc::new(Mutex::new(HashSet::new())),
            hv_cfg: cfg,
        })
    }

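    /// Creates a vCPU with the given id by adding a VCPU function to the VM and mapping the
    /// vCPU's shared `gh_vcpu_run` structure.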
    fn create_vcpu(&self, id: usize) -> Result<GunyahVcpu> {
        let gh_fn_vcpu_arg = gh_fn_vcpu_arg {
            id: id.try_into().unwrap(),
        };

        let function_desc = gh_fn_desc {
            type_: GH_FN_VCPU,
            arg_size: size_of::<gh_fn_vcpu_arg>() as u32,
            // Safe because kernel is expecting pointer with non-zero arg_size
            arg: &gh_fn_vcpu_arg as *const gh_fn_vcpu_arg as u64,
        };

        // SAFETY:
        // Safe because we know that our file is a VM fd and we verify the return result.
        let fd = unsafe { ioctl_with_ref(self, GH_VM_ADD_FUNCTION, &function_desc) };
        if fd < 0 {
            return errno_result();
        }

        // SAFETY:
        // Wrap the vcpu now in case the following ? returns early. This is safe because we
        // verified the value of the fd and we own the fd.
        let vcpu = unsafe { File::from_raw_descriptor(fd) };

        // SAFETY:
        // Safe because we know this is a Gunyah VCPU
        let res = unsafe { ioctl(&vcpu, GH_VCPU_MMAP_SIZE) };
        if res < 0 {
            return errno_result();
        }
        let run_mmap_size = res as usize;

        let run_mmap = MemoryMappingBuilder::new(run_mmap_size)
            .from_file(&vcpu)
            .build()
            .map_err(|_| Error::new(ENOSPC))?;

        Ok(GunyahVcpu {
            vm: self.vm.try_clone()?,
            vcpu,
            id,
            run_mmap: Arc::new(run_mmap),
        })
    }

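    /// Registers an event that, when signaled, asserts the interrupt identified by `label` in
    /// the guest. If `level` is true, the irqfd is registered as level-triggered.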
    pub fn register_irqfd(&self, label: u32, evt: &Event, level: bool) -> Result<()> {
        let gh_fn_irqfd_arg = gh_fn_irqfd_arg {
            fd: evt.as_raw_descriptor() as u32,
            label,
            flags: if level { GH_IRQFD_LEVEL } else { 0 },
            ..Default::default()
        };

        let function_desc = gh_fn_desc {
            type_: GH_FN_IRQFD,
            arg_size: size_of::<gh_fn_irqfd_arg>() as u32,
            // SAFETY:
            // Safe because kernel is expecting pointer with non-zero arg_size
            arg: &gh_fn_irqfd_arg as *const gh_fn_irqfd_arg as u64,
        };

        // SAFETY: safe because the return value is checked.
        let ret = unsafe { ioctl_with_ref(self, GH_VM_ADD_FUNCTION, &function_desc) };
        if ret == 0 {
            self.routes
                .lock()
                .insert(GunyahIrqRoute { irq: label, level });
            Ok(())
        } else {
            errno_result()
        }
    }

    pub fn unregister_irqfd(&self, label: u32, _evt: &Event) -> Result<()> {
        let gh_fn_irqfd_arg = gh_fn_irqfd_arg {
            label,
            ..Default::default()
        };

        let function_desc = gh_fn_desc {
            type_: GH_FN_IRQFD,
            arg_size: size_of::<gh_fn_irqfd_arg>() as u32,
            // Safe because kernel is expecting pointer with non-zero arg_size
            arg: &gh_fn_irqfd_arg as *const gh_fn_irqfd_arg as u64,
        };

        // SAFETY: safe because memory is not modified and the return value is checked.
        let ret = unsafe { ioctl_with_ref(self, GH_VM_REMOVE_FUNCTION, &function_desc) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }

    pub fn try_clone(&self) -> Result<Self>
    where
        Self: Sized,
    {
        Ok(GunyahVm {
            gh: self.gh.try_clone()?,
            vm: self.vm.try_clone()?,
            guest_mem: self.guest_mem.clone(),
            mem_regions: self.mem_regions.clone(),
            mem_slot_gaps: self.mem_slot_gaps.clone(),
            routes: self.routes.clone(),
            hv_cfg: self.hv_cfg,
        })
    }

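    /// Tells the hypervisor where the device tree blob has been placed in guest memory.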
    fn set_dtb_config(&self, fdt_address: GuestAddress, fdt_size: usize) -> Result<()> {
        let dtb_config = gh_vm_dtb_config {
            guest_phys_addr: fdt_address.offset(),
            size: fdt_size.try_into().unwrap(),
        };

        // SAFETY:
        // Safe because we know this is a Gunyah VM
        let ret = unsafe { ioctl_with_ref(self, GH_VM_SET_DTB_CONFIG, &dtb_config) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }

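    /// Tells the hypervisor the guest physical address range reserved for the protected VM
    /// firmware.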
    fn set_protected_vm_firmware_ipa(&self, fw_addr: GuestAddress, fw_size: u64) -> Result<()> {
        let fw_config = gh_vm_firmware_config {
            guest_phys_addr: fw_addr.offset(),
            size: fw_size,
        };

        // SAFETY:
        // Safe because we know this is a Gunyah VM
        let ret = unsafe { ioctl_with_ref(self, GH_VM_ANDROID_SET_FW_CONFIG, &fw_config) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }

    fn start(&self) -> Result<()> {
        // SAFETY: safe because memory is not modified and the return value is checked.
        let ret = unsafe { ioctl(self, GH_VM_START) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }
}

impl Vm for GunyahVm {
    fn try_clone(&self) -> Result<Self>
    where
        Self: Sized,
    {
        Ok(GunyahVm {
            gh: self.gh.try_clone()?,
            vm: self.vm.try_clone()?,
            guest_mem: self.guest_mem.clone(),
            mem_regions: self.mem_regions.clone(),
            mem_slot_gaps: self.mem_slot_gaps.clone(),
            routes: self.routes.clone(),
            hv_cfg: self.hv_cfg,
        })
    }

    fn check_capability(&self, c: VmCap) -> bool {
        match c {
            VmCap::DirtyLog => false,
            // Strictly speaking, Gunyah supports pvclock, but Gunyah takes care of it and crosvm
            // doesn't need to do anything for it.
            VmCap::PvClock => false,
            VmCap::Protected => true,
            VmCap::EarlyInitCpuid => false,
            #[cfg(target_arch = "x86_64")]
            VmCap::BusLockDetect => false,
            VmCap::ReadOnlyMemoryRegion => false,
            VmCap::MemNoncoherentDma => false,
        }
    }

    fn get_guest_phys_addr_bits(&self) -> u8 {
        40
    }

    fn get_memory(&self) -> &GuestMemory {
        &self.guest_mem
    }

    fn add_memory_region(
        &mut self,
        guest_addr: GuestAddress,
        mem_region: Box<dyn MappedRegion>,
        read_only: bool,
        _log_dirty_pages: bool,
        _cache: MemCacheType,
    ) -> Result<MemSlot> {
        let pgsz = pagesize() as u64;
        // Gunyah requires the user memory region size to be page-size aligned. It is safe to
        // round mem_region.size() up to page alignment here because mmap rounds the mapping size
        // up to a page boundary anyway.
        let size = (mem_region.size() as u64 + pgsz - 1) / pgsz * pgsz;
        let end_addr = guest_addr.checked_add(size).ok_or(Error::new(EOVERFLOW))?;

        if self.guest_mem.range_overlap(guest_addr, end_addr) {
            return Err(Error::new(ENOSPC));
        }

        let mut regions = self.mem_regions.lock();
        let mut gaps = self.mem_slot_gaps.lock();
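        // Re-use the lowest previously-freed slot if one is available; otherwise allocate a new
        // slot number after the slots already taken by the GuestMemory regions.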
        let slot = match gaps.pop() {
            Some(gap) => gap.0,
            None => (regions.len() + self.guest_mem.num_regions() as usize) as MemSlot,
        };

        // SAFETY: safe because memory is not modified and the return value is checked.
        let res = unsafe {
            set_user_memory_region(
                &self.vm,
                slot,
                read_only,
                guest_addr.offset(),
                size,
                mem_region.as_ptr(),
            )
        };

        if let Err(e) = res {
            gaps.push(Reverse(slot));
            return Err(e);
        }
        regions.insert(slot, (mem_region, guest_addr));
        Ok(slot)
    }

    fn msync_memory_region(&mut self, slot: MemSlot, offset: usize, size: usize) -> Result<()> {
        let mut regions = self.mem_regions.lock();
        let (mem, _) = regions.get_mut(&slot).ok_or_else(|| Error::new(ENOENT))?;

        mem.msync(offset, size).map_err(|err| match err {
            MmapError::InvalidAddress => Error::new(EFAULT),
            MmapError::NotPageAligned => Error::new(EINVAL),
            MmapError::SystemCallFailed(e) => e,
            _ => Error::new(EIO),
        })
    }

    fn madvise_pageout_memory_region(
        &mut self,
        _slot: MemSlot,
        _offset: usize,
        _size: usize,
    ) -> Result<()> {
        Err(Error::new(ENOTSUP))
    }

    fn madvise_remove_memory_region(
        &mut self,
        _slot: MemSlot,
        _offset: usize,
        _size: usize,
    ) -> Result<()> {
        Err(Error::new(ENOTSUP))
    }

    fn remove_memory_region(&mut self, _slot: MemSlot) -> Result<Box<dyn MappedRegion>> {
        unimplemented!()
    }

    fn create_device(&self, _kind: DeviceKind) -> Result<SafeDescriptor> {
        unimplemented!()
    }

    fn get_dirty_log(&self, _slot: MemSlot, _dirty_log: &mut [u8]) -> Result<()> {
        unimplemented!()
    }

    fn register_ioevent(
        &mut self,
        evt: &Event,
        addr: IoEventAddress,
        datamatch: Datamatch,
    ) -> Result<()> {
        let (do_datamatch, datamatch_value, datamatch_len) = match datamatch {
            Datamatch::AnyLength => (false, 0, 0),
            Datamatch::U8(v) => match v {
                Some(u) => (true, u as u64, 1),
                None => (false, 0, 1),
            },
            Datamatch::U16(v) => match v {
                Some(u) => (true, u as u64, 2),
                None => (false, 0, 2),
            },
            Datamatch::U32(v) => match v {
                Some(u) => (true, u as u64, 4),
                None => (false, 0, 4),
            },
            Datamatch::U64(v) => match v {
                Some(u) => (true, u, 8),
                None => (false, 0, 8),
            },
        };

        let mut flags = 0;
        if do_datamatch {
            flags |= 1 << GH_IOEVENTFD_DATAMATCH;
        }

        let maddr = if let IoEventAddress::Mmio(maddr) = addr {
            maddr
        } else {
            todo!()
        };

        let gh_fn_ioeventfd_arg = gh_fn_ioeventfd_arg {
            fd: evt.as_raw_descriptor(),
            datamatch: datamatch_value,
            len: datamatch_len,
            addr: maddr,
            flags,
            ..Default::default()
        };

        let function_desc = gh_fn_desc {
            type_: GH_FN_IOEVENTFD,
            arg_size: size_of::<gh_fn_ioeventfd_arg>() as u32,
            arg: &gh_fn_ioeventfd_arg as *const gh_fn_ioeventfd_arg as u64,
        };

        // SAFETY: safe because memory is not modified and the return value is checked.
        let ret = unsafe { ioctl_with_ref(self, GH_VM_ADD_FUNCTION, &function_desc) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }

    fn unregister_ioevent(
        &mut self,
        _evt: &Event,
        addr: IoEventAddress,
        _datamatch: Datamatch,
    ) -> Result<()> {
        let maddr = if let IoEventAddress::Mmio(maddr) = addr {
            maddr
        } else {
            todo!()
        };

        let gh_fn_ioeventfd_arg = gh_fn_ioeventfd_arg {
            addr: maddr,
            ..Default::default()
        };

        let function_desc = gh_fn_desc {
            type_: GH_FN_IOEVENTFD,
            arg_size: size_of::<gh_fn_ioeventfd_arg>() as u32,
            arg: &gh_fn_ioeventfd_arg as *const gh_fn_ioeventfd_arg as u64,
        };

        // SAFETY: safe because memory is not modified and the return value is checked.
        let ret = unsafe { ioctl_with_ref(self, GH_VM_REMOVE_FUNCTION, &function_desc) };
        if ret == 0 {
            Ok(())
        } else {
            errno_result()
        }
    }

    fn handle_io_events(&self, _addr: IoEventAddress, _data: &[u8]) -> Result<()> {
        Ok(())
    }

    fn get_pvclock(&self) -> Result<ClockState> {
        unimplemented!()
    }

    fn set_pvclock(&self, _state: &ClockState) -> Result<()> {
        unimplemented!()
    }

    fn add_fd_mapping(
        &mut self,
        slot: u32,
        offset: usize,
        size: usize,
        fd: &dyn AsRawDescriptor,
        fd_offset: u64,
        prot: Protection,
    ) -> Result<()> {
        let mut regions = self.mem_regions.lock();
        let (region, _) = regions.get_mut(&slot).ok_or_else(|| Error::new(EINVAL))?;

        match region.add_fd_mapping(offset, size, fd, fd_offset, prot) {
            Ok(()) => Ok(()),
            Err(MmapError::SystemCallFailed(e)) => Err(e),
            Err(_) => Err(Error::new(EIO)),
        }
    }

    fn remove_mapping(&mut self, slot: u32, offset: usize, size: usize) -> Result<()> {
        let mut regions = self.mem_regions.lock();
        let (region, _) = regions.get_mut(&slot).ok_or_else(|| Error::new(EINVAL))?;

        match region.remove_mapping(offset, size) {
            Ok(()) => Ok(()),
            Err(MmapError::SystemCallFailed(e)) => Err(e),
            Err(_) => Err(Error::new(EIO)),
        }
    }

    fn handle_balloon_event(&mut self, _event: BalloonEvent) -> Result<()> {
        unimplemented!()
    }
}

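// Exit types reported in `exit_info.type_` when a vCPU exits with status
// GH_VM_STATUS_GH_VM_STATUS_EXITED.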
const GH_RM_EXIT_TYPE_VM_EXIT: u16 = 0;
const GH_RM_EXIT_TYPE_PSCI_POWER_OFF: u16 = 1;
const GH_RM_EXIT_TYPE_PSCI_SYSTEM_RESET: u16 = 2;
const GH_RM_EXIT_TYPE_PSCI_SYSTEM_RESET2: u16 = 3;
const GH_RM_EXIT_TYPE_WDT_BITE: u16 = 4;
const GH_RM_EXIT_TYPE_HYP_ERROR: u16 = 5;
const GH_RM_EXIT_TYPE_ASYNC_EXT_ABORT: u16 = 6;
const GH_RM_EXIT_TYPE_VM_FORCE_STOPPED: u16 = 7;

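/// A wrapper around using a Gunyah vCPU, holding the vCPU fd and the shared `gh_vcpu_run`
/// mapping used to exchange exit information with the kernel.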
pub struct GunyahVcpu {
    vm: SafeDescriptor,
    vcpu: File,
    id: usize,
    run_mmap: Arc<MemoryMapping>,
}

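/// Handle used to request an immediate exit of a running vCPU from another thread by writing to
/// the vCPU's shared `gh_vcpu_run` mapping.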
struct GunyahVcpuSignalHandle {
    run_mmap: Arc<MemoryMapping>,
}

impl VcpuSignalHandleInner for GunyahVcpuSignalHandle {
    fn signal_immediate_exit(&self) {
        // SAFETY: we ensure `run_mmap` is a valid mapping of `gh_vcpu_run` at creation time, and
        // the `Arc` ensures the mapping still exists while we hold a reference to it.
        unsafe {
            let run = self.run_mmap.as_ptr() as *mut gh_vcpu_run;
            (*run).immediate_exit = 1;
        }
    }
}

impl AsRawDescriptor for GunyahVcpu {
    fn as_raw_descriptor(&self) -> RawDescriptor {
        self.vcpu.as_raw_descriptor()
    }
}

impl Vcpu for GunyahVcpu {
    fn try_clone(&self) -> Result<Self>
    where
        Self: Sized,
    {
        let vcpu = self.vcpu.try_clone()?;

        Ok(GunyahVcpu {
            vm: self.vm.try_clone()?,
            vcpu,
            id: self.id,
            run_mmap: self.run_mmap.clone(),
        })
    }

    fn as_vcpu(&self) -> &dyn Vcpu {
        self
    }

    fn run(&mut self) -> Result<VcpuExit> {
        // SAFETY:
        // Safe because we know our file is a VCPU fd and we verify the return result.
        let ret = unsafe { ioctl(self, GH_VCPU_RUN) };
        if ret != 0 {
            return errno_result();
        }

        // SAFETY:
        // Safe because we know we mapped enough memory to hold the gh_vcpu_run struct
        // because the kernel told us how large it is.
        let run = unsafe { &mut *(self.run_mmap.as_ptr() as *mut gh_vcpu_run) };
        match run.exit_reason {
            GH_VCPU_EXIT_MMIO => Ok(VcpuExit::Mmio),
            GH_VCPU_EXIT_STATUS => {
                // SAFETY:
                // Safe because the exit_reason (which comes from the kernel) told us which
                // union field to use.
                let status = unsafe { &mut run.__bindgen_anon_1.status };
                match status.status {
                    GH_VM_STATUS_GH_VM_STATUS_LOAD_FAILED => Ok(VcpuExit::FailEntry {
                        hardware_entry_failure_reason: 0,
                    }),
                    GH_VM_STATUS_GH_VM_STATUS_CRASHED => Ok(VcpuExit::SystemEventCrash),
                    GH_VM_STATUS_GH_VM_STATUS_EXITED => {
                        info!("exit type {}", status.exit_info.type_);
                        match status.exit_info.type_ {
                            GH_RM_EXIT_TYPE_VM_EXIT => Ok(VcpuExit::SystemEventShutdown),
                            GH_RM_EXIT_TYPE_PSCI_POWER_OFF => Ok(VcpuExit::SystemEventShutdown),
                            GH_RM_EXIT_TYPE_PSCI_SYSTEM_RESET => Ok(VcpuExit::SystemEventReset),
                            GH_RM_EXIT_TYPE_PSCI_SYSTEM_RESET2 => Ok(VcpuExit::SystemEventReset),
                            GH_RM_EXIT_TYPE_WDT_BITE => Ok(VcpuExit::SystemEventCrash),
                            GH_RM_EXIT_TYPE_HYP_ERROR => Ok(VcpuExit::SystemEventCrash),
                            GH_RM_EXIT_TYPE_ASYNC_EXT_ABORT => Ok(VcpuExit::SystemEventCrash),
                            GH_RM_EXIT_TYPE_VM_FORCE_STOPPED => Ok(VcpuExit::SystemEventShutdown),
                            r => {
                                warn!("Unknown exit type: {}", r);
                                Err(Error::new(EINVAL))
                            }
                        }
                    }
                    r => {
                        warn!("Unknown vm status: {}", r);
                        Err(Error::new(EINVAL))
                    }
                }
            }
            r => {
                warn!("unknown gh exit reason: {}", r);
                Err(Error::new(EINVAL))
            }
        }
    }

    fn id(&self) -> usize {
        self.id
    }

    fn set_immediate_exit(&self, exit: bool) {
        // SAFETY:
        // Safe because we know we mapped enough memory to hold the gh_vcpu_run struct because the
        // kernel told us how large it was. The pointer is page aligned, so casting it to a
        // different type is well defined.
        let run = unsafe { &mut *(self.run_mmap.as_ptr() as *mut gh_vcpu_run) };
        run.immediate_exit = exit.into();
    }

    fn signal_handle(&self) -> VcpuSignalHandle {
        VcpuSignalHandle {
            inner: Box::new(GunyahVcpuSignalHandle {
                run_mmap: self.run_mmap.clone(),
            }),
        }
    }

    fn handle_mmio(&self, handle_fn: &mut dyn FnMut(IoParams) -> Result<()>) -> Result<()> {
        // SAFETY:
        // Safe because we know we mapped enough memory to hold the gh_vcpu_run struct because the
        // kernel told us how large it was. The pointer is page aligned so casting to a different
        // type is well defined.
        let run = unsafe { &mut *(self.run_mmap.as_ptr() as *mut gh_vcpu_run) };
        // Verify that the handler is called in the right context.
        assert!(run.exit_reason == GH_VCPU_EXIT_MMIO);
        // SAFETY:
        // Safe because the exit_reason (which comes from the kernel) told us which
        // union field to use.
        let mmio = unsafe { &mut run.__bindgen_anon_1.mmio };
        let address = mmio.phys_addr;
        let data = &mut mmio.data[..mmio.len as usize];
        if mmio.is_write != 0 {
            handle_fn(IoParams {
                address,
                operation: IoOperation::Write(data),
            })
        } else {
            handle_fn(IoParams {
                address,
                operation: IoOperation::Read(data),
            })
        }
    }

    fn handle_io(&self, _handle_fn: &mut dyn FnMut(IoParams)) -> Result<()> {
        unreachable!()
    }

    fn on_suspend(&self) -> Result<()> {
        Ok(())
    }

    unsafe fn enable_raw_capability(&self, _cap: u32, _args: &[u64; 4]) -> Result<()> {
        unimplemented!()
    }
}