// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Object, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub(crate) struct Object(pub [u8; 4]);
impl Default for Object {
  fn default() -> Self {
    Self([0; 4])
  }
}
impl core::fmt::Debug for Object {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Object")
      .field("value", &self.value())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Object {}
impl flatbuffers::SafeSliceAccess for Object {}
impl<'a> flatbuffers::Follow<'a> for Object {
  type Inner = &'a Object;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Object>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Object {
  type Inner = &'a Object;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Object>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Object {
  type Output = Object;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::core::slice::from_raw_parts(self as *const Object as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}
impl<'b> flatbuffers::Push for &'b Object {
  type Output = Object;

  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::core::slice::from_raw_parts(*self as *const Object as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for Object {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> Object {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    value: i32,
  ) -> Self {
    let mut s = Self([0; 4]);
    s.set_value(value);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "Object"
  }

  pub fn value(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<i32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_value(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<i32>(),
      );
    }
  }

  pub fn unpack(&self) -> ObjectT {
    ObjectT {
      value: self.value(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub(crate) struct ObjectT {
  pub value: i32,
}
impl ObjectT {
  pub fn pack(&self) -> Object {
    Object::new(
      self.value,
    )
  }
}
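
// A minimal usage sketch (an illustrative addition, not part of the flatc output):
// it exercises only the items defined above — `Object::new`, the `value` accessor,
// and the Object <-> ObjectT round trip via `unpack`/`pack`. The test module and
// test names are assumptions made for the example.
#[cfg(test)]
mod object_roundtrip_sketch {
  use super::*;

  #[test]
  fn value_roundtrips_through_pack_and_unpack() {
    // Build the fixed-size struct and read the little-endian field back.
    let obj = Object::new(42);
    assert_eq!(obj.value(), 42);

    // Unpack into the owned object-API type, then pack it back into a struct.
    let owned: ObjectT = obj.unpack();
    assert_eq!(owned.value, 42);
    assert_eq!(owned.pack(), obj);
  }
}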