1 #![cfg(feature = "extern_crate_alloc")]
2 #![allow(clippy::duplicated_attributes)]
3
4 //! Stuff to boost things in the `alloc` crate.
5 //!
6 //! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you will
7 //! not be able to use this module! This is generally done by adding the
8 //! feature to the dependency in Cargo.toml like so:
9 //!
10 //! `bytemuck = { version = "VERSION_YOU_ARE_USING", features =
11 //! ["extern_crate_alloc"]}`
12
13 use super::*;
14 #[cfg(target_has_atomic = "ptr")]
15 use alloc::sync::Arc;
16 use alloc::{
17 alloc::{alloc_zeroed, Layout},
18 boxed::Box,
19 rc::Rc,
20 vec,
21 vec::Vec,
22 };
23 use core::{
24 mem::{size_of_val, ManuallyDrop},
25 ops::{Deref, DerefMut},
26 };
27
28 /// As [`try_cast_box`], but unwraps for you.
29 #[inline]
cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B>30 pub fn cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B> {
31 try_cast_box(input).map_err(|(e, _v)| e).unwrap()
32 }
33
34 /// Attempts to cast the content type of a [`Box`].
35 ///
36 /// On failure you get back an error along with the starting `Box`.
37 ///
38 /// ## Failure
39 ///
40 /// * The start and end content type of the `Box` must have the exact same
41 /// alignment.
42 /// * The start and end size of the `Box` must have the exact same size.
43 #[inline]
try_cast_box<A: NoUninit, B: AnyBitPattern>( input: Box<A>, ) -> Result<Box<B>, (PodCastError, Box<A>)>44 pub fn try_cast_box<A: NoUninit, B: AnyBitPattern>(
45 input: Box<A>,
46 ) -> Result<Box<B>, (PodCastError, Box<A>)> {
47 if align_of::<A>() != align_of::<B>() {
48 Err((PodCastError::AlignmentMismatch, input))
49 } else if size_of::<A>() != size_of::<B>() {
50 Err((PodCastError::SizeMismatch, input))
51 } else {
52 // Note(Lokathor): This is much simpler than with the Vec casting!
53 let ptr: *mut B = Box::into_raw(input) as *mut B;
54 Ok(unsafe { Box::from_raw(ptr) })
55 }
56 }
57
58 /// Allocates a `Box<T>` with all of the contents being zeroed out.
59 ///
60 /// This uses the global allocator to create a zeroed allocation and _then_
61 /// turns it into a Box. In other words, it's 100% assured that the zeroed data
62 /// won't be put temporarily on the stack. You can make a box of any size
63 /// without fear of a stack overflow.
64 ///
65 /// ## Failure
66 ///
67 /// This fails if the allocation fails.
68 #[inline]
try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()>69 pub fn try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()> {
70 if size_of::<T>() == 0 {
71 // This will not allocate but simply create an arbitrary non-null
72 // aligned pointer, valid for Box for a zero-sized pointee.
73 let ptr = core::ptr::NonNull::dangling().as_ptr();
74 return Ok(unsafe { Box::from_raw(ptr) });
75 }
76 let layout = Layout::new::<T>();
77 let ptr = unsafe { alloc_zeroed(layout) };
78 if ptr.is_null() {
79 // we don't know what the error is because `alloc_zeroed` is a dumb API
80 Err(())
81 } else {
82 Ok(unsafe { Box::<T>::from_raw(ptr as *mut T) })
83 }
84 }
85
86 /// As [`try_zeroed_box`], but unwraps for you.
87 #[inline]
zeroed_box<T: Zeroable>() -> Box<T>88 pub fn zeroed_box<T: Zeroable>() -> Box<T> {
89 try_zeroed_box().unwrap()
90 }
91
92 /// Allocates a `Vec<T>` of length and capacity exactly equal to `length` and
93 /// all elements zeroed.
94 ///
95 /// ## Failure
96 ///
97 /// This fails if the allocation fails, or if a layout cannot be calculated for
98 /// the allocation.
try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()>99 pub fn try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()> {
100 if length == 0 {
101 Ok(Vec::new())
102 } else {
103 let boxed_slice = try_zeroed_slice_box(length)?;
104 Ok(boxed_slice.into_vec())
105 }
106 }
107
108 /// As [`try_zeroed_vec`] but unwraps for you
zeroed_vec<T: Zeroable>(length: usize) -> Vec<T>109 pub fn zeroed_vec<T: Zeroable>(length: usize) -> Vec<T> {
110 try_zeroed_vec(length).unwrap()
111 }
112
/// Allocates a `Box<[T]>` with all contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails, or if a layout cannot be calculated for
/// the allocation (e.g. `length * size_of::<T>()` overflows `isize`).
#[inline]
pub fn try_zeroed_slice_box<T: Zeroable>(
  length: usize,
) -> Result<Box<[T]>, ()> {
  if size_of::<T>() == 0 || length == 0 {
    // This will not allocate but simply create an arbitrary non-null aligned
    // slice pointer, valid for Box for a zero-sized pointee.
    // (Covers both ZST elements with any length, and length-0 slices.)
    let ptr = core::ptr::NonNull::dangling().as_ptr();
    let slice_ptr = core::ptr::slice_from_raw_parts_mut(ptr, length);
    return Ok(unsafe { Box::from_raw(slice_ptr) });
  }
  // Non-zero total size from here on; `Layout::array` fails on overflow.
  let layout = core::alloc::Layout::array::<T>(length).map_err(|_| ())?;
  // SAFETY: `layout` has non-zero size, as required by `alloc_zeroed`.
  let ptr = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    // SAFETY: `ptr` is a live allocation of `length` `T`s, and all-zero bytes
    // are a valid `T` because `T: Zeroable`.
    let slice =
      unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) };
    Ok(unsafe { Box::<[T]>::from_raw(slice) })
  }
}
146
147 /// As [`try_zeroed_slice_box`], but unwraps for you.
zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]>148 pub fn zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]> {
149 try_zeroed_slice_box(length).unwrap()
150 }
151
152 /// As [`try_cast_slice_box`], but unwraps for you.
153 #[inline]
cast_slice_box<A: NoUninit, B: AnyBitPattern>( input: Box<[A]>, ) -> Box<[B]>154 pub fn cast_slice_box<A: NoUninit, B: AnyBitPattern>(
155 input: Box<[A]>,
156 ) -> Box<[B]> {
157 try_cast_slice_box(input).map_err(|(e, _v)| e).unwrap()
158 }
159
/// Attempts to cast the content type of a `Box<[T]>`.
///
/// On failure you get back an error along with the starting `Box<[T]>`.
///
/// ## Failure
///
/// * The start and end content type of the `Box<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Box<[T]>` must be the
///   exact same.
#[inline]
pub fn try_cast_slice_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<[A]>,
) -> Result<Box<[B]>, (PodCastError, Box<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so the slice length must be recomputed from the
    // total byte size of the allocation.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      // (A zero-sized B can only represent a zero-byte buffer.)
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Box
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let box_ptr: *mut A = Box::into_raw(input) as *mut A;
      let ptr: *mut [B] =
        unsafe { core::slice::from_raw_parts_mut(box_ptr as *mut B, length) };
      Ok(unsafe { Box::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size and alignment: the fat pointer can be cast directly,
    // keeping the same slice length.
    let box_ptr: *mut [A] = Box::into_raw(input);
    let ptr: *mut [B] = box_ptr as *mut [B];
    Ok(unsafe { Box::<[B]>::from_raw(ptr) })
  }
}
206
207 /// As [`try_cast_vec`], but unwraps for you.
208 #[inline]
cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B>209 pub fn cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B> {
210 try_cast_vec(input).map_err(|(e, _v)| e).unwrap()
211 }
212
/// Attempts to cast the content type of a [`Vec`].
///
/// On failure you get back an error along with the starting `Vec`.
///
/// ## Failure
///
/// * The start and end content type of the `Vec` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Vec` must be the exact
///   same.
/// * The start and end capacity in bytes of the `Vec` must be the exact same.
#[inline]
pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>(
  input: Vec<A>,
) -> Result<Vec<B>, (PodCastError, Vec<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ: both the initialized length *and* the full
    // capacity must translate exactly into whole `B` elements.
    let input_size = size_of_val::<[A]>(&*input);
    let input_capacity = input.capacity() * size_of::<A>();
    if (size_of::<B>() == 0 && input_capacity != 0)
      || (size_of::<B>() != 0
        && (input_size % size_of::<B>() != 0
          || input_capacity % size_of::<B>() != 0))
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      // Note that we have to pay special attention to make sure that both
      // length and capacity are valid under B, as we do not want to
      // change which bytes are considered part of the initialized slice
      // of the Vec
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length and
      // capacity and recreate the Vec
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.

      // Note(Lokathor): First we record the length and capacity, which don't
      // have any secret provenance metadata.
      let length: usize =
        if size_of::<B>() != 0 { input_size / size_of::<B>() } else { 0 };
      let capacity: usize =
        if size_of::<B>() != 0 { input_capacity / size_of::<B>() } else { 0 };
      // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
      // ManuallyDrop, because if we used `core::mem::forget` after taking the
      // pointer then that would invalidate our pointer. In nightly there's a
      // "into raw parts" method, which we can switch this too eventually.
      let mut manual_drop_vec = ManuallyDrop::new(input);
      let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
      let ptr: *mut B = vec_ptr as *mut B;
      Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
    }
  } else {
    // Same element size and alignment: length and capacity carry over
    // unchanged; only the pointer's type changes.
    // Note(Lokathor): First we record the length and capacity, which don't have
    // any secret provenance metadata.
    let length: usize = input.len();
    let capacity: usize = input.capacity();
    // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
    // ManuallyDrop, because if we used `core::mem::forget` after taking the
    // pointer then that would invalidate our pointer. In nightly there's a
    // "into raw parts" method, which we can switch this too eventually.
    let mut manual_drop_vec = ManuallyDrop::new(input);
    let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
    let ptr: *mut B = vec_ptr as *mut B;
    Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
  }
}
285
286 /// This "collects" a slice of pod data into a vec of a different pod type.
287 ///
288 /// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work.
289 ///
290 /// The output vec will be of a minimal size/capacity to hold the slice given.
291 ///
292 /// ```rust
293 /// # use bytemuck::*;
294 /// let halfwords: [u16; 4] = [5, 6, 7, 8];
295 /// let vec_of_words: Vec<u32> = pod_collect_to_vec(&halfwords);
296 /// if cfg!(target_endian = "little") {
297 /// assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..])
298 /// } else {
299 /// assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..])
300 /// }
301 /// ```
pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>( src: &[A], ) -> Vec<B>302 pub fn pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>(
303 src: &[A],
304 ) -> Vec<B> {
305 let src_size = core::mem::size_of_val(src);
306 // Note(Lokathor): dst_count is rounded up so that the dest will always be at
307 // least as many bytes as the src.
308 let dst_count = src_size / size_of::<B>()
309 + if src_size % size_of::<B>() != 0 { 1 } else { 0 };
310 let mut dst = vec![B::zeroed(); dst_count];
311
312 let src_bytes: &[u8] = cast_slice(src);
313 let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]);
314 dst_bytes[..src_size].copy_from_slice(src_bytes);
315 dst
316 }
317
318 /// As [`try_cast_rc`], but unwraps for you.
319 #[inline]
cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Rc<A>, ) -> Rc<B>320 pub fn cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
321 input: Rc<A>,
322 ) -> Rc<B> {
323 try_cast_rc(input).map_err(|(e, _v)| e).unwrap()
324 }
325
326 /// Attempts to cast the content type of a [`Rc`].
327 ///
328 /// On failure you get back an error along with the starting `Rc`.
329 ///
330 /// The bounds on this function are the same as [`cast_mut`], because a user
331 /// could call `Rc::get_unchecked_mut` on the output, which could be observable
332 /// in the input.
333 ///
334 /// ## Failure
335 ///
336 /// * The start and end content type of the `Rc` must have the exact same
337 /// alignment.
338 /// * The start and end size of the `Rc` must have the exact same size.
339 #[inline]
try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Rc<A>, ) -> Result<Rc<B>, (PodCastError, Rc<A>)>340 pub fn try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
341 input: Rc<A>,
342 ) -> Result<Rc<B>, (PodCastError, Rc<A>)> {
343 if align_of::<A>() != align_of::<B>() {
344 Err((PodCastError::AlignmentMismatch, input))
345 } else if size_of::<A>() != size_of::<B>() {
346 Err((PodCastError::SizeMismatch, input))
347 } else {
348 // Safety: Rc::from_raw requires size and alignment match, which is met.
349 let ptr: *const B = Rc::into_raw(input) as *const B;
350 Ok(unsafe { Rc::from_raw(ptr) })
351 }
352 }
353
354 /// As [`try_cast_arc`], but unwraps for you.
355 #[inline]
356 #[cfg(target_has_atomic = "ptr")]
cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Arc<A>, ) -> Arc<B>357 pub fn cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
358 input: Arc<A>,
359 ) -> Arc<B> {
360 try_cast_arc(input).map_err(|(e, _v)| e).unwrap()
361 }
362
363 /// Attempts to cast the content type of a [`Arc`].
364 ///
365 /// On failure you get back an error along with the starting `Arc`.
366 ///
367 /// The bounds on this function are the same as [`cast_mut`], because a user
368 /// could call `Rc::get_unchecked_mut` on the output, which could be observable
369 /// in the input.
370 ///
371 /// ## Failure
372 ///
373 /// * The start and end content type of the `Arc` must have the exact same
374 /// alignment.
375 /// * The start and end size of the `Arc` must have the exact same size.
376 #[inline]
377 #[cfg(target_has_atomic = "ptr")]
try_cast_arc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Arc<A>, ) -> Result<Arc<B>, (PodCastError, Arc<A>)>378 pub fn try_cast_arc<
379 A: NoUninit + AnyBitPattern,
380 B: NoUninit + AnyBitPattern,
381 >(
382 input: Arc<A>,
383 ) -> Result<Arc<B>, (PodCastError, Arc<A>)> {
384 if align_of::<A>() != align_of::<B>() {
385 Err((PodCastError::AlignmentMismatch, input))
386 } else if size_of::<A>() != size_of::<B>() {
387 Err((PodCastError::SizeMismatch, input))
388 } else {
389 // Safety: Arc::from_raw requires size and alignment match, which is met.
390 let ptr: *const B = Arc::into_raw(input) as *const B;
391 Ok(unsafe { Arc::from_raw(ptr) })
392 }
393 }
394
395 /// As [`try_cast_slice_rc`], but unwraps for you.
396 #[inline]
cast_slice_rc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Rc<[A]>, ) -> Rc<[B]>397 pub fn cast_slice_rc<
398 A: NoUninit + AnyBitPattern,
399 B: NoUninit + AnyBitPattern,
400 >(
401 input: Rc<[A]>,
402 ) -> Rc<[B]> {
403 try_cast_slice_rc(input).map_err(|(e, _v)| e).unwrap()
404 }
405
/// Attempts to cast the content type of a `Rc<[T]>`.
///
/// On failure you get back an error along with the starting `Rc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Rc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Rc<[T]>` must be the exact
///   same.
#[inline]
pub fn try_cast_slice_rc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Rc<[A]>,
) -> Result<Rc<[B]>, (PodCastError, Rc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so the slice length must be recomputed from the
    // total byte size of the allocation.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Rc
      // NOTE: This is a valid operation because according to the docs of
      // std::rc::Rc::from_raw(), the type U that was in the original Rc<U>
      // acquired from Rc::into_raw() must have the same size alignment and
      // size of the type T in the new Rc<T>. So as long as both the size
      // and alignment stay the same, the Rc will remain a valid Rc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let rc_ptr: *const A = Rc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(rc_ptr as *const B, length);
      Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size and alignment: the fat pointer can be cast directly,
    // keeping the same slice length.
    let rc_ptr: *const [A] = Rc::into_raw(input);
    let ptr: *const [B] = rc_ptr as *const [B];
    Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
  }
}
460
461 /// As [`try_cast_slice_arc`], but unwraps for you.
462 #[inline]
463 #[cfg(target_has_atomic = "ptr")]
cast_slice_arc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Arc<[A]>, ) -> Arc<[B]>464 pub fn cast_slice_arc<
465 A: NoUninit + AnyBitPattern,
466 B: NoUninit + AnyBitPattern,
467 >(
468 input: Arc<[A]>,
469 ) -> Arc<[B]> {
470 try_cast_slice_arc(input).map_err(|(e, _v)| e).unwrap()
471 }
472
/// Attempts to cast the content type of a `Arc<[T]>`.
///
/// On failure you get back an error along with the starting `Arc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Arc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Arc<[T]>` must be the
///   exact same.
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn try_cast_slice_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<[A]>,
) -> Result<Arc<[B]>, (PodCastError, Arc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so the slice length must be recomputed from the
    // total byte size of the allocation.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Arc
      // NOTE: This is a valid operation because according to the docs of
      // std::sync::Arc::from_raw(), the type U that was in the original Arc<U>
      // acquired from Arc::into_raw() must have the same size alignment and
      // size of the type T in the new Arc<T>. So as long as both the size
      // and alignment stay the same, the Arc will remain a valid Arc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let arc_ptr: *const A = Arc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(arc_ptr as *const B, length);
      Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size and alignment: the fat pointer can be cast directly,
    // keeping the same slice length.
    let arc_ptr: *const [A] = Arc::into_raw(input);
    let ptr: *const [B] = arc_ptr as *const [B];
    Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
  }
}
528
/// An extension trait for `TransparentWrapper` and alloc types.
///
/// All methods are provided defaults; the `TransparentWrapper` contract
/// (identical representation of `Self` and `Inner`) is what makes the
/// pointer reinterpretations below sound.
pub trait TransparentWrapperAlloc<Inner: ?Sized>:
  TransparentWrapper<Inner>
{
  /// Convert a vec of the inner type into a vec of the wrapper type.
  fn wrap_vec(s: Vec<Inner>) -> Vec<Self>
  where
    Self: Sized,
    Inner: Sized,
  {
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Self, length, capacity)
    }
  }

  /// Convert a box to the inner type into a box to the wrapper
  /// type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  fn wrap_box(s: Box<Inner>) -> Box<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let inner_ptr: *mut Inner = Box::into_raw(s);
      let wrapper_ptr: *mut Self = transmute!(inner_ptr);
      Box::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Rc`] to the inner type into an `Rc` to the wrapper type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  fn wrap_rc(s: Rc<Inner>) -> Rc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let inner_ptr: *const Inner = Rc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Rc::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Arc`] to the inner type into an `Arc` to the wrapper type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn wrap_arc(s: Arc<Inner>) -> Arc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let inner_ptr: *const Inner = Arc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Arc::from_raw(wrapper_ptr)
    }
  }

  /// Convert a vec of the wrapper type into a vec of the inner type.
  fn peel_vec(s: Vec<Self>) -> Vec<Inner>
  where
    Self: Sized,
    Inner: Sized,
  {
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Inner, length, capacity)
    }
  }

  /// Convert a box to the wrapper type into a box to the inner
  /// type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  fn peel_box(s: Box<Self>) -> Box<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let wrapper_ptr: *mut Self = Box::into_raw(s);
      let inner_ptr: *mut Inner = transmute!(wrapper_ptr);
      Box::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Rc`] to the wrapper type into an `Rc` to the inner type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  fn peel_rc(s: Rc<Self>) -> Rc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let wrapper_ptr: *const Self = Rc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Rc::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Arc`] to the wrapper type into an `Arc` to the inner type.
  ///
  /// # Panics
  ///
  /// Panics if `*mut Inner` and `*mut Self` differ in size (i.e. their
  /// pointer metadata differs).
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn peel_arc(s: Arc<Self>) -> Arc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let wrapper_ptr: *const Self = Arc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Arc::from_raw(inner_ptr)
    }
  }
}
739
// Blanket impl: every `TransparentWrapper<I>` automatically gets the alloc
// conveniences above; the trait has no required methods, so the body is empty.
impl<I: ?Sized, T: ?Sized + TransparentWrapper<I>> TransparentWrapperAlloc<I>
  for T
{
}
744
/// As `Box<[u8]>`, but remembers the original alignment.
pub struct BoxBytes {
  // The owned allocation (or a dangling pointer for zero-sized layouts).
  // SAFETY: `ptr` is aligned to `layout.align()`, points to
  // `layout.size()` initialized bytes, and, if `layout.size() > 0`,
  // is owned and was allocated with the global allocator with `layout`.
  ptr: NonNull<u8>,
  // The layout originally used for the allocation; consulted by `Deref`,
  // `DerefMut`, and `Drop`.
  layout: Layout,
}
753
754 impl Deref for BoxBytes {
755 type Target = [u8];
756
deref(&self) -> &Self::Target757 fn deref(&self) -> &Self::Target {
758 // SAFETY: See type invariant.
759 unsafe {
760 core::slice::from_raw_parts(self.ptr.as_ptr(), self.layout.size())
761 }
762 }
763 }
764
765 impl DerefMut for BoxBytes {
deref_mut(&mut self) -> &mut Self::Target766 fn deref_mut(&mut self) -> &mut Self::Target {
767 // SAFETY: See type invariant.
768 unsafe {
769 core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.layout.size())
770 }
771 }
772 }
773
774 impl Drop for BoxBytes {
drop(&mut self)775 fn drop(&mut self) {
776 if self.layout.size() != 0 {
777 // SAFETY: See type invariant: if `self.layout.size() != 0`, then
778 // `self.ptr` is owned and was allocated with `self.layout`.
779 unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout) };
780 }
781 }
782 }
783
784 impl<T: ?Sized + sealed::BoxBytesOf> From<Box<T>> for BoxBytes {
from(value: Box<T>) -> Self785 fn from(value: Box<T>) -> Self {
786 value.box_bytes_of()
787 }
788 }
789
// Private module implementing the "sealed trait" pattern: the traits are
// `pub` so public functions can use them as bounds, but — living in a
// private module — they cannot be named, implemented, or called directly
// from outside this crate.
mod sealed {
  use crate::{BoxBytes, PodCastError};
  use alloc::boxed::Box;

  /// Conversion `Box<Self>` -> [`BoxBytes`]; backs [`crate::box_bytes_of`].
  pub trait BoxBytesOf {
    fn box_bytes_of(self: Box<Self>) -> BoxBytes;
  }

  /// Fallible conversion [`BoxBytes`] -> `Box<Self>`; backs
  /// [`crate::try_from_box_bytes`]. On error the input is handed back.
  pub trait FromBoxBytes {
    fn try_from_box_bytes(
      bytes: BoxBytes,
    ) -> Result<Box<Self>, (PodCastError, BoxBytes)>;
  }
}
804
805 impl<T: NoUninit> sealed::BoxBytesOf for T {
box_bytes_of(self: Box<Self>) -> BoxBytes806 fn box_bytes_of(self: Box<Self>) -> BoxBytes {
807 let layout = Layout::new::<T>();
808 let ptr = Box::into_raw(self) as *mut u8;
809 // SAFETY: Box::into_raw() returns a non-null pointer.
810 let ptr = unsafe { NonNull::new_unchecked(ptr) };
811 BoxBytes { ptr, layout }
812 }
813 }
814
815 impl<T: NoUninit> sealed::BoxBytesOf for [T] {
box_bytes_of(self: Box<Self>) -> BoxBytes816 fn box_bytes_of(self: Box<Self>) -> BoxBytes {
817 let layout = Layout::for_value::<[T]>(&self);
818 let ptr = Box::into_raw(self) as *mut u8;
819 // SAFETY: Box::into_raw() returns a non-null pointer.
820 let ptr = unsafe { NonNull::new_unchecked(ptr) };
821 BoxBytes { ptr, layout }
822 }
823 }
824
825 impl sealed::BoxBytesOf for str {
box_bytes_of(self: Box<Self>) -> BoxBytes826 fn box_bytes_of(self: Box<Self>) -> BoxBytes {
827 self.into_boxed_bytes().box_bytes_of()
828 }
829 }
830
831 impl<T: AnyBitPattern> sealed::FromBoxBytes for T {
try_from_box_bytes( bytes: BoxBytes, ) -> Result<Box<Self>, (PodCastError, BoxBytes)>832 fn try_from_box_bytes(
833 bytes: BoxBytes,
834 ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
835 let layout = Layout::new::<T>();
836 if bytes.layout.align() != layout.align() {
837 Err((PodCastError::AlignmentMismatch, bytes))
838 } else if bytes.layout.size() != layout.size() {
839 Err((PodCastError::SizeMismatch, bytes))
840 } else {
841 let (ptr, _) = bytes.into_raw_parts();
842 // SAFETY: See BoxBytes type invariant.
843 Ok(unsafe { Box::from_raw(ptr.as_ptr() as *mut T) })
844 }
845 }
846 }
847
impl<T: AnyBitPattern> sealed::FromBoxBytes for [T] {
  /// Reinterprets the bytes as a boxed slice `Box<[T]>`.
  ///
  /// Errors (handing the input back) when the stored alignment differs
  /// from `T`'s, or when the byte length is not an exact multiple of
  /// `size_of::<T>()`. For zero-sized `T`, only a zero-length input is
  /// accepted.
  fn try_from_box_bytes(
    bytes: BoxBytes,
  ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
    let single_layout = Layout::new::<T>();
    if bytes.layout.align() != single_layout.align() {
      Err((PodCastError::AlignmentMismatch, bytes))
    } else if (single_layout.size() == 0 && bytes.layout.size() != 0)
      || (single_layout.size() != 0
        && bytes.layout.size() % single_layout.size() != 0)
    {
      // Either a ZST element paired with a non-empty buffer, or leftover
      // bytes that cannot form a whole element ("slop").
      Err((PodCastError::OutputSliceWouldHaveSlop, bytes))
    } else {
      // `into_raw_parts` transfers ownership and suppresses `bytes`' Drop.
      let (ptr, layout) = bytes.into_raw_parts();
      // For ZST elements the checks above guarantee the input was empty,
      // so the resulting slice length is 0 (also avoids division by zero).
      let length = if single_layout.size() != 0 {
        layout.size() / single_layout.size()
      } else {
        0
      };
      let ptr =
        core::ptr::slice_from_raw_parts_mut(ptr.as_ptr() as *mut T, length);
      // SAFETY: See BoxBytes type invariant.
      Ok(unsafe { Box::from_raw(ptr) })
    }
  }
}
874
875 /// Re-interprets `Box<T>` as `BoxBytes`.
876 ///
877 /// `T` must be either [`Sized`] and [`NoUninit`],
878 /// [`[U]`](slice) where `U: NoUninit`, or [`str`].
879 #[inline]
box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes880 pub fn box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes {
881 input.box_bytes_of()
882 }
883
884 /// Re-interprets `BoxBytes` as `Box<T>`.
885 ///
886 /// `T` must be either [`Sized`] + [`AnyBitPattern`], or
887 /// [`[U]`](slice) where `U: AnyBitPattern`.
888 ///
889 /// ## Panics
890 ///
891 /// This is [`try_from_box_bytes`] but will panic on error and the input will be
892 /// dropped.
893 #[inline]
894 #[cfg_attr(feature = "track_caller", track_caller)]
from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( input: BoxBytes, ) -> Box<T>895 pub fn from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
896 input: BoxBytes,
897 ) -> Box<T> {
898 try_from_box_bytes(input).map_err(|(error, _)| error).unwrap()
899 }
900
901 /// Re-interprets `BoxBytes` as `Box<T>`.
902 ///
903 /// `T` must be either [`Sized`] + [`AnyBitPattern`], or
904 /// [`[U]`](slice) where `U: AnyBitPattern`.
905 ///
906 /// Returns `Err`:
907 /// * If the input isn't aligned for `T`.
908 /// * If `T: Sized` and the input's length isn't exactly the size of `T`.
909 /// * If `T = [U]` and the input's length isn't exactly a multiple of the size
910 /// of `U`.
911 #[inline]
try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( input: BoxBytes, ) -> Result<Box<T>, (PodCastError, BoxBytes)>912 pub fn try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
913 input: BoxBytes,
914 ) -> Result<Box<T>, (PodCastError, BoxBytes)> {
915 T::try_from_box_bytes(input)
916 }
917
918 impl BoxBytes {
919 /// Constructs a `BoxBytes` from its raw parts.
920 ///
921 /// # Safety
922 ///
923 /// The pointer is owned, has been allocated with the provided layout, and
924 /// points to `layout.size()` initialized bytes.
from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self925 pub unsafe fn from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self {
926 BoxBytes { ptr, layout }
927 }
928
929 /// Deconstructs a `BoxBytes` into its raw parts.
930 ///
931 /// The pointer is owned, has been allocated with the provided layout, and
932 /// points to `layout.size()` initialized bytes.
into_raw_parts(self) -> (NonNull<u8>, Layout)933 pub fn into_raw_parts(self) -> (NonNull<u8>, Layout) {
934 let me = ManuallyDrop::new(self);
935 (me.ptr, me.layout)
936 }
937
938 /// Returns the original layout.
layout(&self) -> Layout939 pub fn layout(&self) -> Layout {
940 self.layout
941 }
942 }
943