// zerocopy/ref.rs
1// Copyright 2024 The Fuchsia Authors
2//
3// Licensed under the 2-Clause BSD License <LICENSE-BSD or
4// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0
5// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
6// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
7// This file may not be copied, modified, or distributed except according to
8// those terms.
9use super::*;
10use crate::pointer::{
11 BecauseInvariantsEq, BecauseMutationCompatible, MutationCompatible, TransmuteFromPtr,
12};
13
mod def {
    use core::marker::PhantomData;

    use crate::{
        ByteSlice, ByteSliceMut, CloneableByteSlice, CopyableByteSlice, IntoByteSlice,
        IntoByteSliceMut,
    };

    /// A typed reference derived from a byte slice.
    ///
    /// A `Ref<B, T>` is a reference to a `T` which is stored in a byte slice, `B`.
    /// Unlike a native reference (`&T` or `&mut T`), `Ref<B, T>` has the same
    /// mutability as the byte slice it was constructed from (`B`).
    ///
    /// # Examples
    ///
    /// `Ref` can be used to treat a sequence of bytes as a structured type, and
    /// to read and write the fields of that type as if the byte slice reference
    /// were simply a reference to that type.
    ///
    /// ```rust
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
    /// #[repr(C)]
    /// struct UdpHeader {
    ///     src_port: [u8; 2],
    ///     dst_port: [u8; 2],
    ///     length: [u8; 2],
    ///     checksum: [u8; 2],
    /// }
    ///
    /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
    /// #[repr(C, packed)]
    /// struct UdpPacket {
    ///     header: UdpHeader,
    ///     body: [u8],
    /// }
    ///
    /// impl UdpPacket {
    ///     pub fn parse<B: ByteSlice>(bytes: B) -> Option<Ref<B, UdpPacket>> {
    ///         Ref::from_bytes(bytes).ok()
    ///     }
    /// }
    /// ```
    pub struct Ref<B, T: ?Sized>(
        // INVARIANTS: The referent (via `.deref`, `.deref_mut`, `.into`) byte
        // slice is aligned to `T`'s alignment and its size corresponds to a
        // valid size for `T`.
        //
        // NOTE: The fields are private to this `def` module so that the rest
        // of the file can only construct or access them via the methods below,
        // which maintain and exploit the invariant.
        B,
        // Marks the referent type without storing a `T`.
        PhantomData<T>,
    );

    impl<B, T: ?Sized> Ref<B, T> {
        /// Constructs a new `Ref`.
        ///
        /// # Safety
        ///
        /// `bytes` dereferences (via [`deref`], [`deref_mut`], and [`into`]) to
        /// a byte slice which is aligned to `T`'s alignment and whose size is a
        /// valid size for `T`.
        ///
        /// [`deref`]: core::ops::Deref::deref
        /// [`deref_mut`]: core::ops::DerefMut::deref_mut
        /// [`into`]: core::convert::Into::into
        pub(crate) unsafe fn new_unchecked(bytes: B) -> Ref<B, T> {
            // INVARIANTS: The caller has promised that `bytes`'s referent is
            // validly-aligned and has a valid size.
            Ref(bytes, PhantomData)
        }
    }

    impl<B: ByteSlice, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as a [`ByteSlice`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`ByteSlice`] other than `ByteSlice` methods (for example, via
        /// `Any::downcast_ref`).
        ///
        /// `as_byte_slice` promises to return a `ByteSlice` whose referent is
        /// validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn as_byte_slice(&self) -> &impl ByteSlice {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `ByteSlice`. Since `B: ByteSlice`, dereference stability
            // guarantees that calling `ByteSlice` methods will not change the
            // address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            &self.0
        }
    }

    impl<B: ByteSliceMut, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as a [`ByteSliceMut`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`ByteSliceMut`] other than `ByteSliceMut` methods (for example, via
        /// `Any::downcast_mut`).
        ///
        /// `as_byte_slice_mut` promises to return a `ByteSliceMut` whose
        /// referent is validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn as_byte_slice_mut(&mut self) -> &mut impl ByteSliceMut {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `ByteSliceMut`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            &mut self.0
        }
    }

    impl<'a, B: IntoByteSlice<'a>, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as an [`IntoByteSlice`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`IntoByteSlice`] other than `IntoByteSlice` methods (for example,
        /// via `Any::downcast_ref`).
        ///
        /// `into_byte_slice` promises to return a `ByteSlice` whose referent is
        /// validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn into_byte_slice(self) -> impl IntoByteSlice<'a> {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `IntoByteSlice`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            self.0
        }
    }

    impl<'a, B: IntoByteSliceMut<'a>, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as an [`IntoByteSliceMut`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`IntoByteSliceMut`] other than `IntoByteSliceMut` methods (for
        /// example, via `Any::downcast_mut`).
        ///
        /// `into_byte_slice_mut` promises to return a `ByteSlice` whose
        /// referent is validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn into_byte_slice_mut(self) -> impl IntoByteSliceMut<'a> {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `IntoByteSliceMut`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            self.0
        }
    }

    impl<B: CloneableByteSlice + Clone, T: ?Sized> Clone for Ref<B, T> {
        #[inline]
        fn clone(&self) -> Ref<B, T> {
            // INVARIANTS: Since `B: CloneableByteSlice`, `self.0.clone()` has
            // the same address and length as `self.0`. Since `self.0` upholds
            // the field invariants, so does `self.0.clone()`.
            Ref(self.0.clone(), PhantomData)
        }
    }

    // INVARIANTS: Since `B: CopyableByteSlice`, the copied `Ref`'s `.0` has the
    // same address and length as the original `Ref`'s `.0`. Since the original
    // upholds the field invariants, so does the copy.
    impl<B: CopyableByteSlice + Copy, T: ?Sized> Copy for Ref<B, T> {}
}
194
195#[allow(unreachable_pub)] // This is a false positive on our MSRV toolchain.
196pub use def::Ref;
197
198use crate::pointer::{
199 invariant::{Aligned, BecauseExclusive, Initialized, Unaligned, Valid},
200 BecauseRead, PtrInner,
201};
202
impl<B, T> Ref<B, T>
where
    B: ByteSlice,
{
    /// Constructs a `Ref` from `bytes`, requiring that `bytes.len()` is
    /// exactly `mem::size_of::<T>()` and that `bytes`'s referent is validly
    /// aligned for `T`; otherwise, returns a `CastError`.
    ///
    /// Note: `T` is implicitly `Sized` here (there is no `T: ?Sized` bound),
    /// so `size_of::<T>()` is the only valid referent size.
    #[must_use = "has no side effects"]
    pub(crate) fn sized_from(bytes: B) -> Result<Ref<B, T>, CastError<B, T>> {
        if bytes.len() != mem::size_of::<T>() {
            return Err(SizeError::new(bytes).into());
        }
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }

        // SAFETY: We just validated size and alignment.
        Ok(unsafe { Ref::new_unchecked(bytes) })
    }
}
220
impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
{
    /// Constructs a `Ref` from the first `mem::size_of::<T>()` bytes of
    /// `bytes`, returning the remaining suffix alongside it.
    ///
    /// Fails with a `CastError` if `bytes` is too short or not validly
    /// aligned for `T`.
    #[must_use = "has no side effects"]
    pub(crate) fn sized_from_prefix(bytes: B) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        if bytes.len() < mem::size_of::<T>() {
            return Err(SizeError::new(bytes).into());
        }
        // Alignment is checked on the original slice; the prefix produced by
        // `split_at` below starts at the same address, so the check carries
        // over to it.
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }
        let (bytes, suffix) = bytes.split_at(mem::size_of::<T>()).map_err(
            #[inline(always)]
            |b| SizeError::new(b).into(),
        )?;
        // SAFETY: We just validated alignment and that `bytes` is at least as
        // large as `T`. `bytes.split_at(mem::size_of::<T>())?` ensures that the
        // new `bytes` is exactly the size of `T`. By safety postcondition on
        // `SplitByteSlice::split_at` we can rely on `split_at` to produce the
        // correct `bytes` and `suffix`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((r, suffix))
    }

    /// Constructs a `Ref` from the last `mem::size_of::<T>()` bytes of
    /// `bytes`, returning the preceding prefix alongside it.
    ///
    /// Fails with a `CastError` if `bytes` is too short or if the suffix is
    /// not validly aligned for `T`.
    #[must_use = "has no side effects"]
    pub(crate) fn sized_from_suffix(bytes: B) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        let bytes_len = bytes.len();
        // `checked_sub` both computes the split point and rejects inputs
        // shorter than `size_of::<T>()` in one step.
        let split_at = if let Some(split_at) = bytes_len.checked_sub(mem::size_of::<T>()) {
            split_at
        } else {
            return Err(SizeError::new(bytes).into());
        };
        let (prefix, bytes) = bytes.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        // Unlike in `sized_from_prefix`, alignment must be checked on the
        // suffix itself, since it does not share the original start address.
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }
        // SAFETY: Since `split_at` is defined as `bytes_len - size_of::<T>()`,
        // the `bytes` which results from `let (prefix, bytes) =
        // bytes.split_at(split_at)?` has length `size_of::<T>()`. After
        // constructing `bytes`, we validate that it has the proper alignment.
        // By safety postcondition on `SplitByteSlice::split_at` we can rely on
        // `split_at` to produce the correct `prefix` and `bytes`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((prefix, r))
    }
}
268
impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: KnownLayout + Immutable + ?Sized,
{
    /// Constructs a `Ref` from a byte slice.
    ///
    /// If the length of `source` is not a [valid size of `T`][valid-size], or
    /// if `source` is not appropriately aligned for `T`, this returns `Err`. If
    /// [`T: Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_bytes(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_bytes(source: B) -> Result<Ref<B, T>, CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // The cast is used purely for validation here; on success, `source`
        // itself (not the produced `Ptr`) is wrapped below.
        if let Err(e) =
            Ptr::from_ref(source.deref()).try_cast_into_no_leftover::<T, BecauseImmutable>(None)
        {
            // `e` borrows from `source.deref()`; `with_src(())` drops that
            // borrow so the owned `source` can be re-attached and returned.
            return Err(e.with_src(()).with_src(source));
        }
        // SAFETY: `try_cast_into_no_leftover` validates size and alignment.
        Ok(unsafe { Ref::new_unchecked(source) })
    }
}
320
impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
    T: KnownLayout + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the prefix of a byte slice.
    ///
    /// This method computes the [largest possible size of `T`][valid-size] that
    /// can fit in the leading bytes of `source`, then attempts to return both a
    /// `Ref` to those bytes, and a reference to the remaining bytes. If there
    /// are insufficient bytes, or if `source` is not appropriately aligned,
    /// this returns `Err`. If [`T: Unaligned`][t-unaligned], you can
    /// [infallibly discard the alignment error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_prefix(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_prefix(source: B) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // The cast validates alignment and computes how many leading bytes
        // form a valid `T`; only the leftover `remainder` is used below, to
        // derive the split point.
        let remainder = match Ptr::from_ref(source.deref())
            .try_cast_into::<T, BecauseImmutable>(CastType::Prefix, None)
        {
            Ok((_, remainder)) => remainder,
            Err(e) => {
                // `e` borrows from `source.deref()`; `with_src(())` drops the
                // borrow so the owned `source` can be returned in the error.
                return Err(e.with_src(()).with_src(source));
            }
        };

        // SAFETY: `remainder` is constructed as a subset of `source`, and so it
        // cannot have a larger size than `source`. Both of their `len` methods
        // measure bytes (`source` deref's to `[u8]`, and `remainder` is a
        // `Ptr<[u8]>`), so `source.len() >= remainder.len()`. Thus, this cannot
        // underflow.
        #[allow(unstable_name_collisions)]
        let split_at = unsafe { source.len().unchecked_sub(remainder.len()) };
        let (bytes, suffix) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        // SAFETY: `try_cast_into` validates size and alignment, and returns a
        // `split_at` that indicates how many bytes of `source` correspond to a
        // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we
        // can rely on `split_at` to produce the correct `bytes` and `suffix`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((r, suffix))
    }

    /// Constructs a `Ref` from the suffix of a byte slice.
    ///
    /// This method computes the [largest possible size of `T`][valid-size] that
    /// can fit in the trailing bytes of `source`, then attempts to return both
    /// a `Ref` to those bytes, and a reference to the preceding bytes. If there
    /// are insufficient bytes, or if that suffix of `source` is not
    /// appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_suffix(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_suffix(source: B) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // For a suffix cast, `remainder` is the *leading* portion of `source`
        // that precedes the valid `T`, so its length is the split point.
        let remainder = match Ptr::from_ref(source.deref())
            .try_cast_into::<T, BecauseImmutable>(CastType::Suffix, None)
        {
            Ok((_, remainder)) => remainder,
            Err(e) => {
                // `e` borrows from `source.deref()`; `with_src(())` drops the
                // borrow so the owned `source` can be returned in the error.
                let e = e.with_src(());
                return Err(e.with_src(source));
            }
        };

        let split_at = remainder.len();
        let (prefix, bytes) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        // SAFETY: `try_cast_into` validates size and alignment, and returns a
        // `split_at` that indicates how many bytes of `source` correspond to a
        // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we
        // can rely on `split_at` to produce the correct `prefix` and `bytes`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((prefix, r))
    }
}
450
impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the given bytes with DST length equal to `count`
    /// without copying.
    ///
    /// This method attempts to return a `Ref` to the prefix of `source`
    /// interpreted as a `T` with `count` trailing elements, and a reference to
    /// the remaining bytes. If the length of `source` is not equal to the size
    /// of `Self` with `count` elements, or if `source` is not appropriately
    /// aligned, this returns `Err`. If [`T: Unaligned`][t-unaligned], you can
    /// [infallibly discard the alignment error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_bytes_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
    #[inline]
    pub fn from_bytes_with_elems(source: B, count: usize) -> Result<Ref<B, T>, CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // `None` presumably indicates that a `T` with `count` trailing
        // elements would overflow `usize`; treated as a size error.
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        if source.len() != expected_len {
            return Err(SizeError::new(source).into());
        }
        // Exact length is established; `from_bytes` re-derives the element
        // count from the byte length and performs the alignment check.
        Self::from_bytes(source)
    }
}
501
impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
    T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the prefix of the given bytes with DST
    /// length equal to `count` without copying.
    ///
    /// This method attempts to return a `Ref` to the prefix of `source`
    /// interpreted as a `T` with `count` trailing elements, and a reference to
    /// the remaining bytes. If there are insufficient bytes, or if `source` is
    /// not appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_prefix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
    #[inline]
    pub fn from_prefix_with_elems(
        source: B,
        count: usize,
    ) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // `None` presumably indicates that a `T` with `count` trailing
        // elements would overflow `usize`; treated as a size error.
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        // `split_at` fails (yielding a size error) if `source` is shorter
        // than `expected_len`.
        let (prefix, bytes) = source.split_at(expected_len).map_err(SizeError::new)?;
        // The prefix now has the exact expected length; `from_bytes` performs
        // the remaining (alignment) validation.
        Self::from_bytes(prefix).map(move |l| (l, bytes))
    }

    /// Constructs a `Ref` from the suffix of the given bytes with DST length
    /// equal to `count` without copying.
    ///
    /// This method attempts to return a `Ref` to the suffix of `source`
    /// interpreted as a `T` with `count` trailing elements, and a reference to
    /// the preceding bytes. If there are insufficient bytes, or if that suffix
    /// of `source` is not appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_suffix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
    #[inline]
    pub fn from_suffix_with_elems(
        source: B,
        count: usize,
    ) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        // `None` presumably indicates that a `T` with `count` trailing
        // elements would overflow `usize`; treated as a size error.
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        // `checked_sub` both computes the split point and rejects inputs
        // shorter than `expected_len` in one step.
        let split_at = if let Some(split_at) = source.len().checked_sub(expected_len) {
            split_at
        } else {
            return Err(SizeError::new(source).into());
        };
        // SAFETY: The preceding `source.len().checked_sub(expected_len)`
        // guarantees that `split_at` is in-bounds.
        let (bytes, suffix) = unsafe { source.split_at_unchecked(split_at) };
        // The suffix now has the exact expected length; `from_bytes` performs
        // the remaining (alignment) validation.
        Self::from_bytes(suffix).map(move |l| (bytes, l))
    }
}
606
impl<'a, B, T> Ref<B, T>
where
    B: 'a + IntoByteSlice<'a>,
    T: FromBytes + KnownLayout + Immutable + ?Sized,
{
    /// Converts this `Ref` into a reference.
    ///
    /// `into_ref` consumes the `Ref`, and returns a reference to `T`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::into_ref(r)` instead of `r.into_ref()`. This is so that
    /// there is no conflict with a method on the inner type.
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn into_ref(r: Self) -> &'a T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `IntoByteSlice`.
        let b = unsafe { r.into_byte_slice() };
        let b = b.into_byte_slice();

        // Fast path for sized `T`: the layout is known statically, so the
        // runtime cast machinery below can be skipped entirely.
        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_ref(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe { cast_for_sized::<T, _, _, _>(ptr) };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_ref();
        }

        // Slow path for unsized `T`: re-run the (infallible here) cast to
        // compute the pointer metadata.
        //
        // PANICS: By post-condition on `into_byte_slice`, `b`'s size and
        // alignment are valid for `T`. By post-condition, `b.into_byte_slice()`
        // produces a byte slice with identical address and length to that
        // produced by `b.deref()`.
        let ptr = Ptr::from_ref(b.into_byte_slice())
            .try_cast_into_no_leftover::<T, BecauseImmutable>(None)
            .expect("zerocopy internal error: into_ref should be infallible");
        let ptr = ptr.recall_validity();
        ptr.as_ref()
    }
}
654
655impl<'a, B, T> Ref<B, T>
656where
657 B: 'a + IntoByteSliceMut<'a>,
658 T: FromBytes + IntoBytes + KnownLayout + ?Sized,
659{
660 /// Converts this `Ref` into a mutable reference.
661 ///
662 /// `into_mut` consumes the `Ref`, and returns a mutable reference to `T`.
663 ///
664 /// Note: this is an associated function, which means that you have to call
665 /// it as `Ref::into_mut(r)` instead of `r.into_mut()`. This is so that
666 /// there is no conflict with a method on the inner type.
667 #[must_use = "has no side effects"]
668 #[inline(always)]
669 pub fn into_mut(r: Self) -> &'a mut T {
670 // Presumably unreachable, since we've guarded each constructor of `Ref`.
671 static_assert_dst_is_not_zst!(T);
672
673 // SAFETY: We don't call any methods on `b` other than those provided by
674 // `IntoByteSliceMut`.
675 let b = unsafe { r.into_byte_slice_mut() };
676 let b = b.into_byte_slice_mut();
677
678 if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
679 let ptr = Ptr::from_mut(b);
680 // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
681 // `b`'s size is equal to `size_of::<T>()`.
682 let ptr = unsafe {
683 cast_for_sized::<
684 T,
685 _,
686 (BecauseRead, BecauseExclusive),
687 (BecauseMutationCompatible, BecauseInvariantsEq),
688 >(ptr)
689 };
690
691 // SAFETY: None of the preceding transformations modifies the
692 // address of the pointer, and by invariant on `r`, we know that it
693 // is validly-aligned.
694 let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
695 return ptr.as_mut();
696 }
697
698 // PANICS: By post-condition on `into_byte_slice_mut`, `b`'s size and
699 // alignment are valid for `T`. By post-condition,
700 // `b.into_byte_slice_mut()` produces a byte slice with identical
701 // address and length to that produced by `b.deref_mut()`.
702 let ptr = Ptr::from_mut(b.into_byte_slice_mut())
703 .try_cast_into_no_leftover::<T, BecauseExclusive>(None)
704 .expect("zerocopy internal error: into_ref should be infallible");
705 let ptr = ptr.recall_validity::<_, (_, (_, _))>();
706 ptr.as_mut()
707 }
708}
709
impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: ?Sized,
{
    /// Gets the underlying bytes.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::bytes(r)` instead of `r.bytes()`. This is so that there is
    /// no conflict with a method on the inner type.
    #[inline]
    pub fn bytes(r: &Self) -> &[u8] {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice` (`deref` comes from `ByteSlice`'s `Deref<Target = [u8]>`
        // super-trait bound).
        unsafe { r.as_byte_slice().deref() }
    }
}
727
impl<B, T> Ref<B, T>
where
    B: ByteSliceMut,
    T: ?Sized,
{
    /// Gets the underlying bytes mutably.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::bytes_mut(r)` instead of `r.bytes_mut()`. This is so that
    /// there is no conflict with a method on the inner type.
    #[inline]
    pub fn bytes_mut(r: &mut Self) -> &mut [u8] {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut` (`deref_mut` comes from its `DerefMut` super-trait
        // bound).
        unsafe { r.as_byte_slice_mut().deref_mut() }
    }
}
745
impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes,
{
    /// Reads a copy of `T`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::read(r)` instead of `r.read()`. This is so that there is no
    /// conflict with a method on the inner type.
    #[must_use = "has no side effects"]
    #[inline]
    pub fn read(r: &Self) -> T {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice`.
        let b = unsafe { r.as_byte_slice() };

        // SAFETY: By postcondition on `as_byte_slice`, we know that `b` is a
        // valid size and alignment for `T`. By safety invariant on `ByteSlice`,
        // we know that this is preserved via `.deref()`. Because `T:
        // FromBytes`, it is sound to interpret these bytes as a `T`.
        //
        // Note that `ptr::read` performs a bitwise copy out of the buffer; the
        // bytes in the buffer are left untouched.
        unsafe { ptr::read(b.deref().as_ptr().cast::<T>()) }
    }
}
770
impl<B, T> Ref<B, T>
where
    B: ByteSliceMut,
    T: IntoBytes,
{
    /// Writes the bytes of `t` and then forgets `t`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::write(r, t)` instead of `r.write(t)`. This is so that there
    /// is no conflict with a method on the inner type.
    #[inline]
    pub fn write(r: &mut Self, t: T) {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut`.
        let b = unsafe { r.as_byte_slice_mut() };

        // SAFETY: By postcondition on `as_byte_slice_mut`, we know that `b` is
        // a valid size and alignment for `T`. By safety invariant on
        // `ByteSlice`, we know that this is preserved via `.deref()`. Writing
        // `t` to the buffer will allow all of the bytes of `t` to be accessed
        // as a `[u8]`, but because `T: IntoBytes`, we know that this is sound.
        //
        // Note that `ptr::write` moves `t` into the buffer without running
        // `t`'s destructor ("forgets `t`"), so the old value is not dropped.
        unsafe { ptr::write(b.deref_mut().as_mut_ptr().cast::<T>(), t) }
    }
}
795
impl<B, T> Deref for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + KnownLayout + Immutable + ?Sized,
{
    type Target = T;
    /// Borrows the underlying bytes as a `&T`.
    #[inline]
    fn deref(&self) -> &T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice`.
        let b = unsafe { self.as_byte_slice() };
        let b = b.deref();

        // Fast path for sized `T`: the layout is known statically, so the
        // runtime cast machinery below can be skipped entirely.
        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_ref(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe { cast_for_sized::<T, _, _, _>(ptr) };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_ref();
        }

        // Slow path for unsized `T`: re-run the (infallible here) cast to
        // compute the pointer metadata.
        //
        // PANICS: By postcondition on `as_byte_slice`, `b`'s size and alignment
        // are valid for `T`, and by invariant on `ByteSlice`, these are
        // preserved through `.deref()`, so this `unwrap` will not panic.
        let ptr = Ptr::from_ref(b)
            .try_cast_into_no_leftover::<T, BecauseImmutable>(None)
            .expect("zerocopy internal error: Deref::deref should be infallible");
        let ptr = ptr.recall_validity();
        ptr.as_ref()
    }
}
835
impl<B, T> DerefMut for Ref<B, T>
where
    B: ByteSliceMut,
    // FIXME(#251): We can't remove `Immutable` here because it's required by
    // the impl of `Deref`, which is a super-trait of `DerefMut`. Maybe we can
    // add a separate inherent method for this?
    T: FromBytes + IntoBytes + KnownLayout + Immutable + ?Sized,
{
    /// Borrows the underlying bytes as a `&mut T`.
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut`.
        let b = unsafe { self.as_byte_slice_mut() };
        let b = b.deref_mut();

        // Fast path for sized `T`: the layout is known statically, so the
        // runtime cast machinery below can be skipped entirely.
        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_mut(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe {
                cast_for_sized::<
                    T,
                    _,
                    (BecauseRead, BecauseExclusive),
                    (BecauseMutationCompatible, BecauseInvariantsEq),
                >(ptr)
            };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_mut();
        }

        // Slow path for unsized `T`: re-run the (infallible here) cast to
        // compute the pointer metadata.
        //
        // PANICS: By postcondition on `as_byte_slice_mut`, `b`'s size and
        // alignment are valid for `T`, and by invariant on `ByteSlice`, these
        // are preserved through `.deref_mut()`, so this `unwrap` will not
        // panic.
        let ptr = Ptr::from_mut(b)
            .try_cast_into_no_leftover::<T, BecauseExclusive>(None)
            .expect("zerocopy internal error: DerefMut::deref_mut should be infallible");
        let ptr = ptr.recall_validity::<_, (_, (_, BecauseExclusive))>();
        ptr.as_mut()
    }
}
885
886impl<T, B> Display for Ref<B, T>
887where
888 B: ByteSlice,
889 T: FromBytes + Display + KnownLayout + Immutable + ?Sized,
890{
891 #[inline]
892 fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
893 let inner: &T = self;
894 inner.fmt(fmt)
895 }
896}
897
898impl<T, B> Debug for Ref<B, T>
899where
900 B: ByteSlice,
901 T: FromBytes + Debug + KnownLayout + Immutable + ?Sized,
902{
903 #[inline]
904 fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
905 let inner: &T = self;
906 fmt.debug_tuple("Ref").field(&inner).finish()
907 }
908}
909
// `Eq` is a marker trait; the comparison behavior is supplied entirely by the
// `PartialEq` impl. The `T: Eq` bound ensures that the delegated comparison
// is a total equivalence relation.
impl<T, B> Eq for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + Eq + KnownLayout + Immutable + ?Sized,
{
}
916
917impl<T, B> PartialEq for Ref<B, T>
918where
919 B: ByteSlice,
920 T: FromBytes + PartialEq + KnownLayout + Immutable + ?Sized,
921{
922 #[inline]
923 fn eq(&self, other: &Self) -> bool {
924 self.deref().eq(other.deref())
925 }
926}
927
928impl<T, B> Ord for Ref<B, T>
929where
930 B: ByteSlice,
931 T: FromBytes + Ord + KnownLayout + Immutable + ?Sized,
932{
933 #[inline]
934 fn cmp(&self, other: &Self) -> Ordering {
935 let inner: &T = self;
936 let other_inner: &T = other;
937 inner.cmp(other_inner)
938 }
939}
940
941impl<T, B> PartialOrd for Ref<B, T>
942where
943 B: ByteSlice,
944 T: FromBytes + PartialOrd + KnownLayout + Immutable + ?Sized,
945{
946 #[inline]
947 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
948 let inner: &T = self;
949 let other_inner: &T = other;
950 inner.partial_cmp(other_inner)
951 }
952}
953
/// Casts a byte-slice `Ptr` to a `T` pointer with the same address and
/// aliasing invariant, and `Valid` validity.
///
/// Although the bounds permit `T: ?Sized`, the safety precondition requires
/// that `T` actually be sized; the relaxed bound presumably exists so that
/// this can be invoked from contexts generic over `T: ?Sized` (e.g. the
/// `Deref`/`DerefMut` impls above), which check sized-ness dynamically via
/// `T::LAYOUT.size_info` before calling.
///
/// # Safety
///
/// `T: Sized` and `ptr`'s referent must have size `size_of::<T>()`.
#[inline(always)]
unsafe fn cast_for_sized<'a, T, A, R, S>(
    ptr: Ptr<'a, [u8], (A, Aligned, Valid)>,
) -> Ptr<'a, T, (A, Unaligned, Valid)>
where
    T: FromBytes + KnownLayout + ?Sized,
    A: crate::invariant::Aliasing,
    [u8]: MutationCompatible<T, A, Initialized, Initialized, R>,
    T: TransmuteFromPtr<T, A, Initialized, Valid, crate::pointer::cast::IdCast, S>,
{
    use crate::pointer::cast::{Cast, Project};

    // Uninhabited type used purely as a type-level witness for the cast.
    enum CastForSized {}

    // SAFETY: `CastForSized` is only used below with the input `ptr`, which the
    // caller promises has size `size_of::<T>()`. Thus, the referent produced in
    // this cast has the same size as `ptr`'s referent. All operations preserve
    // provenance.
    unsafe impl<T: ?Sized + KnownLayout> Project<[u8], T> for CastForSized {
        #[inline(always)]
        fn project(src: PtrInner<'_, [u8]>) -> *mut T {
            // An element count of 0 is passed here; this is only sound because
            // the caller promises `T: Sized` (see the SAFETY comment above) —
            // presumably the element count is ignored for sized types; confirm
            // against `KnownLayout::raw_from_ptr_len`.
            T::raw_from_ptr_len(
                src.as_non_null().cast(),
                <T::PointerMetadata as crate::PointerMetadata>::from_elem_count(0),
            )
            .as_ptr()
        }
    }

    // SAFETY: The `Project::project` impl preserves referent address.
    unsafe impl<T: ?Sized + KnownLayout> Cast<[u8], T> for CastForSized {}

    // Recall the `[u8]` pointer at `Initialized` validity, perform the
    // address-preserving cast to `T`, then recall `Valid` validity for `T`.
    ptr.recall_validity::<Initialized, (_, (_, _))>()
        .cast::<_, CastForSized, _>()
        .recall_validity::<Valid, _>()
}
993
#[cfg(test)]
#[allow(clippy::assertions_on_result_states)]
mod tests {
    use core::convert::TryInto as _;

    use super::*;
    use crate::util::testutil::*;

    #[test]
    fn test_mut_slice_into_ref() {
        // Prior to #1260/#1299, calling `into_ref` on a `&mut [u8]`-backed
        // `Ref` was not supported.
        let mut buf = [0u8];
        let r = Ref::<&mut [u8], u8>::from_bytes(&mut buf).unwrap();
        assert_eq!(Ref::into_ref(r), &0);
    }

    #[test]
    fn test_address() {
        // Test that the `Deref` and `DerefMut` implementations return a
        // reference which points to the right region of memory.

        // Sized referent (`u8`).
        let buf = [0];
        let r = Ref::<_, u8>::from_bytes(&buf[..]).unwrap();
        let buf_ptr = buf.as_ptr();
        let deref_ptr: *const u8 = r.deref();
        assert_eq!(buf_ptr, deref_ptr);

        // Unsized referent (`[u8]`).
        let buf = [0];
        let r = Ref::<_, [u8]>::from_bytes(&buf[..]).unwrap();
        let buf_ptr = buf.as_ptr();
        let deref_ptr = r.deref().as_ptr();
        assert_eq!(buf_ptr, deref_ptr);
    }

    // Verify that values written to a `Ref` are properly shared between the
    // typed and untyped representations, that reads via `deref` and `read`
    // behave the same, and that writes via `deref_mut` and `write` behave the
    // same.
    fn test_new_helper(mut r: Ref<&mut [u8], AU64>) {
        // assert that the value starts at 0
        assert_eq!(*r, AU64(0));
        assert_eq!(Ref::read(&r), AU64(0));

        // Assert that values written to the typed value are reflected in the
        // byte slice.
        const VAL1: AU64 = AU64(0xFF00FF00FF00FF00);
        *r = VAL1;
        assert_eq!(Ref::bytes(&r), &VAL1.to_bytes());
        *r = AU64(0);
        Ref::write(&mut r, VAL1);
        assert_eq!(Ref::bytes(&r), &VAL1.to_bytes());

        // Assert that values written to the byte slice are reflected in the
        // typed value.
        const VAL2: AU64 = AU64(!VAL1.0); // different from `VAL1`
        Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.to_bytes()[..]);
        assert_eq!(*r, VAL2);
        assert_eq!(Ref::read(&r), VAL2);
    }

    // Verify that values written to a `Ref` are properly shared between the
    // typed and untyped representations; pass a value with `typed_len` `AU64`s
    // backed by an array of `typed_len * 8` bytes.
    fn test_new_helper_slice(mut r: Ref<&mut [u8], [AU64]>, typed_len: usize) {
        // Assert that the value starts out zeroed.
        assert_eq!(&*r, vec![AU64(0); typed_len].as_slice());

        // Check the backing storage is the exact same slice.
        // Each `AU64` occupies 8 bytes (cf. the `to_ne_bytes` checks below).
        let untyped_len = typed_len * 8;
        assert_eq!(Ref::bytes(&r).len(), untyped_len);
        assert_eq!(Ref::bytes(&r).as_ptr(), r.as_ptr().cast::<u8>());

        // Assert that values written to the typed value are reflected in the
        // byte slice.
        const VAL1: AU64 = AU64(0xFF00FF00FF00FF00);
        for typed in &mut *r {
            *typed = VAL1;
        }
        assert_eq!(Ref::bytes(&r), VAL1.0.to_ne_bytes().repeat(typed_len).as_slice());

        // Assert that values written to the byte slice are reflected in the
        // typed value.
        const VAL2: AU64 = AU64(!VAL1.0); // different from VAL1
        Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.0.to_ne_bytes().repeat(typed_len));
        assert!(r.iter().copied().all(|x| x == VAL2));
    }

    #[test]
    fn test_new_aligned_sized() {
        // Test that a properly-aligned, properly-sized buffer works for new,
        // new_from_prefix, and new_from_suffix, and that new_from_prefix and
        // new_from_suffix return empty slices. Test that a properly-aligned
        // buffer whose length is a multiple of the element size works for
        // new_slice.

        // A buffer with an alignment of 8.
        let mut buf = Align::<[u8; 8], AU64>::default();
        // `buf.t` should be aligned to 8, so this should always succeed.
        test_new_helper(Ref::<_, AU64>::from_bytes(&mut buf.t[..]).unwrap());
        {
            // In a block so that `r` and `suffix` don't live too long.
            buf.set_default();
            let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap();
            assert!(suffix.is_empty());
            test_new_helper(r);
        }
        {
            buf.set_default();
            let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap();
            assert!(prefix.is_empty());
            test_new_helper(r);
        }

        // A buffer with alignment 8 and length 24. We choose this length very
        // intentionally: if we instead used length 16, then the prefix and
        // suffix lengths would be identical. In the past, we used length 16,
        // which resulted in this test failing to discover the bug uncovered in
        // #506.
        let mut buf = Align::<[u8; 24], AU64>::default();
        // `buf.t` should be aligned to 8 and have a length which is a multiple
        // of `size_of::<AU64>()`, so this should always succeed.
        test_new_helper_slice(Ref::<_, [AU64]>::from_bytes(&mut buf.t[..]).unwrap(), 3);
        buf.set_default();
        let r = Ref::<_, [AU64]>::from_bytes_with_elems(&mut buf.t[..], 3).unwrap();
        test_new_helper_slice(r, 3);

        // 24 ascending bytes: 0, 1, ..., 23.
        let ascending: [u8; 24] = (0..24).collect::<Vec<_>>().try_into().unwrap();
        // 16 ascending bytes followed by 8 zeros.
        let mut ascending_prefix = ascending;
        ascending_prefix[16..].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]);
        // 8 zeros followed by 16 ascending bytes.
        let mut ascending_suffix = ascending;
        ascending_suffix[..8].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]);
        {
            buf.t = ascending_suffix;
            let (r, suffix) = Ref::<_, [AU64]>::from_prefix_with_elems(&mut buf.t[..], 1).unwrap();
            assert_eq!(suffix, &ascending[8..]);
            test_new_helper_slice(r, 1);
        }
        {
            buf.t = ascending_prefix;
            let (prefix, r) = Ref::<_, [AU64]>::from_suffix_with_elems(&mut buf.t[..], 1).unwrap();
            assert_eq!(prefix, &ascending[..16]);
            test_new_helper_slice(r, 1);
        }
    }

    #[test]
    fn test_new_oversized() {
        // Test that a properly-aligned, overly-sized buffer works for
        // `new_from_prefix` and `new_from_suffix`, and that they return the
        // remainder and prefix of the slice respectively.

        let mut buf = Align::<[u8; 16], AU64>::default();
        {
            // In a block so that `r` and `suffix` don't live too long. `buf.t`
            // should be aligned to 8, so this should always succeed.
            let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap();
            assert_eq!(suffix.len(), 8);
            test_new_helper(r);
        }
        {
            buf.set_default();
            // `buf.t` should be aligned to 8, so this should always succeed.
            let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap();
            assert_eq!(prefix.len(), 8);
            test_new_helper(r);
        }
    }

    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_new_error() {
        // Fail because the buffer is too large.

        // A buffer with an alignment of 8.
        let buf = Align::<[u8; 16], AU64>::default();
        // `buf.t` should be aligned to 8, so only the length check should fail.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err());

        // Fail because the buffer is too small.

        // A buffer with an alignment of 8.
        let buf = Align::<[u8; 4], AU64>::default();
        // `buf.t` should be aligned to 8, so only the length check should fail.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err());
        assert!(Ref::<_, AU64>::from_prefix(&buf.t[..]).is_err());
        assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err());

        // Fail because the length is not a multiple of the element size.

        let buf = Align::<[u8; 12], AU64>::default();
        // `buf.t` has length 12, but element size is 8.
        assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[..]).is_err());

        // Fail because the buffer is too short.
        let buf = Align::<[u8; 12], AU64>::default();
        // `buf.t` has length 12, but the element size is 8 (and we're expecting
        // two of them). For each function, we test with a length that would
        // cause the size to overflow `usize`, and with a normal length that
        // will fail thanks to the buffer being too short; these are different
        // error paths, and while the error types are the same, the distinction
        // shows up in code coverage metrics.
        let n = (usize::MAX / mem::size_of::<AU64>()) + 1;
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], 2).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], 2).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], 2).is_err());

        // Fail because the alignment is insufficient.

        // A buffer with an alignment of 8. An odd buffer size is chosen so that
        // the last byte of the buffer has odd alignment.
        let buf = Align::<[u8; 13], AU64>::default();
        // Slicing from 1, we get a buffer with size 12 (so the length check
        // should succeed) but an alignment of only 1, which is insufficient.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[1..]).is_err());
        assert!(Ref::<_, AU64>::from_prefix(&buf.t[1..]).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[1..]).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[1..], 1).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[1..], 1).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[1..], 1).is_err());
        // Slicing is unnecessary here because `new_from_suffix` uses the suffix
        // of the slice, which has odd alignment.
        assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err());

        // Fail due to arithmetic overflow.

        let buf = Align::<[u8; 16], AU64>::default();
        let unreasonable_len = usize::MAX / mem::size_of::<AU64>() + 1;
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], unreasonable_len).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], unreasonable_len).is_err());
    }

    #[test]
    #[allow(unstable_name_collisions)]
    #[allow(clippy::as_conversions)]
    fn test_into_ref_mut() {
        #[allow(unused)]
        use crate::util::AsAddress as _;

        let mut buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap();
        let rf = Ref::into_ref(r);
        assert_eq!(rf, &0u64);
        let buf_addr = (&buf.t as *const [u8; 8]).addr();
        assert_eq!((rf as *const u64).addr(), buf_addr);

        let r = Ref::<_, u64>::from_bytes(&mut buf.t[..]).unwrap();
        let rf = Ref::into_mut(r);
        assert_eq!(rf, &mut 0u64);
        assert_eq!((rf as *mut u64).addr(), buf_addr);

        // Writes through the returned `&mut` must be visible in the backing
        // buffer.
        *rf = u64::MAX;
        assert_eq!(buf.t, [0xFF; 8]);
    }

    #[test]
    fn test_display_debug() {
        // `Display` delegates to the referent; `Debug` wraps it in `Ref(...)`.
        let buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap();
        assert_eq!(format!("{}", r), "0");
        assert_eq!(format!("{:?}", r), "Ref(0)");

        let buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, [u64]>::from_bytes(&buf.t[..]).unwrap();
        assert_eq!(format!("{:?}", r), "Ref([0])");
    }

    #[test]
    fn test_eq() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 0_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert_eq!(r1, r2);
    }

    #[test]
    fn test_ne() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 1_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert_ne!(r1, r2);
    }

    #[test]
    fn test_ord() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 1_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert!(r1 < r2);
        assert_eq!(PartialOrd::partial_cmp(&r1, &r2), Some(Ordering::Less));
        assert_eq!(Ord::cmp(&r1, &r2), Ordering::Less);
    }
}
1295
// Benchmarks for `Ref` construction, conversion, and deref. These require the
// unstable `test` crate and are therefore gated on the internal nightly cfg.
#[cfg(all(test, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS))]
mod benches {
    use test::{self, Bencher};

    use super::*;
    use crate::util::testutil::*;

    #[bench]
    fn bench_from_bytes_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        // `buf.t` should be aligned to 8, so this should always succeed.
        let bytes = &buf.t[..];
        b.iter(|| test::black_box(Ref::<_, AU64>::from_bytes(test::black_box(bytes)).unwrap()));
    }

    #[bench]
    fn bench_into_ref_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        let bytes = &buf.t[..];
        let r = Ref::<_, AU64>::from_bytes(bytes).unwrap();
        b.iter(|| test::black_box(Ref::into_ref(test::black_box(r))));
    }

    #[bench]
    fn bench_into_mut_sized(b: &mut Bencher) {
        let mut buf = Align::<[u8; 8], AU64>::default();
        let buf = &mut buf.t[..];
        // Constructed once up front to validate `buf`; the loop below uses
        // `new_unchecked` so the benchmark doesn't measure validation.
        let _ = Ref::<_, AU64>::from_bytes(&mut *buf).unwrap();
        b.iter(move || {
            // SAFETY: The preceding `from_bytes` succeeded, and so we know that
            // `buf` is validly-aligned and has the correct length.
            let r = unsafe { Ref::<&mut [u8], AU64>::new_unchecked(&mut *buf) };
            test::black_box(Ref::into_mut(test::black_box(r)));
        });
    }

    #[bench]
    fn bench_deref_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        let bytes = &buf.t[..];
        let r = Ref::<_, AU64>::from_bytes(bytes).unwrap();
        b.iter(|| {
            let temp = test::black_box(r);
            test::black_box(temp.deref());
        });
    }

    #[bench]
    fn bench_deref_mut_sized(b: &mut Bencher) {
        let mut buf = Align::<[u8; 8], AU64>::default();
        let buf = &mut buf.t[..];
        // Constructed once up front to validate `buf`; the loop below uses
        // `new_unchecked` so the benchmark doesn't measure validation.
        let _ = Ref::<_, AU64>::from_bytes(&mut *buf).unwrap();
        b.iter(|| {
            // SAFETY: The preceding `from_bytes` succeeded, and so we know that
            // `buf` is validly-aligned and has the correct length.
            let r = unsafe { Ref::<&mut [u8], AU64>::new_unchecked(&mut *buf) };
            let mut temp = test::black_box(r);
            test::black_box(temp.deref_mut());
        });
    }
}