kernel/utilities/
dma_slice.rs

1// Licensed under the Apache License, Version 2.0 or the MIT License.
2// SPDX-License-Identifier: Apache-2.0 OR MIT
3// Copyright Leon Schuermann <leon@is.currently.online> 2026.
4// Copyright Tock Contributors 2026.
5
6//! Mechanism for sharing buffers with DMA peripherals.
7//!
8//! When implementing a chip peripheral driver using DMA, the driver must be
9//! careful to not introduce any undefined behavior. This module provides
10//! `DmaSlice` types, which drivers can use when passing a buffer to the DMA
11//! hardware. When used correctly, these types ensure that Rust's memory requirements
12//! are preserved when hardware is accessing memory in a way the Rust compiler
13//! cannot reason about.
14//!
15//! Tock provides multiple implementations of `DmaSlice` depending on the
16//! needs of the user. These include:
17//!
18//! - [`DmaSlice`]: For immutable buffers and read-only DMA operations.
19//! - [`DmaSliceMut`]: For mutable buffers and writable DMA operations.
20//! - [`DmaSubSlice`]: For immutable [`SubSlice`]s and read-only DMA
21//!   operations.
22//! - [`DmaSubSliceMut`]: For mutable [`SubSliceMut`]s and writable DMA
23//!   operations.
24//!
25//! Internally, all implementations of `DmaSlice` use an architecture or
26//! chip-provided implementation of [`DmaFence`] to ensure that the
27//! Rust compiler cannot assume the memory passed to the DMA hardware is not
28//! modified.
29//!
30//! Conceptually, a `DmaSlice` consumes a memory buffer. Once consumed, a
31//! pointer to that memory can then be safely provided to DMA hardware. When the
32//! buffer is consumed, the `DmaSlice` prevents the Rust compiler from making
33//! assumptions about the state of the memory accessed by DMA hardware that
34//! would be incorrect and introduce undefined behavior. Once the DMA operation
35//! finishes, the buffer must be extracted from the `DmaSlice`. Before
36//! extracting the buffer, the user must guarantee that the DMA hardware can no
37//! longer access the memory.
38//!
39//! # Usage
40//!
41//! This example shows how a developer might use the `DmaSlice` infrastructure
42//! in a driver. The [`DmaSliceMut::new`] operation can be replaced by the safe
43//! [`DmaSliceMut::new_static`] alternative if the provided slice reference has
44//! a `'static` lifetime.
45//!
46//! ```
47//! # use core::cell::Cell;
48//! # use kernel::platform::dma_fence::DmaFence;
49//! # use kernel::utilities::dma_slice::DmaSliceMut;
50//! #
51//! # #[derive(Debug, Copy, Clone)]
52//! # struct SomeDmaFence;
53//! # unsafe impl DmaFence for SomeDmaFence {
54//! #     fn release<T>(self, _buf: *mut [T]) {}
55//! #     fn acquire<T>(self, _buf: *mut [T]) {}
56//! # }
57//! #
58//! # enum DmaOp {
59//! #     Stop,
60//! # }
61//! #
62//! # struct Registers {
63//! #     dma_ptr: Cell<*mut u8>,
64//! #     dma_ctrl: Cell<DmaOp>,
65//! # }
66//! #
67//! # let regs = Registers {
68//! #     dma_ptr: Cell::new(core::ptr::null_mut()),
69//! #     dma_ctrl: Cell::new(DmaOp::Stop),
70//! # };
71//! #
72//! // Buffer that will be used by the DMA hardware.
73//! let mut buffer: [u8; 16] = [0_u8; 16];
74//!
75//! // Create the `DmaSlice` that can be provided to the DMA hardware.
76//! //
77//! // For a static slice, users can instead use the safe `new_static`
78//! // constructor.
79//! let dma_slice = unsafe { DmaSliceMut::new(&mut buffer, SomeDmaFence) };
80//!
81//! // Provide the pointer to the buffer to the DMA hardware registers.
82//! regs.dma_ptr.set(dma_slice.as_mut_ptr());
83//!
84//! // Wait for the DMA operation to finish...
85//!
86//! // Disable the DMA engine to ensure it cannot access the buffer.
87//! regs.dma_ctrl.set(DmaOp::Stop);
88//!
89//! // Extract the buffer to retrieve the Rust slice.
90//! let buffer = unsafe { dma_slice.take(SomeDmaFence) };
91//! ```
92
93use core::marker::PhantomData;
94use core::ops::Range;
95use core::ptr::{self, NonNull};
96
97use super::leasable_buffer::{SubSlice, SubSliceMut, SubSliceMutImmut};
98use crate::platform::dma_fence::DmaFence;
99
/// An immutable buffer that can be safely used for read-only DMA operations.
///
/// The buffer can be a slice of any type which implements
/// [`ImmutableFromIntoBytes`](immutable_from_into_bytes::ImmutableFromIntoBytes),
/// such as `u8` or `u32`.
///
/// [`DmaSlice`] wraps an immutable slice. As such, its contents MUST NOT be
/// modified by the DMA operation. For a DMA operation that may write to the
/// supplied buffer, use [`DmaSliceMut`] instead.
///
/// # Use with DMA
///
/// Creating a [`DmaSlice`] over an immutable Rust slice ensures that all prior
/// Rust writes to this slice are observable by any DMA operations initiated
/// through an MMIO write operation, where that MMIO write is performed after
/// constructing the [`DmaSlice`].
///
/// For this guarantee to hold, the [`DmaSlice`] instance must exist for the
/// duration of the entire DMA operation, until the Rust program has observed
/// that the operation is complete (such as by reading a status bit in memory or
/// an MMIO register).
///
/// This struct uses a [`DmaFence`] implementation to ensure that all prior
/// writes to `slice` are exposed to any DMA operations initiated by an MMIO
/// read or write operation issued after this function returns, and which finish
/// before the resulting [`DmaSlice`] is dropped.
#[derive(Debug)]
pub struct DmaSlice<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    // Shared borrow of the underlying buffer. Holding this shared borrow for
    // the lifetime of the `DmaSlice` prevents safe Rust code from mutating the
    // memory while the DMA hardware may still be reading it.
    slice: &'a [T],
}
130
131impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSlice<'a, T> {
132    /// Create a [`DmaSlice`] from an immutable slice.
133    pub fn new(slice: &[T], fence: impl DmaFence) -> DmaSlice<'_, T> {
134        // Ensure that all prior writes to this slice are exposed to any DMA
135        // operations initiated by an MMIO read or write operation after this
136        // function returns.
137        let mut_slice_ptr: *mut [T] = ptr::from_ref(slice).cast_mut();
138        fence.release::<T>(mut_slice_ptr);
139
140        DmaSlice { slice }
141    }
142
143    /// Returns a pointer to the start of the slice.
144    pub fn as_ptr(&self) -> *const T {
145        self.slice.as_ptr()
146    }
147
148    /// Returns the length of the slice.
149    pub fn len(&self) -> usize {
150        self.slice.len()
151    }
152
153    /// Retrieve the inner slice reference.
154    ///
155    /// This is safe, as the slice is immutable. Therefore, the DMA hardware and
156    /// software may read it concurrently.
157    pub fn get(&self) -> &'a [T] {
158        self.slice
159    }
160}
161
/// A mutable buffer that can be safely used for DMA operations that read or
/// write the buffer's contents.
///
/// The buffer can be a slice of any type which implements
/// [`ImmutableFromIntoBytes`](immutable_from_into_bytes::ImmutableFromIntoBytes),
/// such as `u8` or `u32`.
///
/// # Use with DMA
///
/// Creating a [`DmaSliceMut`] over a mutable Rust slice ensures that all prior
/// Rust writes to this slice are observable by any DMA operations initiated
/// through an MMIO write operation, where that MMIO write is performed
/// **after** constructing the `DmaSliceMut`. All writes by the DMA operation
/// will be observable by Rust when calling [`take`](Self::take) **after** the
/// DMA operation is finished.
///
/// This struct uses a [`DmaFence`] implementation to ensure that all prior
/// writes to `slice` are exposed to any DMA operations initiated by an MMIO
/// read or write operation after this function returns, and which finish before
/// calling [`take`](Self::take).
///
/// # Safety Considerations
///
/// Users **must** eventually call [`take`](Self::take) to retrieve the
/// underlying buffer. The [`DmaSliceMut`] must exist for the entire duration of
/// the DMA operation. Users must never drop a [`DmaSliceMut`] with a
/// non-`'static` lifetime, as this could provide access to the underlying
/// buffer without guaranteeing that the DMA operation has finished, and without
/// issuing a DMA memory fence to ensure that writes by the DMA operation are
/// visible to Rust.
///
/// [`take`](Self::take) must only be called after the DMA operation has been
/// observed to be complete through an explicit memory or MMIO read. Callers
/// must ensure that the hardware will not perform any further writes to the
/// buffer at the point where [`take`](Self::take) is called.
///
/// Callers must further ensure that they start DMA operations through a memory
/// or MMIO write only after constructing the [`DmaSliceMut`], and only in the
/// memory region described by [`as_mut_ptr`](Self::as_mut_ptr) and
/// [`len`](Self::len).
///
/// Users are responsible to ensure that, after the DMA operation completes and
/// before calling [`take`](Self::take), every element in the underlying slice
/// represents a well-initialized and valid instance of its type (with the
/// exception of padding bytes). Concretely, all elements in the slice must meet
/// the requirements of the
/// [`ImmutableFromIntoBytes`](immutable_from_into_bytes::ImmutableFromIntoBytes)
/// trait. See the [zerocopy crate](https://docs.rs/zerocopy/0.8.31/zerocopy/)
/// for a more in-depth explanation of these requirements.
#[derive(Debug)]
pub struct DmaSliceMut<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    // Raw, non-null fat pointer to the wrapped slice. Stored as a raw pointer
    // (rather than `&'a mut [T]`) so that no Rust reference to the buffer
    // exists while the DMA hardware may be writing to it.
    slice_ptr: NonNull<[T]>,
    // Ties this wrapper to the lifetime of the mutable borrow it was
    // constructed from, without holding the reference itself.
    _lt: PhantomData<&'a mut [T]>,
}
216
impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSliceMut<'a, T> {
    /// Create a [`DmaSliceMut`] from a static mutable slice.
    ///
    /// In contrast to [`DmaSliceMut::new`], this constructor is safe: the
    /// slice has `'static` lifetime, so dropping the returned value without
    /// calling [`take`](Self::take) can only leak the buffer.
    pub fn new_static(slice: &'static mut [T], fence: impl DmaFence) -> DmaSliceMut<'static, T> {
        // # Safety
        //
        // This operation is safe, as dropping or forgetting its return value is
        // safe. This would merely leak memory and make the underlying slice
        // inaccessible.
        unsafe { Self::new(slice, fence) }
    }

    /// Create a [`DmaSliceMut`] from a mutable slice.
    ///
    /// # Safety
    ///
    /// Callers must ensure to not drop the returned [`DmaSliceMut`]. This could
    /// provide access to the underlying buffer without guaranteeing that the
    /// DMA operation has finished.  Users **must** eventually call
    /// [`take`](Self::take) to retrieve the underlying buffer.
    #[must_use]
    pub unsafe fn new(slice: &mut [T], fence: impl DmaFence) -> DmaSliceMut<'_, T> {
        // Capture the slice as a raw pointer first; the mutable reference
        // `slice` must not be retained while the DMA hardware may write to
        // the buffer.
        let dma_slice_mut = DmaSliceMut {
            slice_ptr: NonNull::from_mut(slice),
            _lt: PhantomData,
        };

        // Ensure that all prior writes to this slice are exposed to any DMA
        // operations initiated by an MMIO read or write operation after this
        // function returns:
        fence.release::<T>(dma_slice_mut.slice_ptr.as_ptr());

        dma_slice_mut
    }

    /// Returns the pointer to the start of the slice.
    pub fn as_mut_ptr(&self) -> *mut T {
        // TODO: switch `.cast()` to `.as_mut_ptr()` on the slice pointer (`*mut
        // [T]`) to obtain the "thin", raw pointer to its first element. This is
        // blocked on the nightly `slice_ptr_get` feature.
        self.slice_ptr.as_ptr().cast()
    }

    /// Returns the length of the slice.
    pub fn len(&self) -> usize {
        self.slice_ptr.len()
    }

    /// Recover the original mutable slice.
    ///
    /// The caller MUST ensure the hardware DMA will no longer write to the
    /// buffer.
    ///
    /// # Safety
    ///
    /// Callers must guarantee no hardware DMA have access to the buffer before
    /// calling `take()`. All DMA operations must have completed before calling
    /// this function and the caller must ensure no future operations will occur
    /// using the underlying buffer.
    pub unsafe fn take(mut self, fence: impl DmaFence) -> &'a mut [T] {
        // Ensure that any reads from Rust to the buffer described by
        // `slice_ptr` issued _after_ this function returns reflect all writes
        // made by DMA operations finished _before_ this function ran:
        fence.acquire::<T>(self.slice_ptr.as_ptr());

        // Reconstitute the mutable slice reference only after the acquire
        // fence above. `NonNull::as_mut` requires unique access, hence
        // `mut self`.
        unsafe { self.slice_ptr.as_mut() }
    }
}
284
/// A buffer that can be safely used for read-only DMA operations, backed by
/// either a shared (immutable) or unique (mutable) Rust slice.
///
/// Creating a [`DmaSliceMutImmut`] over a Rust slice ensures that all prior
/// Rust writes to this slice are observable by any DMA operations initiated
/// through an MMIO write operation, where that MMIO write is performed after
/// constructing the `DmaSliceMutImmut`.
///
/// For this guarantee to hold, the `DmaSliceMutImmut` instance must exist for
/// the duration of the entire DMA operation, until the Rust program has
/// observed that the operation is complete (such as by reading a status bit in
/// memory or an MMIO register).
///
/// [`DmaSliceMutImmut`] may wrap an immutable, shared Rust slice
/// reference. Furthermore, in contrast to `DmaSliceMut`, `DmaSliceMutImmut` may
/// not expose writes performed by a DMA operation back to Rust. As such, its
/// contents *must* not be modified by the DMA operation. For a DMA operation
/// that may write to the supplied buffer, use [`DmaSliceMut`] instead.
#[derive(Debug)]
pub enum DmaSliceMutImmut<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    /// Backed by a shared, immutable slice.
    Immutable(DmaSlice<'a, T>),
    /// Backed by a unique, mutable slice (still used read-only by DMA).
    Mutable(DmaSliceMut<'a, T>),
}
308
impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSliceMutImmut<'a, T> {
    /// Create a [`DmaSliceMutImmut`] from a shared, immutable Rust slice.
    ///
    /// This function uses the supplied `fence` object to ensure that all prior
    /// writes to `slice` are exposed to any DMA operations initiated by an MMIO
    /// read or write operation after this function returns, and which finish
    /// before the resulting [`DmaSliceMutImmut`] is dropped.
    pub fn new(slice: &[T], fence: impl DmaFence) -> DmaSliceMutImmut<'_, T> {
        DmaSliceMutImmut::Immutable(DmaSlice::new(slice, fence))
    }

    /// Create a [`DmaSliceMutImmut`] from a unique, mutable Rust slice.
    ///
    /// This function uses the supplied `fence` object to ensure that all prior
    /// writes to `slice` are exposed to any DMA operations initiated by an MMIO
    /// read or write operation after this function returns, and which finish
    /// before the resulting [`DmaSliceMutImmut`] is dropped.
    ///
    /// Even though this method takes a unique, mutable Rust slice, DMA
    /// operations must not modify the buffer's contents.
    pub fn new_mut(slice: &mut [T], fence: impl DmaFence) -> DmaSliceMutImmut<'_, T> {
        // # Safety
        //
        // `DmaSliceMut::new` is unsafe, as dropping its return value without
        // calling `take` may make the underlying buffer accessible as a Rust
        // slice, potentially before the DMA operation is complete, and without
        // using `fence.acquire` to make DMA writes visible to Rust. However,
        // this struct does not permit DMA operations which write to the slice,
        // and hence it can be safely dropped without risk of concurrent
        // modifications or incoherence.
        DmaSliceMutImmut::Mutable(unsafe { DmaSliceMut::new(slice, fence) })
    }

    /// Returns the pointer to the first element of the wrapped slice reference.
    pub fn as_ptr(&self) -> *const T {
        match self {
            DmaSliceMutImmut::Immutable(dma_slice) => dma_slice.as_ptr(),
            DmaSliceMutImmut::Mutable(dma_slice_mut) => dma_slice_mut.as_mut_ptr().cast_const(),
        }
    }

    /// Returns the length of the wrapped slice reference.
    pub fn len(&self) -> usize {
        match self {
            DmaSliceMutImmut::Immutable(dma_slice) => dma_slice.len(),
            DmaSliceMutImmut::Mutable(dma_slice_mut) => dma_slice_mut.len(),
        }
    }

    /// Retrieve the inner slice reference.
    ///
    /// This is safe, as [`DmaSliceMutImmut`] can only be used for read-only DMA
    /// operations. The DMA hardware and software may read the underlying slice
    /// concurrently.
    pub fn get(&self) -> &'a [T] {
        match self {
            DmaSliceMutImmut::Immutable(dma_slice) => dma_slice.get(),
            DmaSliceMutImmut::Mutable(dma_slice_mut) => unsafe {
                // # Safety
                //
                // Over the duration that the [`DmaSliceMutImmut`] exists, the
                // user guarantees that no DMA operation modifies the buffer
                // (and doing so would require an MMIO write, which is itself
                // unsafe). The `dma_slice_mut` is capturing a unique, mutable
                // borrow of the underlying slice over its lifetime `'a`. As
                // such, we can safely hand out immutable references over this
                // slice, which are also bound to the lifetime `'a`.
                core::slice::from_raw_parts(
                    dma_slice_mut.as_mut_ptr().cast_const(),
                    dma_slice_mut.len(),
                )
            },
        }
    }
}
384
/// An immutable buffer that can be safely used for read-only DMA operations,
/// created from a `SubSlice` describing an active range in a larger buffer.
///
/// Creating a [`DmaSubSlice`] over a [`SubSlice`] ensures that all prior Rust
/// writes to the active region of this slice are observable by any DMA
/// operations initiated through an MMIO write operation, where that MMIO write
/// is performed *after* constructing the `DmaSubSlice`.
///
/// For this guarantee to hold, the `DmaSubSlice` struct must exist for the
/// duration of the entire DMA operation, until the Rust program has observed
/// that the operation is complete (such as by reading a status bit in memory or
/// an MMIO register).
///
/// [`DmaSubSlice`] wraps an immutable, shared Rust slice reference. As such,
/// its contents must not be modified by the DMA operation. For a DMA operation
/// that may write to the supplied buffer, use [`DmaSubSliceMut`] instead.
#[derive(Debug)]
pub struct DmaSubSlice<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    // The wrapped `SubSlice`. Its active range describes the portion of the
    // underlying buffer exposed to the DMA hardware.
    sub_slice: SubSlice<'a, T>,
}
405
406impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSubSlice<'a, T> {
407    /// Create a [`DmaSubSlice`] from a shared, immutable Rust slice.
408    ///
409    /// This function uses the supplied `fence` object to ensure that all prior
410    /// writes to `slice` are exposed to any DMA operations initiated by an MMIO
411    /// read or write operation after this function returns, and which finish
412    /// before the resulting [`DmaSubSlice`] is dropped.
413    pub fn new(sub_slice: SubSlice<'_, T>, fence: impl DmaFence) -> DmaSubSlice<'_, T> {
414        // Ensure that all prior writes to this slice are exposed to any DMA
415        // operations initiated by an MMIO read or write operation after this
416        // function returns:
417        //
418        // Clippy says we should be using `.as_mut_ptr()` instead of `.as_ptr()
419        // as *mut T`, but that method doesn't exist. The cast doesn't matter
420        // here, `DmaFence::release` will not actually dereference the memory.
421        let sub_slice_ptr: *mut T = sub_slice.as_ptr().cast_mut();
422        #[allow(clippy::as_ptr_cast_mut)]
423        fence.release::<T>(ptr::slice_from_raw_parts_mut(
424            // `SubSlice::as_ptr()` returns a pointer to the currently
425            // accessible portion of the `SubSlice`.
426            sub_slice_ptr,
427            // `SubSlice::len()` returns the length of the currently accessible
428            // portion of the `SubSlice`.
429            sub_slice.len(),
430        ));
431
432        DmaSubSlice { sub_slice }
433    }
434
435    /// Returns the pointer to the first element of the currently accessible
436    /// portion of the wrapped `SubSlice`.
437    pub fn as_ptr(&self) -> *const T {
438        self.sub_slice.as_ptr()
439    }
440
441    /// Returns the length of the currently accessible range of the wrapped
442    /// `SubSlice`.
443    pub fn len(&self) -> usize {
444        self.sub_slice.len()
445    }
446
447    /// Retrieve the wrapped `SubSlice`.
448    pub fn as_sub_slice(&self) -> SubSlice<'a, T> {
449        self.sub_slice
450    }
451}
452
/// A mutable buffer that can be safely used for DMA operations that read or
/// write the buffer's contents.
///
/// The buffer can be a slice of any type which implements
/// [`ImmutableFromIntoBytes`](immutable_from_into_bytes::ImmutableFromIntoBytes),
/// such as `u8` or `u32`.
///
/// # Use with DMA
///
/// Creating a [`DmaSubSliceMut`] over a [`SubSliceMut`] ensures that all prior
/// Rust writes to the active range of this slice are observable by any DMA
/// operations initiated through an MMIO write operation, where that MMIO write
/// is performed **after** constructing the `DmaSubSliceMut`. All writes by the
/// DMA operation will be observable by Rust when calling [`take`](Self::take)
/// **after** the DMA operation is finished.
///
/// This struct uses a [`DmaFence`] implementation to ensure that all prior
/// writes to `slice` are exposed to any DMA operations initiated by an MMIO
/// read or write operation after this function returns, and which finish before
/// calling [`take`](Self::take).
///
/// # Safety Considerations
///
/// Users **must** eventually call [`take`](Self::take) to retrieve the
/// underlying buffer. The [`DmaSubSliceMut`] must exist for the entire duration
/// of the DMA operation. Users must never drop a [`DmaSubSliceMut`] with a
/// non-`'static` lifetime, as this could provide access to the underlying
/// buffer without guaranteeing that the DMA operation has finished, and without
/// issuing a DMA memory fence to ensure that writes by the DMA operation are
/// visible to Rust.
///
/// [`take`](Self::take) must only be called after the DMA operation has been
/// observed to be complete through an explicit memory or MMIO read. Callers
/// must ensure that the hardware will not perform any further writes to the
/// buffer at the point where [`take`](Self::take) is called.
///
/// Callers must further ensure that they start DMA operations through a memory
/// or MMIO write only after constructing the [`DmaSubSliceMut`], and only in
/// the memory region described by [`as_mut_ptr`](Self::as_mut_ptr) and
/// [`len`](Self::len).
///
/// Users are responsible to ensure that, after the DMA operation completes and
/// before calling [`take`](Self::take), every element in the underlying slice
/// represents a well-initialized and valid instance of its type (with the
/// exception of padding bytes). Concretely, all elements in the slice must meet
/// the requirements of the
/// [`ImmutableFromIntoBytes`](immutable_from_into_bytes::ImmutableFromIntoBytes)
/// trait. See the [zerocopy crate](https://docs.rs/zerocopy/0.8.31/zerocopy/)
/// for a more in-depth explanation of these requirements.
#[derive(Debug)]
pub struct DmaSubSliceMut<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    // Raw, non-null fat pointer to the *entire* underlying slice (not just
    // its active range).
    internal_slice_ptr: NonNull<[T]>,
    // Active range of the original `SubSliceMut`, retained so the
    // `SubSliceMut` can be reconstructed with an identical configuration.
    active_range: Range<usize>,
    // Ties this wrapper to the lifetime of the buffer it was constructed
    // from, without holding a Rust reference to that buffer.
    _lt: PhantomData<&'a mut [T]>,
}
508
509impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSubSliceMut<'a, T> {
510    /// Create a [`DmaSubSliceMut`] from a [`SubSliceMut`] with `'static`
511    /// lifetime.
512    pub fn new_static(
513        sub_slice: SubSliceMut<'static, T>,
514        fence: impl DmaFence,
515    ) -> DmaSubSliceMut<'static, T> {
516        // # Safety
517        //
518        // This operation is safe, as dropping or forgetting its return value is
519        // safe. This would merely leak memory and make the underlying slice
520        // inaccessible.
521        unsafe { Self::new(sub_slice, fence) }
522    }
523
524    /// Create a [`DmaSubSliceMut`] from a [`SubSliceMut`].
525    ///
526    /// # Safety
527    ///
528    /// Callers must ensure to not drop the returned [`DmaSubSliceMut`]. This
529    /// could provide access to the underlying buffer without guaranteeing that
530    /// the DMA operation has finished. Users **must** eventually call
531    /// [`take`](Self::take) to retrieve the underlying buffer.
532    #[must_use]
533    pub unsafe fn new(
534        sub_slice_mut: SubSliceMut<'_, T>,
535        fence: impl DmaFence,
536    ) -> DmaSubSliceMut<'_, T> {
537        let active_range = sub_slice_mut.active_range();
538        let internal_slice_ptr = sub_slice_mut.take();
539
540        // Store only a fat raw pointer to the inner slice:
541        let dma_sub_slice_mut = DmaSubSliceMut {
542            internal_slice_ptr: NonNull::from_mut(internal_slice_ptr),
543            active_range,
544            _lt: PhantomData,
545        };
546
547        // Ensure that all prior writes to the currently active portion of this
548        // SubSliceMut are exposed to any DMA operations initiated by an MMIO
549        // read or write operation after this function returns:
550        fence.release::<T>(ptr::slice_from_raw_parts_mut(
551            dma_sub_slice_mut.as_mut_ptr(),
552            dma_sub_slice_mut.len(),
553        ));
554
555        dma_sub_slice_mut
556    }
557
558    /// Returns the pointer to the first element of the active range of the
559    /// wrapped [`SubSliceMut`].
560    pub fn as_mut_ptr(&self) -> *mut T {
561        // TODO: switch `.cast()` to `.as_mut_ptr()` on the slice pointer (`*mut
562        // [T]`) to obtain the "thin", raw pointer to its first element. This is
563        // blocked on the nightly `slice_ptr_get` feature.
564        self.internal_slice_ptr.as_ptr().cast::<T>().wrapping_add(
565            if self.active_range.start >= self.internal_slice_ptr.len() {
566                // `range.start` is out of bounds, return a pointer that's one
567                // after the last byte in this buffer, and length `0`:
568                self.internal_slice_ptr.len()
569            } else {
570                // Start is in bounds:
571                self.active_range.start
572            },
573        )
574    }
575
576    /// Returns the length of the active range of the wrapped [`SubSliceMut`].
577    pub fn len(&self) -> usize {
578        core::cmp::min(
579            self.active_range
580                .end
581                .saturating_sub(self.active_range.start),
582            self.internal_slice_ptr.len(),
583        )
584    }
585
586    /// Recover the original [`SubSliceMut`] used to construct this
587    /// [`DmaSubSliceMut`] object.
588    ///
589    /// # Safety
590    ///
591    /// Callers must guarantee no hardware DMA have access to the buffer before
592    /// calling `take()`. All DMA operations must have completed before calling
593    /// this function and the caller must ensure no future operations will occur
594    /// using the underlying buffer.
595    pub unsafe fn take(mut self, fence: impl DmaFence) -> SubSliceMut<'a, T> {
596        // Ensure that any reads from Rust to the active range of the buffer
597        // (described by `self.as_mut_ptr()` and `self.len()`) _after_ this
598        // function returns reflect all writes made by DMA operations finished
599        // _before_ this function ran:
600        fence.acquire::<T>(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len()));
601
602        // Restore the original `SubSliceMut` configuration:
603        let mut sub_slice_mut = SubSliceMut::new(unsafe { self.internal_slice_ptr.as_mut() });
604        sub_slice_mut.slice(self.active_range);
605        sub_slice_mut
606    }
607
608    /// Recover the original [`SubSliceMut`] used to construct this
609    /// [`DmaSubSliceMut`] object, without performing an `acquire` fence.
610    ///
611    /// # Safety
612    ///
613    /// This function does not necessarily expose any writes to the underlying
614    /// buffer to Rust. It must only be used when the underlying buffer's
615    /// contents have not been modified by a DMA operation (i.e., any DMA
616    /// operations operating on the buffer while this `DmaSubSliceMut` existed
617    /// were read-only).
618    unsafe fn take_no_acquire(mut self) -> SubSliceMut<'a, T> {
619        // Restore the original `SubSliceMut` configuration:
620        let mut sub_slice_mut = SubSliceMut::new(unsafe { self.internal_slice_ptr.as_mut() });
621        sub_slice_mut.slice(self.active_range);
622        sub_slice_mut
623    }
624}
625
/// A buffer that can be safely used for read-only DMA operations, backed by
/// a [`SubSliceMutImmut`].
///
/// Creating a [`DmaSubSliceMutImmut`] over a [`SubSliceMutImmut`] ensures that
/// all prior Rust writes to the active region of this slice are observable by
/// any DMA operations initiated through an MMIO write operation, where that
/// MMIO write is performed *after* constructing the `DmaSubSliceMutImmut`.
///
/// For this guarantee to hold, the `DmaSubSliceMutImmut` struct must exist for
/// the duration of the entire DMA operation, until the Rust program has
/// observed that the operation is complete (such as by reading a status bit in
/// memory or an MMIO register).
///
/// [`DmaSubSliceMutImmut`] may wrap an immutable, shared Rust slice
/// reference. Furthermore, in contrast to `DmaSubSliceMut`,
/// `DmaSubSliceMutImmut` may not expose writes performed by a DMA operation
/// back to Rust. As such, its contents *must* not be modified by the DMA
/// operation. For a DMA operation that may write to the active range of the
/// supplied sub slice, use [`DmaSubSliceMut`] instead.
#[derive(Debug)]
pub enum DmaSubSliceMutImmut<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> {
    /// Backed by a shared, immutable sub-slice.
    Immutable(DmaSubSlice<'a, T>),
    /// Backed by a unique, mutable sub-slice (still used read-only by DMA).
    Mutable(DmaSubSliceMut<'a, T>),
}
650
651impl<'a, T: immutable_from_into_bytes::ImmutableFromIntoBytes> DmaSubSliceMutImmut<'a, T> {
    /// Create a [`DmaSubSliceMutImmut`] from a [`SubSliceMutImmut`].
    ///
    /// This function uses the supplied `fence` object to ensure that all prior
    /// writes to `slice` are exposed to any DMA operations initiated by an MMIO
    /// read or write operation after this function returns, and which finish
    /// before the resulting [`DmaSubSliceMutImmut`] is dropped.
    pub fn new(
        sub_slice: SubSliceMutImmut<'_, T>,
        fence: impl DmaFence,
    ) -> DmaSubSliceMutImmut<'_, T> {
        match sub_slice {
            // A shared, immutable sub-slice maps directly onto the safe
            // `DmaSubSlice` constructor.
            SubSliceMutImmut::Immutable(sub_slice) => {
                DmaSubSliceMutImmut::Immutable(DmaSubSlice::new(sub_slice, fence))
            }
            SubSliceMutImmut::Mutable(sub_slice_mut) => DmaSubSliceMutImmut::Mutable(unsafe {
                // # Safety
                //
                // `DmaSubSliceMut::new` is unsafe, as dropping its return value
                // without calling `take` may make the underlying buffer
                // accessible as a Rust slice, potentially before the DMA
                // operation is complete, and without using `fence.acquire` to
                // make DMA writes visible to Rust. However, this struct does
                // not permit DMA operations which write to the slice, and hence
                // it can be safely dropped without risk of concurrent
                // modifications or incoherence.
                DmaSubSliceMut::new(sub_slice_mut, fence)
            }),
        }
    }
681
682    /// Returns the pointer to the first element of the currently accessible
683    /// portion of the wrapped `SubSlice`.
684    pub fn as_ptr(&self) -> *const T {
685        match self {
686            DmaSubSliceMutImmut::Immutable(dma_sub_slice) => dma_sub_slice.as_ptr(),
687            DmaSubSliceMutImmut::Mutable(dma_sub_slice_mut) => {
688                dma_sub_slice_mut.as_mut_ptr().cast_const()
689            }
690        }
691    }
692
693    /// Returns the length of the currently accessible range of the wrapped
694    /// `SubSlice`.
695    pub fn len(&self) -> usize {
696        match self {
697            DmaSubSliceMutImmut::Immutable(dma_sub_slice) => dma_sub_slice.len(),
698            DmaSubSliceMutImmut::Mutable(dma_sub_slice_mut) => dma_sub_slice_mut.len(),
699        }
700    }
701
702    /// Reconstruct the original `SubSliceMutImmut`.
703    ///
704    /// This function must be called only when the Rust program has observed
705    /// that the DMA operation over the buffer is complete (such as by reading a
706    /// status bit in memory or an MMIO register). Otherwise, any future reads
707    /// by the DMA peripheral may not be consistent with the buffer contents.
708    pub fn take(self) -> SubSliceMutImmut<'a, T> {
709        // We don't need to perform an `acquire` fence, as any DMA operation on
710        // the underlying slice must not have changed its contents:
711        match self {
712            DmaSubSliceMutImmut::Immutable(dma_sub_slice) => {
713                SubSliceMutImmut::Immutable(dma_sub_slice.as_sub_slice())
714            }
715            DmaSubSliceMutImmut::Mutable(dma_sub_slice_mut) => SubSliceMutImmut::Mutable(unsafe {
716                // # Safety
717                //
718                // The user guarantees that there has not been any DMA operation
719                // that changed the buffers contents while the
720                // `DmaSubSliceMutImmut` existed, and hence restoring a unique
721                // Rust slice through `take` is safe. No acquire-fence is
722                // needed, given the bufer contents have not been modified.
723                dma_sub_slice_mut.take_no_acquire()
724            }),
725        }
726    }
727}
728
pub mod immutable_from_into_bytes {
    /// Sealing module for [`ImmutableFromIntoBytes`]
    mod private {
        /// Sealing trait for [`ImmutableFromIntoBytes`]
        pub trait Sealed {}
    }

    /// A type that can be safely converted to an initialized sequence of
    /// bytes, from an arbitrary initialized sequence of bytes, and does not
    /// feature interior mutability.
    ///
    /// The requirements on implementors of this trait are effectively the same
    /// as the combination of zerocopy's [`FromBytes`][zerocopy-frombytes],
    /// [`IntoBytes`][zerocopy-intobytes], and [`Immutable`][zerocopy-immutable]
    /// traits.
    ///
    /// This trait is only implemented for a few select primitives, intended to
    /// be used for DMA operations. It is sealed; all extensions to future types
    /// must ensure they conform to the above trait's requirements and are safe
    /// for DMA operations.
    ///
    /// [zerocopy-frombytes]: https://docs.rs/zerocopy/0.8.42/zerocopy/trait.FromBytes.html
    /// [zerocopy-intobytes]: https://docs.rs/zerocopy/0.8.42/zerocopy/trait.IntoBytes.html
    /// [zerocopy-immutable]: https://docs.rs/zerocopy/0.8.42/zerocopy/trait.Immutable.html
    pub unsafe trait ImmutableFromIntoBytes: private::Sealed {}

    /// Implement [`ImmutableFromIntoBytes`] (and its sealing trait) for a
    /// list of primitive types.
    macro_rules! impl_immutable_from_into_bytes {
        ($($t:ty),* $(,)?) => {
            $(
                impl private::Sealed for $t {}
                // SAFETY: fixed-width unsigned integer primitives are valid
                // for every initialized bit pattern, contain no padding
                // bytes, and have no interior mutability, satisfying the
                // trait's requirements.
                unsafe impl ImmutableFromIntoBytes for $t {}
            )*
        };
    }

    impl_immutable_from_into_bytes!(u8, u16, u32, u64, u128);
}
766
#[cfg(test)]
mod miri_tests {
    //! Tests for the `DmaSlice` wrapper types, written to be checked under
    //! Miri (the module's tests simulate the "DMA peripheral" with plain raw
    //! pointer reads and writes instead of real hardware).

    use core::ptr;

    use super::super::leasable_buffer::{SubSlice, SubSliceMut};
    use super::{DmaSlice, DmaSliceMut, DmaSubSlice, DmaSubSliceMut};

    /// A mock fence that does nothing, as Miri operations are sequential within
    /// a single thread for this test.
    #[derive(Debug, Clone, Copy)]
    struct MockFence {
        _private: (),
    }

    impl MockFence {
        /// `MockFence` does not actually deliver any guarantees and is only
        /// used for testing, thus make it unsafe to construct:
        unsafe fn new() -> Self {
            MockFence { _private: () }
        }
    }

    unsafe impl super::DmaFence for MockFence {
        fn release<T>(self, _buf: *mut [T]) {
            // In a real system, this flushes caches. In Miri, memory is
            // perfectly coherent, and our "DMA" reads are simply raw pointer
            // reads.
        }

        fn acquire<T>(self, _buf: *mut [T]) {
            // In a real system, this invalidates caches. In Miri, memory is
            // perfectly coherent, and our "DMA" writes are simply raw pointer
            // writes.
        }
    }

    // Helper to simulate a DMA peripheral writing to memory. This writes to the
    // memory using the pointer exposed by the DMA wrapper, which is legal
    // because the wrapper owns the mutable borrow.
    //
    // # Safety
    //
    // `dst.add(offset)` must be in bounds of the allocation backing `dst` and
    // valid for writes.
    unsafe fn simulate_dma_write<T: Copy>(dst: *mut T, val: T, offset: usize) {
        let target = dst.add(offset);
        ptr::write(target, val);
    }

    #[test]
    fn test_dma_slice_immut_basic() {
        let fence = unsafe { MockFence::new() };

        let data = [10u8, 20, 30, 40];

        // 1. Create DmaSlice
        let dma = DmaSlice::new(&data, fence);

        // 2. Verify properties
        assert_eq!(dma.len(), 4);
        assert_eq!(dma.as_ptr(), data.as_ptr());

        // 3. Simulate DMA Read (peripheral reads from host memory)
        //
        // In Miri, we just check if we can read via the raw pointer while the
        // borrow is active.
        let val = unsafe { ptr::read(dma.as_ptr().add(1)) };
        assert_eq!(val, 20);

        // 4. Drop (Safe)
        drop(dma);
    }

    #[test]
    fn test_dma_slice_mut_write_cycle() {
        let fence = unsafe { MockFence::new() };

        let mut data = [0u8; 4];
        let data_ptr = data.as_mut_ptr();

        // 1. Create DmaSliceMut
        //
        // SAFETY: We call `take` at the end.
        let dma = unsafe { DmaSliceMut::new(&mut data, fence) };

        // 2. Verify basic pointer integrity
        assert_eq!(dma.as_mut_ptr(), data_ptr);
        assert_eq!(dma.len(), 4);

        // 3. Simulate DMA Write
        //
        // The peripheral writes `0xAA` to index 2.
        unsafe {
            simulate_dma_write(dma.as_mut_ptr(), 0xAA_u8, 2);
        }

        // 4. Restore
        //
        // SAFETY: DMA is "done".
        let restored_slice = unsafe { dma.take(fence) };

        // 5. Verify that the writes are reflected in the buffer:
        assert_eq!(restored_slice, &[0, 0, 0xAA, 0]);
    }

    #[test]
    fn test_dma_slice_mut_static() {
        let fence = unsafe { MockFence::new() };

        // Test specifically for the static constructor which is safe:
        static mut BUFFER: [u32; 2] = [1, 2];

        // 1. Create from static
        //
        // Note: access to static mut is unsafe, but the `new_static`
        // call itself is safe
        let dma = DmaSliceMut::new_static(unsafe { &mut *(&raw mut BUFFER) }, fence);

        // 2. Simulate DMA Write
        unsafe {
            simulate_dma_write(dma.as_mut_ptr(), 99u32, 0);
        }

        // 3. Restore
        let restored = unsafe { dma.take(fence) };

        assert_eq!(restored[0], 99);
        assert_eq!(restored[1], 2);
    }

    #[test]
    fn test_dma_sub_slice_immut() {
        let fence = unsafe { MockFence::new() };

        let data = [100u8, 101, 102, 103, 104];

        // Create a SubSlice with an active range of 1..=2
        let mut sub = SubSlice::new(&data);
        sub.slice(1..=2);

        // The DMA view should only expose the active range (2 elements,
        // starting at data[1]):
        let dma = DmaSubSlice::new(sub, fence);
        assert_eq!(dma.len(), 2);

        unsafe {
            assert_eq!(*dma.as_ptr(), 101);
            assert_eq!(*dma.as_ptr().add(1), 102);
        }
    }

    #[test]
    fn test_dma_sub_slice_mut_offset() {
        let fence = unsafe { MockFence::new() };

        let mut data = [0u64, 1, 2, 3, 4]; // u64 to test stride sizes > 1 byte

        // Create a SubSliceMut with an active range of 2..4
        let mut sub = SubSliceMut::new(&mut data);
        sub.slice(2..4);

        let dma = unsafe { DmaSubSliceMut::new(sub, fence) };
        assert_eq!(dma.len(), 2);

        // Verify Pointer logic
        //
        // dma.as_ptr() should point to data[2]
        unsafe {
            // Write to index 0 of the DMA view (which is index 2 of underlying)
            simulate_dma_write(dma.as_mut_ptr(), 0xFF_FF_FF_FF_u64, 0);

            // Write to index 1 of the DMA view (index 3 of underlying)
            simulate_dma_write(dma.as_mut_ptr(), 0xEE_EE_EE_EE_u64, 1);
        }

        // 3. Restore
        let restored_sub = unsafe { dma.take(fence) };
        let full_slice = restored_sub.take();

        // 4. Validate
        assert_eq!(full_slice[0], 0); // Untouched
        assert_eq!(full_slice[1], 1); // Untouched
        assert_eq!(full_slice[2], 0xFF_FF_FF_FF); // Written
        assert_eq!(full_slice[3], 0xEE_EE_EE_EE); // Written
        assert_eq!(full_slice[4], 4); // Untouched
    }

    #[test]
    fn test_dma_sub_slice_mut_edge_cases() {
        let fence = unsafe { MockFence::new() };

        let mut data = [0u8; 10];
        let data_ptr = data.as_mut_ptr();

        // Case A: Empty Range
        {
            let mut sub = SubSliceMut::new(&mut data);
            sub.slice(5..5);

            let dma = unsafe { DmaSubSliceMut::new(sub, fence) };
            assert_eq!(dma.len(), 0);

            // Verify we return the correct ptr, even if we shouldn't deref it
            assert_eq!(dma.as_mut_ptr(), data_ptr.wrapping_add(5));
            unsafe { dma.take(fence) };
        }

        // Case B: Range at exact end
        {
            let base_addr = data.as_ptr() as usize;

            let mut sub = SubSliceMut::new(&mut data);
            sub.slice(10..10); // End of buffer

            let dma = unsafe { DmaSubSliceMut::new(sub, fence) };

            // Pointer should point one past the end of the array
            let ptr_addr = dma.as_mut_ptr() as usize;
            assert_eq!(ptr_addr, base_addr + 10);

            unsafe { dma.take(fence) };
        }

        // Case C: Range out of bounds (Should be clamped by implementation)
        {
            let mut sub = SubSliceMut::new(&mut data);
            sub.slice(8..15); // End is past 10

            let dma = unsafe { DmaSubSliceMut::new(sub, fence) };

            // Length should be clamped to available (10 - 8 = 2)
            assert_eq!(dma.len(), 2);

            unsafe {
                simulate_dma_write(dma.as_mut_ptr(), 99u8, 0); // index 8
                simulate_dma_write(dma.as_mut_ptr(), 88u8, 1); // index 9
            }

            let res = unsafe { dma.take(fence) };
            let arr = res.take();
            assert_eq!(arr[8], 99);
            assert_eq!(arr[9], 88);
        }
    }
}