planus/impls/byte_slice.rs

1use core::mem::MaybeUninit;
2
3use crate::{
4    errors::ErrorKind, slice_helpers::SliceWithStartOffset, traits::*, Builder, Cursor, Offset,
5};
6
7impl<'buf> TableRead<'buf> for &'buf [u8] {
8    fn from_buffer(
9        buffer: SliceWithStartOffset<'buf>,
10        offset: usize,
11    ) -> core::result::Result<Self, ErrorKind> {
12        let (buffer, len) = super::array_from_buffer(buffer, offset)?;
13        buffer.as_slice().get(..len).ok_or(ErrorKind::InvalidLength)
14    }
15}
16
17impl<'buf> TableRead<'buf> for &'buf [i8] {
18    fn from_buffer(
19        buffer: SliceWithStartOffset<'buf>,
20        offset: usize,
21    ) -> core::result::Result<Self, ErrorKind> {
22        let (buffer, len) = super::array_from_buffer(buffer, offset)?;
23        let slice = buffer
24            .as_slice()
25            .get(..len)
26            .ok_or(ErrorKind::InvalidLength)?;
27        Ok(unsafe { core::slice::from_raw_parts(slice.as_ptr() as *const i8, slice.len()) })
28    }
29}
30
// Serializes a byte slice into the builder as a flatbuffers-style vector:
// a `u32` length prefix followed by the raw bytes.
impl WriteAsOffset<[u8]> for [u8] {
    #[allow(clippy::let_and_return)]
    fn prepare(&self, builder: &mut Builder) -> Offset<[u8]> {
        // With the `bytes-cache` feature, identical byte strings are
        // deduplicated: if these exact bytes were already written, return the
        // previously stored offset instead of serializing them again.
        #[cfg(feature = "bytes-cache")]
        let hash = {
            let hash = builder.bytes_cache.hash(self);
            if let Some(offset) = builder
                .bytes_cache
                .get(builder.inner.as_slice(), hash, self)
            {
                return offset.into();
            }
            // Cache miss: keep the hash so we can insert after writing below.
            hash
        };

        // SAFETY: We make sure to write the 4+len bytes inside the closure
        unsafe {
            builder.write_with(
                self.len().checked_add(4).unwrap(),
                u32::ALIGNMENT_MASK,
                |buffer_position, bytes| {
                    let bytes = bytes.as_mut_ptr();

                    // Write the `u32` length prefix into the first 4 bytes.
                    (self.len() as u32).write(
                        Cursor::new(&mut *(bytes as *mut [MaybeUninit<u8>; 4])),
                        buffer_position,
                    );

                    // Copy the payload directly after the length prefix.
                    core::ptr::copy_nonoverlapping(
                        self.as_ptr(),
                        bytes.add(4) as *mut u8,
                        self.len(),
                    );
                },
            )
        }
        let offset = builder.current_offset();

        // Record the freshly written bytes so later identical slices can
        // reuse this offset.
        #[cfg(feature = "bytes-cache")]
        builder
            .bytes_cache
            .insert(hash, offset.into(), builder.inner.as_slice());

        offset
    }
}
77
// Serializes an `i8` slice into the builder as a flatbuffers-style vector:
// a `u32` length prefix followed by the raw bytes.
impl WriteAsOffset<[i8]> for [i8] {
    #[allow(clippy::let_and_return)]
    fn prepare(&self, builder: &mut Builder) -> Offset<[i8]> {
        // With the `bytes-cache` feature, deduplicate identical contents.
        // The cache operates on `&[u8]`, so the `i8` slice is reinterpreted
        // byte-for-byte (same size/alignment, all bit patterns valid).
        #[cfg(feature = "bytes-cache")]
        let hash = {
            let v: &[u8] =
                unsafe { core::slice::from_raw_parts(self.as_ptr() as *const u8, self.len()) };
            let hash = builder.bytes_cache.hash(v);
            if let Some(offset) = builder.bytes_cache.get(builder.inner.as_slice(), hash, v) {
                return offset.into();
            }
            // Cache miss: keep the hash so we can insert after writing below.
            hash
        };

        // SAFETY: We make sure to write the 4+len bytes inside the closure
        unsafe {
            builder.write_with(
                self.len().checked_add(4).unwrap(),
                u32::ALIGNMENT_MASK,
                |buffer_position, bytes| {
                    let bytes = bytes.as_mut_ptr();

                    // Write the `u32` length prefix into the first 4 bytes.
                    (self.len() as u32).write(
                        Cursor::new(&mut *(bytes as *mut [MaybeUninit<u8>; 4])),
                        buffer_position,
                    );

                    // Copy the payload directly after the length prefix.
                    core::ptr::copy_nonoverlapping(
                        self.as_ptr(),
                        bytes.add(4) as *mut i8,
                        self.len(),
                    );
                },
            )
        }
        let offset = builder.current_offset();

        // Record the freshly written bytes so later identical slices can
        // reuse this offset.
        #[cfg(feature = "bytes-cache")]
        builder
            .bytes_cache
            .insert(hash, offset.into(), builder.inner.as_slice());

        offset
    }
}
123
124impl<const N: usize> WriteAsOffset<[u8]> for [u8; N] {
125    fn prepare(&self, builder: &mut Builder) -> Offset<[u8]> {
126        WriteAsOffset::prepare(self.as_slice(), builder)
127    }
128}
129
130impl<const N: usize> WriteAs<Offset<[u8]>> for [u8; N] {
131    type Prepared = Offset<[u8]>;
132
133    fn prepare(&self, builder: &mut Builder) -> Offset<[u8]> {
134        WriteAsOffset::prepare(self.as_slice(), builder)
135    }
136}
137
138impl<const N: usize> WriteAsOptional<Offset<[u8]>> for [u8; N] {
139    type Prepared = Offset<[u8]>;
140
141    #[inline]
142    fn prepare(&self, builder: &mut Builder) -> Option<Offset<[u8]>> {
143        Some(WriteAsOffset::prepare(self.as_slice(), builder))
144    }
145}
146
147impl<const N: usize> WriteAsOffset<[i8]> for [i8; N] {
148    fn prepare(&self, builder: &mut Builder) -> Offset<[i8]> {
149        WriteAsOffset::prepare(self.as_slice(), builder)
150    }
151}
152
153impl<const N: usize> WriteAs<Offset<[i8]>> for [i8; N] {
154    type Prepared = Offset<[i8]>;
155
156    fn prepare(&self, builder: &mut Builder) -> Offset<[i8]> {
157        WriteAsOffset::prepare(self.as_slice(), builder)
158    }
159}
160
161impl<const N: usize> WriteAsOptional<Offset<[i8]>> for [i8; N] {
162    type Prepared = Offset<[i8]>;
163
164    #[inline]
165    fn prepare(&self, builder: &mut Builder) -> Option<Offset<[i8]>> {
166        Some(WriteAsOffset::prepare(self.as_slice(), builder))
167    }
168}