#![allow(dead_code)]
use std::ops::{Deref, DerefMut};

use bytes::Bytes;

/// Marker trait, implemented for `usize` only when `N` is one of the supported power-of-two
/// values (1 through 512).
pub trait PowerOfTwo<const N: usize> {}
impl<const N: usize> PowerOfTwo<N> for usize where usize: sealed::Sealed<N> {}

mod sealed {
    pub trait Sealed<const N: usize> {}

    impl Sealed<1> for usize {}
    impl Sealed<2> for usize {}
    impl Sealed<4> for usize {}
    impl Sealed<8> for usize {}
    impl Sealed<16> for usize {}
    impl Sealed<32> for usize {}
    impl Sealed<64> for usize {}
    impl Sealed<128> for usize {}
    impl Sealed<256> for usize {}
    impl Sealed<512> for usize {}
}

/// A variant of [`BytesMut`][bytes::BytesMut] that freezes into a [`Bytes`] whose start address
/// is guaranteed to be a multiple of the target byte alignment.
///
/// Internally, it accomplishes this by over-allocating by up to the alignment size and padding
/// the front as necessary. Reads and writes can only access the region after the padding.
///
/// The alignment must be a power of two no greater than 512; any other value is a compile-time
/// error.
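///
/// # Example
///
/// A minimal usage sketch:
///
/// ```ignore
/// let mut buf = AlignedBytesMut::<64>::with_capacity(4);
/// buf.extend_from_slice(b"data");
/// let bytes = buf.freeze();
/// assert_eq!(bytes.as_ptr().align_offset(64), 0);
/// ```
///
/// A non-power-of-two alignment such as `AlignedBytesMut::<3>` fails to compile, because no
/// `sealed::Sealed<3>` impl exists to satisfy the `usize: PowerOfTwo<3>` bound.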
pub(crate) struct AlignedBytesMut<const ALIGN: usize> {
    buf: Vec<u8>,
    /// Number of bytes at the front of `buf` reserved to align the usable region.
    padding: usize,
    /// Usable capacity as requested by the caller, excluding the padding.
    capacity: usize,
}

impl<const ALIGN: usize> AlignedBytesMut<ALIGN>
where
    usize: PowerOfTwo<ALIGN>,
{
    /// Allocate a new mutable buffer with capacity to hold at least `capacity` bytes.
    ///
    /// The mutable buffer may allocate more than the requested amount to pad the memory for
    /// alignment.
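    ///
    /// For example, with `ALIGN = 128` and `capacity = 1`, `(1 + 127).next_multiple_of(128)`
    /// reserves 128 bytes; if the allocation happens to need the maximum 127 bytes of front
    /// padding, exactly 1 usable byte remains.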
    pub fn with_capacity(capacity: usize) -> Self {
        // Reserve enough extra bytes that, even after padding the front by up to `ALIGN - 1`
        // bytes, at least `capacity` usable bytes remain. Rounding up to a multiple of `ALIGN`
        // may reserve slightly more.
        let allocation_size = (capacity + ALIGN - 1).next_multiple_of(ALIGN);
        let mut buf = Vec::<u8>::with_capacity(allocation_size);
        let padding = buf.as_ptr().align_offset(ALIGN);
        // SAFETY: the first `padding` bytes remain uninitialized, but they are never exposed:
        // `Deref`, `DerefMut`, and `freeze` all skip past them.
        unsafe {
            buf.set_len(padding);
        }

        Self {
            buf,
            padding,
            capacity,
        }
    }

    /// Usable capacity of this buffer.
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    /// Set the length of the mutable buffer directly.
    ///
    /// # Safety
    ///
    /// The caller is responsible for ensuring that the first `len` bytes of the usable region
    /// have been initialized; the bound against the original capacity request is enforced with
    /// an assertion.
    ///
    /// Failure to do so could make uninitialized memory readable.
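    ///
    /// A sketch of a sound shrink (growing `len` past the initialized region would expose
    /// uninitialized memory):
    ///
    /// ```ignore
    /// let mut buf = AlignedBytesMut::<64>::with_capacity(4);
    /// buf.extend_from_slice(b"abcd");
    /// // SAFETY: all 4 bytes were initialized, so a length of 2 is in bounds.
    /// unsafe { buf.set_len(2) };
    /// assert_eq!(&*buf, b"ab");
    /// ```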
    pub unsafe fn set_len(&mut self, len: usize) {
        assert!(
            len <= self.capacity,
            "set_len call out of bounds: {} > {}",
            len,
            self.capacity
        );
        // SAFETY: `padding` bytes precede the usable region, and the caller guarantees the
        // first `len` usable bytes are initialized.
        unsafe { self.buf.set_len(len + self.padding) }
    }

    /// Extend this mutable buffer with the contents of the provided slice.
    pub fn extend_from_slice(&mut self, slice: &[u8]) {
        // The internal `buf` is front-padded, so appends land after the padded region. Note
        // that writing more than `capacity` bytes in total can force `Vec` to reallocate and
        // move to a differently-aligned address; `freeze` asserts the alignment still holds.
        self.buf.extend_from_slice(slice)
    }

    /// Freeze the existing allocation into a read-only [`Bytes`] that is guaranteed to begin
    /// at an address aligned to `ALIGN`.
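    ///
    /// The padding is not visible to readers of the returned [`Bytes`]:
    ///
    /// ```ignore
    /// let mut buf = AlignedBytesMut::<32>::with_capacity(2);
    /// buf.extend_from_slice(b"ok");
    /// let bytes = buf.freeze();
    /// assert_eq!(bytes.len(), 2);
    /// ```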
    pub fn freeze(self) -> Bytes {
        // bytes_unaligned will contain the entire allocation, so that on Drop the entire buf
        // is freed.
        //
        // bytes_aligned is a sliced view on top of bytes_unaligned.
        //
        // bytes_aligned
        //  |  parent    \ *ptr
        //  v             |
        // bytes_unaligned|
        //  |             |
        //  | *ptr        |
        //  v             v
        // +------------+------------------+----------------+
        // | padding    | content          | spare capacity |
        // +------------+------------------+----------------+
        let bytes_unaligned = Bytes::from(self.buf);
        let bytes_aligned = bytes_unaligned.slice(self.padding..);

        assert_eq!(
            bytes_aligned.as_ptr().align_offset(ALIGN),
            0,
            "bytes_aligned must be aligned to {}",
            ALIGN
        );

        bytes_aligned
    }
}

impl<const ALIGN: usize> Deref for AlignedBytesMut<ALIGN>
where
    usize: PowerOfTwo<ALIGN>,
{
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        // Expose only the usable region after the front padding.
        &self.buf[self.padding..]
    }
}

impl<const ALIGN: usize> DerefMut for AlignedBytesMut<ALIGN>
where
    usize: PowerOfTwo<ALIGN>,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.buf[self.padding..]
    }
}

#[cfg(test)]
mod tests {
    use crate::aligned::AlignedBytesMut;

    #[test]
    fn test_align() {
        let mut buf = AlignedBytesMut::<128>::with_capacity(1);
        buf.extend_from_slice(b"a");

        let data = buf.freeze();

        assert_eq!(data.as_ref(), b"a");
        assert_eq!(data.as_ptr().align_offset(128), 0);
    }

    #[test]
    fn test_extend() {
        let mut buf = AlignedBytesMut::<128>::with_capacity(256);
        buf.extend_from_slice(b"a");
        buf.extend_from_slice(b"bcdefgh");

        let data = buf.freeze();
        assert_eq!(data.as_ref(), b"abcdefgh");
    }
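
    // A sketch exercising `set_len` for shrinking, using only the API defined in this module:
    // shrinking within the initialized region must stay visible through Deref and freeze.
    #[test]
    fn test_set_len_shrink() {
        let mut buf = AlignedBytesMut::<64>::with_capacity(4);
        buf.extend_from_slice(b"abcd");
        // SAFETY: all 4 bytes were initialized, so shrinking to 2 is in bounds and sound.
        unsafe { buf.set_len(2) };
        assert_eq!(&*buf, b"ab");

        let data = buf.freeze();
        assert_eq!(data.as_ref(), b"ab");
        assert_eq!(data.as_ptr().align_offset(64), 0);
    }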
}