diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9c11b01b..17ee856e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,7 +41,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.81.0 + toolchain: stable override: true - name: Install Clang (Ubuntu) diff --git a/Cargo.toml b/Cargo.toml index e04ad6be..cc93293e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,7 +37,8 @@ arbitrary = "1.0" tiny-keccak = "2.0" crunchy = { version = "0.2.2", default-features = false } serde = { version = "1.0.101", default-features = false } -codec = { package = "parity-scale-codec", version = "3.7.4", default-features = false } +scale-codec = { package = "parity-scale-codec", version = "3.7.4", default-features = false } +jam-codec = { version = "0.1.0", default-features = false } log = { version = "0.4.17", default-features = false } schemars = ">=0.8.12" tempfile = "3.1.0" diff --git a/bounded-collections/CHANGELOG.md b/bounded-collections/CHANGELOG.md index c9e95a1b..56ea8155 100644 --- a/bounded-collections/CHANGELOG.md +++ b/bounded-collections/CHANGELOG.md @@ -4,6 +4,9 @@ The format is based on [Keep a Changelog]. 
[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/ +## [0.3.0] - 2025-05-21 +- Jam codec support [#914](https://github.com/paritytech/parity-common/pull/914) + ## [0.2.4] - 2025-03-20 - Implement DecodeWithMemTracking for BoundedBTreeMap [#906](https://github.com/paritytech/parity-common/pull/906) diff --git a/bounded-collections/Cargo.toml b/bounded-collections/Cargo.toml index b415c7c9..c68c9c92 100644 --- a/bounded-collections/Cargo.toml +++ b/bounded-collections/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "bounded-collections" -version = "0.2.4" +version = "0.3.0" description = "Bounded types and their supporting traits" readme = "README.md" rust-version = "1.79.0" @@ -12,8 +12,9 @@ repository.workspace = true [dependencies] serde = { workspace = true, features = ["alloc", "derive"], optional = true } -codec = { workspace = true, features = ["max-encoded-len"] } -scale-info = { workspace = true, features = ["derive"] } +scale-codec = { workspace = true, default-features = false, features = ["max-encoded-len"], optional = true } +scale-info = { workspace = true, features = ["derive"], optional = true } +jam-codec = { workspace = true, features = ["derive","max-encoded-len"], optional = true } log = { workspace = true } schemars = { workspace = true, optional = true } @@ -25,7 +26,9 @@ default = ["std"] json-schema = ["dep:schemars"] std = [ "log/std", - "codec/std", + "jam-codec?/std", + "scale-codec?/std", "scale-info/std", "serde/std", ] +scale-codec = [ "dep:scale-codec", "scale-info" ] diff --git a/bounded-collections/src/bounded_btree_map.rs b/bounded-collections/src/bounded_btree_map.rs index 9c04ae97..09a17dcb 100644 --- a/bounded-collections/src/bounded_btree_map.rs +++ b/bounded-collections/src/bounded_btree_map.rs @@ -19,7 +19,6 @@ use crate::{Get, TryCollect}; use alloc::collections::BTreeMap; -use codec::{Compact, Decode, DecodeWithMemTracking, Encode, MaxEncodedLen}; use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; #[cfg(feature = "serde")] use serde::{
@@ -35,8 +34,9 @@ use serde::{ /// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the /// map. All internal operations ensure this bound is respected. #[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))] -#[derive(Encode, scale_info::TypeInfo)] -#[scale_info(skip_type_params(S))] +#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))] +#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))] +#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))] pub struct BoundedBTreeMap( BTreeMap, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData, @@ -99,79 +99,6 @@ where } } -// Struct which allows prepending the compact after reading from an input. -pub(crate) struct PrependCompactInput<'a, I> { - encoded_len: &'a [u8], - read: usize, - inner: &'a mut I, -} - -impl<'a, I: codec::Input> codec::Input for PrependCompactInput<'a, I> { - fn remaining_len(&mut self) -> Result, codec::Error> { - let remaining_compact = self.encoded_len.len().saturating_sub(self.read); - Ok(self.inner.remaining_len()?.map(|len| len.saturating_add(remaining_compact))) - } - - fn read(&mut self, into: &mut [u8]) -> Result<(), codec::Error> { - if into.is_empty() { - return Ok(()); - } - - let remaining_compact = self.encoded_len.len().saturating_sub(self.read); - if remaining_compact > 0 { - let to_read = into.len().min(remaining_compact); - into[..to_read].copy_from_slice(&self.encoded_len[self.read..][..to_read]); - self.read += to_read; - - if to_read < into.len() { - // Buffer not full, keep reading the inner. - self.inner.read(&mut into[to_read..]) - } else { - // Buffer was filled by the compact. - Ok(()) - } - } else { - // Prepended compact has been read, just read from inner. 
- self.inner.read(into) - } - } -} - -impl Decode for BoundedBTreeMap -where - K: Decode + Ord, - V: Decode, - S: Get, -{ - fn decode(input: &mut I) -> Result { - // Fail early if the len is too big. This is a compact u32 which we will later put back. - let compact = >::decode(input)?; - if compact.0 > S::get() { - return Err("BoundedBTreeMap exceeds its limit".into()); - } - // Reconstruct the original input by prepending the length we just read, then delegate the decoding to BTreeMap. - let inner = BTreeMap::decode(&mut PrependCompactInput { - encoded_len: compact.encode().as_ref(), - read: 0, - inner: input, - })?; - Ok(Self(inner, PhantomData)) - } - - fn skip(input: &mut I) -> Result<(), codec::Error> { - BTreeMap::::skip(input) - } -} - -impl DecodeWithMemTracking for BoundedBTreeMap -where - K: DecodeWithMemTracking + Ord, - V: DecodeWithMemTracking, - S: Get, - BoundedBTreeMap: Decode, -{ -} - impl BoundedBTreeMap where S: Get, @@ -439,19 +366,6 @@ impl<'a, K, V, S> IntoIterator for &'a mut BoundedBTreeMap { } } -impl MaxEncodedLen for BoundedBTreeMap -where - K: MaxEncodedLen, - V: MaxEncodedLen, - S: Get, -{ - fn max_encoded_len() -> usize { - Self::bound() - .saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len())) - .saturating_add(codec::Compact(S::get()).encoded_size()) - } -} - impl Deref for BoundedBTreeMap where K: Ord, @@ -495,17 +409,6 @@ where } } -impl codec::DecodeLength for BoundedBTreeMap { - fn len(self_encoded: &[u8]) -> Result { - // `BoundedBTreeMap` is stored just a `BTreeMap`, which is stored as a - // `Compact` with its length followed by an iteration of its items. We can just use - // the underlying implementation. - as codec::DecodeLength>::len(self_encoded) - } -} - -impl codec::EncodeLike> for BoundedBTreeMap where BTreeMap: Encode {} - impl TryCollect> for I where K: Ord, @@ -523,12 +426,132 @@ where } } +#[cfg(any(feature = "scale-codec", feature = "jam-codec"))] +macro_rules! 
codec_impl { + ($codec:ident) => { + use super::*; + use $codec::{ + Compact, Decode, DecodeLength, DecodeWithMemTracking, Encode, EncodeLike, Error, Input, MaxEncodedLen, + }; + + // Struct which allows prepending the compact after reading from an input. + pub(crate) struct PrependCompactInput<'a, I> { + pub encoded_len: &'a [u8], + pub read: usize, + pub inner: &'a mut I, + } + + impl<'a, I: Input> Input for PrependCompactInput<'a, I> { + fn remaining_len(&mut self) -> Result, Error> { + let remaining_compact = self.encoded_len.len().saturating_sub(self.read); + Ok(self.inner.remaining_len()?.map(|len| len.saturating_add(remaining_compact))) + } + + fn read(&mut self, into: &mut [u8]) -> Result<(), Error> { + if into.is_empty() { + return Ok(()); + } + + let remaining_compact = self.encoded_len.len().saturating_sub(self.read); + if remaining_compact > 0 { + let to_read = into.len().min(remaining_compact); + into[..to_read].copy_from_slice(&self.encoded_len[self.read..][..to_read]); + self.read += to_read; + + if to_read < into.len() { + // Buffer not full, keep reading the inner. + self.inner.read(&mut into[to_read..]) + } else { + // Buffer was filled by the compact. + Ok(()) + } + } else { + // Prepended compact has been read, just read from inner. + self.inner.read(into) + } + } + } + + impl Decode for BoundedBTreeMap + where + K: Decode + Ord, + V: Decode, + S: Get, + { + fn decode(input: &mut I) -> Result { + // Fail early if the len is too big. This is a compact u32 which we will later put back. + let compact = >::decode(input)?; + if compact.0 > S::get() { + return Err("BoundedBTreeMap exceeds its limit".into()); + } + // Reconstruct the original input by prepending the length we just read, then delegate the decoding to BTreeMap. 
+ let inner = BTreeMap::decode(&mut PrependCompactInput { + encoded_len: compact.encode().as_ref(), + read: 0, + inner: input, + })?; + Ok(Self(inner, PhantomData)) + } + + fn skip(input: &mut I) -> Result<(), Error> { + BTreeMap::::skip(input) + } + } + + impl DecodeWithMemTracking for BoundedBTreeMap + where + K: DecodeWithMemTracking + Ord, + V: DecodeWithMemTracking, + S: Get, + BoundedBTreeMap: Decode, + { + } + + impl MaxEncodedLen for BoundedBTreeMap + where + K: MaxEncodedLen, + V: MaxEncodedLen, + S: Get, + { + fn max_encoded_len() -> usize { + Self::bound() + .saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len())) + .saturating_add(Compact(S::get()).encoded_size()) + } + } + + impl EncodeLike> for BoundedBTreeMap where BTreeMap: Encode {} + + impl DecodeLength for BoundedBTreeMap { + fn len(self_encoded: &[u8]) -> Result { + // `BoundedBTreeMap` is stored just a `BTreeMap`, which is stored as a + // `Compact` with its length followed by an iteration of its items. We can just use + // the underlying implementation. 
+ as DecodeLength>::len(self_encoded) + } + } + }; +} + +#[cfg(feature = "scale-codec")] +mod scale_codec_impl { + codec_impl!(scale_codec); +} + +#[cfg(feature = "jam-codec")] +mod jam_codec_impl { + codec_impl!(jam_codec); +} + #[cfg(test)] mod test { use super::*; use crate::ConstU32; use alloc::{vec, vec::Vec}; - use codec::{CompactLen, Input}; + #[cfg(feature = "scale-codec")] + use scale_codec::{Compact, CompactLen, Decode, Encode, Input}; + #[cfg(feature = "scale-codec")] + use scale_codec_impl::PrependCompactInput; fn map_from_keys(keys: &[K]) -> BTreeMap where @@ -546,6 +569,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn encoding_same_as_unbounded_map() { let b = boundedmap_from_keys::>(&[1, 2, 3, 4, 5, 6]); let m = map_from_keys(&[1, 2, 3, 4, 5, 6]); @@ -554,6 +578,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn encode_then_decode_gives_original_map() { let b = boundedmap_from_keys::>(&[1, 2, 3, 4, 5, 6]); let b_encode_decode = BoundedBTreeMap::>::decode(&mut &b.encode()[..]).unwrap(); @@ -603,6 +628,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn too_big_fail_to_decode() { let v: Vec<(u32, u32)> = vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)]; assert_eq!( @@ -612,6 +638,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn dont_consume_more_data_than_bounded_len() { let m = map_from_keys(&[1, 2, 3, 4, 5, 6]); let data = m.encode(); @@ -779,6 +806,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn prepend_compact_input_works() { let encoded_len = Compact(3u32).encode(); let inner = [2, 3, 4]; @@ -805,6 +833,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn prepend_compact_input_incremental_read_works() { let encoded_len = Compact(3u32).encode(); let inner = [2, 3, 4]; diff --git a/bounded-collections/src/bounded_btree_set.rs b/bounded-collections/src/bounded_btree_set.rs index 0942f34e..fc393325 100644 --- a/bounded-collections/src/bounded_btree_set.rs +++ 
b/bounded-collections/src/bounded_btree_set.rs @@ -19,7 +19,6 @@ use crate::{Get, TryCollect}; use alloc::collections::BTreeSet; -use codec::{Compact, Decode, Encode, MaxEncodedLen}; use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; #[cfg(feature = "serde")] use serde::{ @@ -35,8 +34,9 @@ use serde::{ /// Unlike a standard `BTreeSet`, there is an enforced upper limit to the number of items in the /// set. All internal operations ensure this bound is respected. #[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))] -#[derive(Encode, scale_info::TypeInfo)] -#[scale_info(skip_type_params(S))] +#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))] +#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))] +#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))] pub struct BoundedBTreeSet(BTreeSet, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData); #[cfg(feature = "serde")] @@ -79,7 +79,7 @@ where while let Some(value) = seq.next_element()? { if values.len() >= max { - return Err(A::Error::custom("out of bounds")) + return Err(A::Error::custom("out of bounds")); } values.insert(value); } @@ -96,29 +96,6 @@ where } } -impl Decode for BoundedBTreeSet -where - T: Decode + Ord, - S: Get, -{ - fn decode(input: &mut I) -> Result { - // Same as the underlying implementation for `Decode` on `BTreeSet`, except we fail early if - // the len is too big. 
- let len: u32 = >::decode(input)?.into(); - if len > S::get() { - return Err("BoundedBTreeSet exceeds its limit".into()) - } - input.descend_ref()?; - let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?; - input.ascend_ref(); - Ok(Self(inner, PhantomData)) - } - - fn skip(input: &mut I) -> Result<(), codec::Error> { - BTreeSet::::skip(input) - } -} - impl BoundedBTreeSet where S: Get, @@ -318,18 +295,6 @@ impl<'a, T, S> IntoIterator for &'a BoundedBTreeSet { } } -impl MaxEncodedLen for BoundedBTreeSet -where - T: MaxEncodedLen, - S: Get, -{ - fn max_encoded_len() -> usize { - Self::bound() - .saturating_mul(T::max_encoded_len()) - .saturating_add(codec::Compact(S::get()).encoded_size()) - } -} - impl Deref for BoundedBTreeSet where T: Ord, @@ -373,17 +338,6 @@ where } } -impl codec::DecodeLength for BoundedBTreeSet { - fn len(self_encoded: &[u8]) -> Result { - // `BoundedBTreeSet` is stored just a `BTreeSet`, which is stored as a - // `Compact` with its length followed by an iteration of its items. We can just use - // the underlying implementation. - as codec::DecodeLength>::len(self_encoded) - } -} - -impl codec::EncodeLike> for BoundedBTreeSet where BTreeSet: Encode {} - impl TryCollect> for I where T: Ord, @@ -401,12 +355,76 @@ where } } +#[cfg(any(feature = "scale-codec", feature = "jam-codec"))] +macro_rules! codec_impl { + ($codec:ident) => { + use super::*; + use $codec::{Compact, Decode, DecodeLength, Encode, EncodeLike, Error, Input, MaxEncodedLen}; + impl Decode for BoundedBTreeSet + where + T: Decode + Ord, + S: Get, + { + fn decode(input: &mut I) -> Result { + // Same as the underlying implementation for `Decode` on `BTreeSet`, except we fail early if + // the len is too big. 
+ let len: u32 = >::decode(input)?.into(); + if len > S::get() { + return Err("BoundedBTreeSet exceeds its limit".into()); + } + input.descend_ref()?; + let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?; + input.ascend_ref(); + Ok(Self(inner, PhantomData)) + } + + fn skip(input: &mut I) -> Result<(), Error> { + BTreeSet::::skip(input) + } + } + + impl MaxEncodedLen for BoundedBTreeSet + where + T: MaxEncodedLen, + S: Get, + { + fn max_encoded_len() -> usize { + Self::bound() + .saturating_mul(T::max_encoded_len()) + .saturating_add(Compact(S::get()).encoded_size()) + } + } + + impl DecodeLength for BoundedBTreeSet { + fn len(self_encoded: &[u8]) -> Result { + // `BoundedBTreeSet` is stored just a `BTreeSet`, which is stored as a + // `Compact` with its length followed by an iteration of its items. We can just use + // the underlying implementation. + as DecodeLength>::len(self_encoded) + } + } + + impl EncodeLike> for BoundedBTreeSet where BTreeSet: Encode {} + }; +} + +#[cfg(feature = "scale-codec")] +mod scale_codec_impl { + codec_impl!(scale_codec); +} + +#[cfg(feature = "jam-codec")] +mod jam_codec_impl { + codec_impl!(jam_codec); +} + #[cfg(test)] mod test { use super::*; use crate::ConstU32; use alloc::{vec, vec::Vec}; - use codec::CompactLen; + #[cfg(feature = "scale-codec")] + use scale_codec::{Compact, CompactLen, Decode, Encode}; fn set_from_keys(keys: &[T]) -> BTreeSet where @@ -424,6 +442,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn encoding_same_as_unbounded_set() { let b = boundedset_from_keys::>(&[1, 2, 3, 4, 5, 6]); let m = set_from_keys(&[1, 2, 3, 4, 5, 6]); @@ -473,6 +492,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn too_big_fail_to_decode() { let v: Vec = vec![1, 2, 3, 4, 5]; assert_eq!( @@ -482,6 +502,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn dont_consume_more_data_than_bounded_len() { let s = set_from_keys(&[1, 2, 3, 4, 5, 6]); let data = s.encode(); diff --git 
a/bounded-collections/src/bounded_vec.rs b/bounded-collections/src/bounded_vec.rs index 99d293bc..82fd2660 100644 --- a/bounded-collections/src/bounded_vec.rs +++ b/bounded-collections/src/bounded_vec.rs @@ -21,7 +21,6 @@ use super::WeakBoundedVec; use crate::{Get, TryCollect}; use alloc::vec::Vec; -use codec::{decode_vec_with_len, Compact, Decode, DecodeWithMemTracking, Encode, EncodeLike, MaxEncodedLen}; use core::{ marker::PhantomData, ops::{Deref, Index, IndexMut, RangeBounds}, @@ -41,8 +40,9 @@ use serde::{ /// As the name suggests, the length of the queue is always bounded. All internal operations ensure /// this bound is respected. #[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))] -#[derive(Encode, scale_info::TypeInfo)] -#[scale_info(skip_type_params(S))] +#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))] +#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))] +#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))] #[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))] pub struct BoundedVec(pub(super) Vec, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData); @@ -53,71 +53,70 @@ pub trait TruncateFrom { } #[cfg(feature = "serde")] -impl<'de, T, S: Get> Deserialize<'de> for BoundedVec -where - T: Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result +mod serde_impl { + use super::*; + + impl<'de, T, S: Get> Deserialize<'de> for BoundedVec where - D: Deserializer<'de>, + T: Deserialize<'de>, { - struct VecVisitor>(PhantomData<(T, S)>); - - impl<'de, T, S: Get> Visitor<'de> for VecVisitor + fn deserialize(deserializer: D) -> Result where - T: Deserialize<'de>, + D: Deserializer<'de>, { - type Value = Vec; - - fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result { - formatter.write_str("a sequence") - } + struct VecVisitor>(PhantomData<(T, S)>); - fn visit_seq(self, mut seq: A) -> Result + impl<'de, T, S: Get> 
Visitor<'de> for VecVisitor where - A: SeqAccess<'de>, + T: Deserialize<'de>, { - let size = seq.size_hint().unwrap_or(0); - let max = match usize::try_from(S::get()) { - Ok(n) => n, - Err(_) => return Err(A::Error::custom("can't convert to usize")), - }; - if size > max { - Err(A::Error::custom("out of bounds")) - } else { - let mut values = Vec::with_capacity(size); - - while let Some(value) = seq.next_element()? { - if values.len() >= max { - return Err(A::Error::custom("out of bounds")) + type Value = Vec; + + fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result { + formatter.write_str("a sequence") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let size = seq.size_hint().unwrap_or(0); + let max = match usize::try_from(S::get()) { + Ok(n) => n, + Err(_) => return Err(A::Error::custom("can't convert to usize")), + }; + if size > max { + Err(A::Error::custom("out of bounds")) + } else { + let mut values = Vec::with_capacity(size); + + while let Some(value) = seq.next_element()? { + if values.len() >= max { + return Err(A::Error::custom("out of bounds")); + } + values.push(value); } - values.push(value); - } - Ok(values) + Ok(values) + } } } - } - let visitor: VecVisitor = VecVisitor(PhantomData); - deserializer - .deserialize_seq(visitor) - .map(|v| BoundedVec::::try_from(v).map_err(|_| Error::custom("out of bounds")))? + let visitor: VecVisitor = VecVisitor(PhantomData); + deserializer + .deserialize_seq(visitor) + .map(|v| BoundedVec::::try_from(v).map_err(|_| Error::custom("out of bounds")))? + } } } /// A bounded slice. /// /// Similar to a `BoundedVec`, but not owned and cannot be decoded. 
-#[derive(Encode, scale_info::TypeInfo)] +#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))] +#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))] pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData); -// `BoundedSlice`s encode to something which will always decode into a `BoundedVec`, -// `WeakBoundedVec`, or a `Vec`. -impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} -impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} -impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} - impl<'a, T, BoundSelf, BoundRhs> PartialEq> for BoundedSlice<'a, T, BoundSelf> where T: PartialEq, @@ -268,28 +267,6 @@ impl<'a, T, S: Get> BoundedSlice<'a, T, S> { } } -impl> Decode for BoundedVec { - fn decode(input: &mut I) -> Result { - // Same as the underlying implementation for `Decode` on `Vec`, except we fail early if the - // len is too big. - let len: u32 = >::decode(input)?.into(); - if len > S::get() { - return Err("BoundedVec exceeds its limit".into()) - } - let inner = decode_vec_with_len(input, len as usize)?; - Ok(Self(inner, PhantomData)) - } - - fn skip(input: &mut I) -> Result<(), codec::Error> { - Vec::::skip(input) - } -} - -impl> DecodeWithMemTracking for BoundedVec {} - -// `BoundedVec`s encode to something which will always decode as a `Vec`. -impl> EncodeLike> for BoundedVec {} - impl BoundedVec { /// Create `Self` with no items. pub fn new() -> Self { @@ -798,14 +775,6 @@ impl<'a, T, S> core::iter::IntoIterator for &'a mut BoundedVec { } } -impl codec::DecodeLength for BoundedVec { - fn len(self_encoded: &[u8]) -> Result { - // `BoundedVec` stored just a `Vec`, thus the length is at the beginning in - // `Compact` form, and same implementation as `Vec` can be used. 
- as codec::DecodeLength>::len(self_encoded) - } -} - impl PartialEq> for BoundedVec where T: PartialEq, @@ -892,22 +861,6 @@ impl> Ord for BoundedVec { } } -impl MaxEncodedLen for BoundedVec -where - T: MaxEncodedLen, - S: Get, - BoundedVec: Encode, -{ - fn max_encoded_len() -> usize { - // BoundedVec encodes like Vec which encodes like [T], which is a compact u32 - // plus each item in the slice: - // See: https://docs.substrate.io/reference/scale-codec/ - codec::Compact(S::get()) - .encoded_size() - .saturating_add(Self::bound().saturating_mul(T::max_encoded_len())) - } -} - impl TryCollect> for I where I: ExactSizeIterator + Iterator, @@ -924,13 +877,91 @@ where } } +#[cfg(any(feature = "scale-codec", feature = "jam-codec"))] +macro_rules! codec_impl { + ($codec:ident) => { + use super::*; + + use $codec::{ + decode_vec_with_len, Compact, Decode, DecodeLength, DecodeWithMemTracking, Encode, EncodeLike, Error, + Input, MaxEncodedLen, + }; + + impl> Decode for BoundedVec { + fn decode(input: &mut I) -> Result { + // Same as the underlying implementation for `Decode` on `Vec`, except we fail early if the + // len is too big. + let len: u32 = >::decode(input)?.into(); + if len > S::get() { + return Err("BoundedVec exceeds its limit".into()); + } + let inner = decode_vec_with_len(input, len as usize)?; + Ok(Self(inner, PhantomData)) + } + + fn skip(input: &mut I) -> Result<(), Error> { + Vec::::skip(input) + } + } + + impl> DecodeWithMemTracking for BoundedVec {} + + // `BoundedVec`s encode to something which will always decode as a `Vec`. 
+ impl> EncodeLike> for BoundedVec {} + + impl MaxEncodedLen for BoundedVec + where + T: MaxEncodedLen, + S: Get, + BoundedVec: Encode, + { + fn max_encoded_len() -> usize { + // BoundedVec encodes like Vec which encodes like [T], which is a compact u32 + // plus each item in the slice: + // See: https://docs.substrate.io/reference/scale-codec/ + Compact(S::get()) + .encoded_size() + .saturating_add(Self::bound().saturating_mul(T::max_encoded_len())) + } + } + + impl DecodeLength for BoundedVec { + fn len(self_encoded: &[u8]) -> Result { + // `BoundedVec` stored just a `Vec`, thus the length is at the beginning in + // `Compact` form, and same implementation as `Vec` can be used. + as DecodeLength>::len(self_encoded) + } + } + + // `BoundedSlice`s encode to something which will always decode into a `BoundedVec`, + // `WeakBoundedVec`, or a `Vec`. + impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} + + impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} + + impl<'a, T: Encode + Decode, S: Get> EncodeLike> for BoundedSlice<'a, T, S> {} + }; +} + +#[cfg(feature = "scale-codec")] +mod scale_codec_impl { + codec_impl!(scale_codec); +} + +#[cfg(feature = "jam-codec")] +mod jam_codec_impl { + codec_impl!(jam_codec); +} + #[cfg(all(test, feature = "std"))] mod test { use super::*; use crate::{bounded_vec, ConstU32}; - use codec::CompactLen; + #[cfg(feature = "scale-codec")] + use scale_codec::{Compact, CompactLen, Decode, Encode}; #[test] + #[cfg(feature = "scale-codec")] fn encoding_same_as_unbounded_vec() { let b: BoundedVec> = bounded_vec![0, 1, 2, 3, 4, 5]; let v: Vec = vec![0, 1, 2, 3, 4, 5]; @@ -1126,6 +1157,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn too_big_vec_fail_to_decode() { let v: Vec = vec![1, 2, 3, 4, 5]; assert_eq!( @@ -1135,6 +1167,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn dont_consume_more_data_than_bounded_len() { let v: Vec = vec![1, 2, 3, 4, 5]; let data = 
v.encode(); diff --git a/bounded-collections/src/weak_bounded_vec.rs b/bounded-collections/src/weak_bounded_vec.rs index 77c2aa7e..9c4f63dc 100644 --- a/bounded-collections/src/weak_bounded_vec.rs +++ b/bounded-collections/src/weak_bounded_vec.rs @@ -21,7 +21,6 @@ use super::{BoundedSlice, BoundedVec}; use crate::Get; use alloc::vec::Vec; -use codec::{Decode, DecodeWithMemTracking, Encode, MaxEncodedLen}; use core::{ marker::PhantomData, ops::{Deref, Index, IndexMut}, @@ -41,8 +40,9 @@ use serde::{ /// The length of the vec is not strictly bounded. Decoding a vec with more element that the bound /// is accepted, and some method allow to bypass the restriction with warnings. #[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))] -#[derive(Encode, scale_info::TypeInfo)] -#[scale_info(skip_type_params(S))] +#[cfg_attr(feature = "scale-codec", derive(scale_codec::Encode, scale_info::TypeInfo))] +#[cfg_attr(feature = "scale-codec", scale_info(skip_type_params(S)))] +#[cfg_attr(feature = "jam-codec", derive(jam_codec::Encode))] pub struct WeakBoundedVec( pub(super) Vec, #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData, @@ -107,19 +107,6 @@ where } } -impl> Decode for WeakBoundedVec { - fn decode(input: &mut I) -> Result { - let inner = Vec::::decode(input)?; - Ok(Self::force_from(inner, Some("decode"))) - } - - fn skip(input: &mut I) -> Result<(), codec::Error> { - Vec::::skip(input) - } -} - -impl> DecodeWithMemTracking for WeakBoundedVec {} - impl WeakBoundedVec { /// Create `Self` from `t` without any checks. fn unchecked_from(t: Vec) -> Self { @@ -348,14 +335,6 @@ impl<'a, T, S> core::iter::IntoIterator for &'a mut WeakBoundedVec { } } -impl codec::DecodeLength for WeakBoundedVec { - fn len(self_encoded: &[u8]) -> Result { - // `WeakBoundedVec` stored just a `Vec`, thus the length is at the beginning in - // `Compact` form, and same implementation as `Vec` can be used. 
- as codec::DecodeLength>::len(self_encoded) - } -} - impl PartialEq> for WeakBoundedVec where T: PartialEq, @@ -436,20 +415,59 @@ impl> Ord for WeakBoundedVec { } } -impl MaxEncodedLen for WeakBoundedVec -where - T: MaxEncodedLen, - S: Get, - WeakBoundedVec: Encode, -{ - fn max_encoded_len() -> usize { - // WeakBoundedVec encodes like Vec which encodes like [T], which is a compact u32 - // plus each item in the slice: - // See: https://docs.polkadot.com/polkadot-protocol/basics/data-encoding/#scale-codec-libraries - codec::Compact(S::get()) - .encoded_size() - .saturating_add(Self::bound().saturating_mul(T::max_encoded_len())) - } +#[cfg(any(feature = "scale-codec", feature = "jam-codec"))] +macro_rules! codec_impl { + ($codec:ident) => { + use super::*; + use $codec::{Compact, Decode, DecodeLength, DecodeWithMemTracking, Encode, Error, Input, MaxEncodedLen}; + + impl MaxEncodedLen for WeakBoundedVec + where + T: MaxEncodedLen, + S: Get, + WeakBoundedVec: Encode, + { + fn max_encoded_len() -> usize { + // WeakBoundedVec encodes like Vec which encodes like [T], which is a compact u32 + // plus each item in the slice: + // See: https://docs.polkadot.com/polkadot-protocol/basics/data-encoding/#scale-codec-libraries + Compact(S::get()) + .encoded_size() + .saturating_add(Self::bound().saturating_mul(T::max_encoded_len())) + } + } + + impl> Decode for WeakBoundedVec { + fn decode(input: &mut I) -> Result { + let inner = Vec::::decode(input)?; + Ok(Self::force_from(inner, Some("decode"))) + } + + fn skip(input: &mut I) -> Result<(), Error> { + Vec::::skip(input) + } + } + + impl> DecodeWithMemTracking for WeakBoundedVec {} + + impl DecodeLength for WeakBoundedVec { + fn len(self_encoded: &[u8]) -> Result { + // `WeakBoundedVec` stored just a `Vec`, thus the length is at the beginning in + // `Compact` form, and same implementation as `Vec` can be used. 
+ as DecodeLength>::len(self_encoded) + } + } + }; +} + +#[cfg(feature = "scale-codec")] +mod scale_impl { + codec_impl!(scale_codec); +} + +#[cfg(feature = "jam-codec")] +mod jam_impl { + codec_impl!(jam_codec); } #[cfg(test)] @@ -457,6 +475,8 @@ mod test { use super::*; use crate::ConstU32; use alloc::vec; + #[cfg(feature = "scale-codec")] + use scale_codec::{Decode, Encode}; #[test] fn bound_returns_correct_value() { @@ -519,6 +539,7 @@ mod test { } #[test] + #[cfg(feature = "scale-codec")] fn too_big_succeed_to_decode() { let v: Vec = vec![1, 2, 3, 4, 5]; let w = WeakBoundedVec::>::decode(&mut &v.encode()[..]).unwrap(); diff --git a/primitive-types/impls/codec/Cargo.toml b/primitive-types/impls/codec/Cargo.toml index 06d5bbbb..f81cf49e 100644 --- a/primitive-types/impls/codec/Cargo.toml +++ b/primitive-types/impls/codec/Cargo.toml @@ -10,8 +10,8 @@ homepage.workspace = true repository.workspace = true [dependencies] -codec = { workspace = true, features = ["max-encoded-len"] } +scale-codec = { workspace = true, features = ["max-encoded-len"] } [features] default = ["std"] -std = ["codec/std"] +std = ["scale-codec/std"] diff --git a/primitive-types/impls/codec/src/lib.rs b/primitive-types/impls/codec/src/lib.rs index 9bd2174b..1a9ed621 100644 --- a/primitive-types/impls/codec/src/lib.rs +++ b/primitive-types/impls/codec/src/lib.rs @@ -11,7 +11,7 @@ #![cfg_attr(not(feature = "std"), no_std)] #[doc(hidden)] -pub use codec; +pub use scale_codec as codec; /// Add Parity Codec serialization support to an integer created by `construct_uint!`. #[macro_export]