From 691f9c26507af5bcc9a597b6a8dea263027ea54a Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 5 Nov 2025 12:36:13 -0600 Subject: [PATCH 01/20] implement const cbor encoding + decoding for basic types --- packages/const-serialize/src/cbor.rs | 293 +++++++++++++++++++++++++++ packages/const-serialize/src/lib.rs | 1 + 2 files changed, 294 insertions(+) create mode 100644 packages/const-serialize/src/cbor.rs diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs new file mode 100644 index 0000000000..921bf08339 --- /dev/null +++ b/packages/const-serialize/src/cbor.rs @@ -0,0 +1,293 @@ +// Major type 0: +// An unsigned integer in the range 0..264-1 inclusive. The value of the encoded item is the argument itself. For example, the integer 10 is denoted as the one byte 0b000_01010 (major type 0, additional information 10). The integer 500 would be 0b000_11001 (major type 0, additional information 25) followed by the two bytes 0x01f4, which is 500 in decimal. +// Major type 1: +// A negative integer in the range -264..-1 inclusive. The value of the item is -1 minus the argument. For example, the integer -500 would be 0b001_11001 (major type 1, additional information 25) followed by the two bytes 0x01f3, which is 499 in decimal. +// Major type 2: +// A byte string. The number of bytes in the string is equal to the argument. For example, a byte string whose length is 5 would have an initial byte of 0b010_00101 (major type 2, additional information 5 for the length), followed by 5 bytes of binary content. A byte string whose length is 500 would have 3 initial bytes of 0b010_11001 (major type 2, additional information 25 to indicate a two-byte length) followed by the two bytes 0x01f4 for a length of 500, followed by 500 bytes of binary content. +// Major type 3: +// A text string (Section 2) encoded as UTF-8 [RFC3629]. The number of bytes in the string is equal to the argument. 
A string containing an invalid UTF-8 sequence is well-formed but invalid (Section 1.2). This type is provided for systems that need to interpret or display human-readable text, and allows the differentiation between unstructured bytes and text that has a specified repertoire (that of Unicode) and encoding (UTF-8). In contrast to formats such as JSON, the Unicode characters in this type are never escaped. Thus, a newline character (U+000A) is always represented in a string as the byte 0x0a, and never as the bytes 0x5c6e (the characters "\" and "n") nor as 0x5c7530303061 (the characters "\", "u", "0", "0", "0", and "a"). +// Major type 4: +// An array of data items. In other formats, arrays are also called lists, sequences, or tuples (a "CBOR sequence" is something slightly different, though [RFC8742]). The argument is the number of data items in the array. Items in an array do not need to all be of the same type. For example, an array that contains 10 items of any type would have an initial byte of 0b100_01010 (major type 4, additional information 10 for the length) followed by the 10 remaining items. +// Major type 5: +// A map of pairs of data items. Maps are also called tables, dictionaries, hashes, or objects (in JSON). A map is comprised of pairs of data items, each pair consisting of a key that is immediately followed by a value. The argument is the number of pairs of data items in the map. For example, a map that contains 9 pairs would have an initial byte of 0b101_01001 (major type 5, additional information 9 for the number of pairs) followed by the 18 remaining items. The first item is the first key, the second item is the first value, the third item is the second key, and so on. Because items in a map come in pairs, their total number is always even: a map that contains an odd number of items (no value data present after the last key data item) is not well-formed. 
A map that has duplicate keys may be well-formed, but it is not valid, and thus it causes indeterminate decoding; see also Section 5.6. +// Major type 6: +// A tagged data item ("tag") whose tag number, an integer in the range 0..264-1 inclusive, is the argument and whose enclosed data item (tag content) is the single encoded data item that follows the head. See Section 3.4. +// Major type 7: +// Floating-point numbers and simple values, as well as the "break" stop code. See Section 3.3. + +use crate::ConstVec; + +#[repr(u8)] +#[derive(PartialEq)] +enum MajorType { + UnsignedInteger = 0, + NegativeInteger = 1, + Bytes = 2, + Text = 3, + Array = 4, + Map = 5, + Tagged = 6, + Float = 7, +} + +impl MajorType { + const fn from_byte(byte: u8) -> Self { + match byte >> 5 { + 0 => MajorType::UnsignedInteger, + 1 => MajorType::NegativeInteger, + 2 => MajorType::Bytes, + 3 => MajorType::Text, + 4 => MajorType::Array, + 5 => MajorType::Map, + 6 => MajorType::Tagged, + 7 => MajorType::Float, + _ => panic!("Invalid major type"), + } + } +} + +const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + match major { + MajorType::UnsignedInteger => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((number as i64, rest)) + } + MajorType::NegativeInteger => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((-(1 + number as i64), rest)) + } + _ => Err(()), + } +} + +const fn write_number( + vec: ConstVec, + number: i64, +) -> ConstVec { + match number { + 0.. 
=> write_major_type_and_u64(vec, MajorType::UnsignedInteger, number as u64), + ..0 => write_major_type_and_u64(vec, MajorType::NegativeInteger, (-(number + 1)) as u64), + } +} + +const fn write_major_type_and_u64( + vec: ConstVec, + major: MajorType, + number: u64, +) -> ConstVec { + let major = (major as u8) << 5; + match number { + 0..24 => { + let additional_information = number as u8; + let byte = major | additional_information; + vec.push(byte) + } + 24.. => { + let log2_additional_bytes = log2_bytes_for_number(number); + let additional_bytes = 1 << log2_additional_bytes; + let additional_information = log2_additional_bytes + 24; + let byte = major | additional_information; + let mut vec = vec.push(byte); + let mut byte = 0; + while byte < additional_bytes { + vec = vec.push((number >> ((additional_bytes - byte - 1) * 8)) as u8); + byte += 1; + } + vec + } + } +} + +const fn log2_bytes_for_number(number: u64) -> u8 { + let required_bytes = ((64 - number.leading_zeros()).div_ceil(8)) as u8; + match required_bytes { + ..=1 => 0, + ..=2 => 1, + ..=4 => 2, + _ => 3, + } +} + +const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + if let MajorType::Bytes = major { + take_bytes_from(rest, additional_information) + } else { + Err(()) + } +} + +const fn write_bytes( + vec: ConstVec, + bytes: &[u8], +) -> ConstVec { + let vec = write_major_type_and_u64(vec, MajorType::Bytes, bytes.len() as u64); + vec.extend(bytes) +} + +const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { + let [head, rest @ ..] 
= bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + if let MajorType::Text = major { + let Ok((bytes, rest)) = take_bytes_from(rest, additional_information) else { + return Err(()); + }; + let Ok(string) = str::from_utf8(bytes) else { + return Err(()); + }; + Ok((string, rest)) + } else { + Err(()) + } +} + +const fn write_str( + vec: ConstVec, + string: &str, +) -> ConstVec { + let vec = write_major_type_and_u64(vec, MajorType::Text, string.len() as u64); + vec.extend(string.as_bytes()) +} + +const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + if let MajorType::Array = major { + let Ok((length, rest)) = take_len_from(rest, additional_information) else { + return Err(()); + }; + Ok((length as usize, rest)) + } else { + Err(()) + } +} + +const fn write_array( + vec: ConstVec, + len: usize, +) -> ConstVec { + write_major_type_and_u64(vec, MajorType::Array, len as u64) +} + +const fn take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { + match additional_information { + // If additional_information < 24, the argument's value is the value of the additional information. + 0..24 => Ok((additional_information as u64, rest)), + // If additional_information is between 24 and 28, the argument's value is held in the n following bytes. 
+ 24..28 => { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + Ok((number as u64, rest)) + } + _ => Err(()), + } +} + +const fn take_bytes_from(rest: &[u8], additional_information: u8) -> Result<(&[u8], &[u8]), ()> { + let Ok((number, rest)) = grab_u64(rest, additional_information) else { + return Err(()); + }; + let Some((bytes, rest)) = rest.split_at_checked(number as usize) else { + return Err(()); + }; + Ok((bytes, rest)) +} + +const fn grab_u64(mut rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { + match additional_information { + 0..24 => Ok((additional_information as u64, rest)), + 24..28 => { + let n = 1 << (additional_information - 24) as u32; + let mut value = 0; + let mut count = 0; + while count < n { + let [next, remaining @ ..] = rest else { + return Err(()); + }; + value = (value << 8) | *next as u64; + rest = remaining; + count += 1; + } + Ok((value, rest)) + } + _ => Err(()), + } +} + +#[test] +fn test_parse_byte() { + for byte in 0..=255 { + let bytes = if byte < 24 { + [byte | 0b00000000, 0] + } else { + [0b00000000 | 24, byte] + }; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 1..=255 { + let bytes = if byte < 24 { + [byte - 1 | 0b0010_0000, 0] + } else { + [0b0010_0000 | 24, byte - 1] + }; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, -(byte as i64)); + } +} + +#[test] +fn test_byte_roundtrip() { + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), byte as _); + println!("{vec:?}"); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), -(byte as i64)); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, -(byte as i64)); + } +} + +#[test] +fn test_number_roundtrip() { + for _ in 0..100 { + let value = rand::random::(); + let vec = write_number(ConstVec::new(), value); + 
let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, value); + } +} + +#[test] +fn test_bytes_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let bytes = rand::random::<[u8; 100]>(); + let vec = write_bytes(ConstVec::new(), &bytes[..len]); + let (item, _) = take_bytes(vec.as_ref()).unwrap(); + assert_eq!(item, &bytes[..len]); + } +} diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 4cc5dcff1a..3487893eb7 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -3,6 +3,7 @@ use std::{char, mem::MaybeUninit}; +mod cbor; mod const_buffers; mod const_vec; From 5ba45ed847a4415f0766f3a515f9bec8acf71c48 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Thu, 6 Nov 2025 13:12:41 -0600 Subject: [PATCH 02/20] implement map encoding and decoding --- packages/const-serialize/src/cbor.rs | 229 ++++++++++++++++++++++++++- 1 file changed, 224 insertions(+), 5 deletions(-) diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index 921bf08339..009835bf31 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -46,6 +46,65 @@ impl MajorType { } } +/// Get the length of the item in bytes without deserialization. +const fn item_length(bytes: &[u8]) -> Result { + let [head, rest @ ..] 
= bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + match major { + MajorType::UnsignedInteger | MajorType::NegativeInteger => { + Ok(1 + get_length_of_number(additional_information) as usize) + } + MajorType::Text | MajorType::Bytes => { + let length_of_number = get_length_of_number(additional_information); + let Ok((length_of_bytes, _)) = + grab_u64_with_byte_length(rest, length_of_number, additional_information) + else { + return Err(()); + }; + Ok(1 + length_of_number as usize + length_of_bytes as usize) + } + MajorType::Array | MajorType::Map => { + let length_of_number = get_length_of_number(additional_information); + let Ok((length_of_items, _)) = + grab_u64_with_byte_length(rest, length_of_number, additional_information) + else { + return Err(()); + }; + let mut total_length = length_of_number as usize + length_of_items as usize; + let mut items_left = length_of_items; + while items_left > 0 { + let Some((_, after)) = rest.split_at_checked(total_length) else { + return Err(()); + }; + let Ok(item_length) = item_length(after) else { + return Err(()); + }; + total_length += item_length; + items_left -= 1; + } + Ok(1 + total_length) + } + _ => Err(()), + } +} + +#[test] +fn test_item_length_str() { + let input = [ + 0x61, // text(1) + /**/ 0x31, // "1" + 0x61, // text(1) + /**/ 0x31, // "1" + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to calculate length"); + }; + assert_eq!(length, 2); +} + const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -188,6 +247,102 @@ const fn write_array( write_major_type_and_u64(vec, MajorType::Array, len as u64) } +const fn write_map( + vec: ConstVec, + len: usize, +) -> ConstVec { + // We write 2 * len as the length of the map because each key-value pair is a separate entry. 
+ write_major_type_and_u64(vec, MajorType::Map, len as u64) +} + +const fn write_map_key( + value: ConstVec, + key: &str, +) -> ConstVec { + write_str(value, key) +} + +const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { + let [head, rest @ ..] = bytes else { + return Err(()); + }; + let major = MajorType::from_byte(*head); + let additional_information = *head & 0b0001_1111; + if let MajorType::Map = major { + let Ok((length, rest)) = take_len_from(rest, additional_information) else { + return Err(()); + }; + let mut after_map = rest; + let mut items_left = length * 2; + while items_left > 0 { + // Skip the value + let Ok(len) = item_length(after_map) else { + return Err(()); + }; + let Some((_, rest)) = rest.split_at_checked(len as usize) else { + return Err(()); + }; + after_map = rest; + items_left -= 1; + } + Ok((MapRef::new(rest, length as usize), after_map)) + } else { + Err(()) + } +} + +struct MapRef<'a> { + bytes: &'a [u8], + len: usize, +} + +impl<'a> MapRef<'a> { + const fn new(bytes: &'a [u8], len: usize) -> Self { + Self { bytes, len } + } + + const fn find(&self, key: &str) -> Result, ()> { + let mut bytes = self.bytes; + let mut items_left = self.len; + while items_left > 0 { + let Ok((str, rest)) = take_str(bytes) else { + return Err(()); + }; + if str_eq(key, str) { + return Ok(Some(rest)); + } + // Skip the value associated with the key we don't care about + let Ok(len) = item_length(rest) else { + return Err(()); + }; + let Some((_, rest)) = rest.split_at_checked(len as usize) else { + return Err(()); + }; + bytes = rest; + items_left -= 1; + } + Ok(None) + } +} + +const fn str_eq(a: &str, b: &str) -> bool { + let a_bytes = a.as_bytes(); + let b_bytes = b.as_bytes(); + let a_len = a_bytes.len(); + let b_len = b_bytes.len(); + if a_len != b_len { + return false; + } + let mut index = 0; + while index < a_len { + if a_bytes[index] != b_bytes[index] { + return false; + } + index += 1; + } + true +} + const fn 
take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { match additional_information { // If additional_information < 24, the argument's value is the value of the additional information. @@ -213,11 +368,30 @@ const fn take_bytes_from(rest: &[u8], additional_information: u8) -> Result<(&[u Ok((bytes, rest)) } -const fn grab_u64(mut rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { +const fn get_length_of_number(additional_information: u8) -> u8 { match additional_information { - 0..24 => Ok((additional_information as u64, rest)), - 24..28 => { - let n = 1 << (additional_information - 24) as u32; + 0..24 => 0, + 24..28 => 1 << (additional_information - 24), + _ => 0, + } +} + +const fn grab_u64(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { + grab_u64_with_byte_length( + rest, + get_length_of_number(additional_information), + additional_information, + ) +} + +const fn grab_u64_with_byte_length( + mut rest: &[u8], + byte_length: u8, + additional_information: u8, +) -> Result<(u64, &[u8]), ()> { + match byte_length { + 0 => Ok((additional_information as u64, rest)), + n => { let mut value = 0; let mut count = 0; while count < n { @@ -230,7 +404,6 @@ const fn grab_u64(mut rest: &[u8], additional_information: u8) -> Result<(u64, & } Ok((value, rest)) } - _ => Err(()), } } @@ -291,3 +464,49 @@ fn test_bytes_roundtrip() { assert_eq!(item, &bytes[..len]); } } + +#[test] +fn test_array_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let mut vec = write_array(ConstVec::new(), len); + for i in 0..len { + vec = write_number(vec, i as _); + } + let (len, mut remaining) = take_array(vec.as_ref()).unwrap(); + for i in 0..len { + let (item, rest) = take_number(remaining).unwrap(); + remaining = rest; + assert_eq!(item, i as i64); + } + } +} + +#[test] +fn test_map_roundtrip() { + use rand::prelude::SliceRandom; + for _ in 0..100 { + let len = (rand::random::() % 10) as 
usize; + let mut vec = write_map(ConstVec::new(), len); + let mut random_order_indexes = (0..len).collect::>(); + random_order_indexes.shuffle(&mut rand::rng()); + for &i in &random_order_indexes { + vec = write_map_key(vec, &i.to_string()); + vec = write_number(vec, i as _); + } + println!("len: {}", len); + println!("Map: {:?}", vec); + let (map, remaining) = take_map(vec.as_ref()).unwrap(); + println!("remaining: {:?}", remaining); + assert!(remaining.is_empty()); + for i in 0..len { + let key = i.to_string(); + let key_location = map + .find(&key) + .expect("encoding is valid") + .expect("key exists"); + let (value, _) = take_number(key_location).unwrap(); + assert_eq!(value, i as i64); + } + } +} From 7632feda2e2f8b4830f6f468aeac1bec515b2607 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Thu, 6 Nov 2025 13:13:14 -0600 Subject: [PATCH 03/20] fix map test --- packages/const-serialize/src/cbor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index 009835bf31..d95a01152e 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -279,7 +279,7 @@ const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { let Ok(len) = item_length(after_map) else { return Err(()); }; - let Some((_, rest)) = rest.split_at_checked(len as usize) else { + let Some((_, rest)) = after_map.split_at_checked(len as usize) else { return Err(()); }; after_map = rest; From 2298e0481c794a6a5aead449e10f862915ade619 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Fri, 7 Nov 2025 08:34:03 -0600 Subject: [PATCH 04/20] all tests passing --- packages/const-serialize-macro/src/lib.rs | 3 + packages/const-serialize/README.md | 2 +- packages/const-serialize/src/cbor.rs | 113 +++++--- packages/const-serialize/src/const_buffers.rs | 38 --- packages/const-serialize/src/const_vec.rs | 18 -- packages/const-serialize/src/lib.rs | 250 ++++++++++-------- 
packages/const-serialize/tests/enum.rs | 101 ++++++- packages/const-serialize/tests/lists.rs | 6 +- packages/const-serialize/tests/primitive.rs | 42 +-- packages/const-serialize/tests/str.rs | 6 +- packages/const-serialize/tests/structs.rs | 42 ++- packages/const-serialize/tests/tuples.rs | 6 +- 12 files changed, 360 insertions(+), 267 deletions(-) delete mode 100644 packages/const-serialize/src/const_buffers.rs diff --git a/packages/const-serialize-macro/src/lib.rs b/packages/const-serialize-macro/src/lib.rs index 11997c6b01..4053799138 100644 --- a/packages/const-serialize-macro/src/lib.rs +++ b/packages/const-serialize-macro/src/lib.rs @@ -48,6 +48,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { std::mem::size_of::(), &[#( const_serialize::StructFieldLayout::new( + stringify!(#field_names), std::mem::offset_of!(#ty, #field_names), <#field_types as const_serialize::SerializeConst>::MEMORY_LAYOUT, ), @@ -151,6 +152,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { } }); last_discriminant = Some(discriminant.clone()); + let variant_name = &variant.ident; let field_names = variant.fields.iter().enumerate().map(|(i, field)| { field .ident @@ -170,6 +172,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { )* } const_serialize::EnumVariant::new( + stringify!(#variant_name), #discriminant as u32, match VariantStruct::MEMORY_LAYOUT { const_serialize::Layout::Struct(layout) => layout, diff --git a/packages/const-serialize/README.md b/packages/const-serialize/README.md index dfa66de631..2f706bc0ce 100644 --- a/packages/const-serialize/README.md +++ b/packages/const-serialize/README.md @@ -29,7 +29,7 @@ const { }; 3]; let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let (buf, deserialized) = match deserialize_const!([Struct; 3], buf) { Some(data) => data, None => panic!("data mismatch"), diff --git a/packages/const-serialize/src/cbor.rs 
b/packages/const-serialize/src/cbor.rs index d95a01152e..cf4162f532 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -1,36 +1,41 @@ -// Major type 0: -// An unsigned integer in the range 0..264-1 inclusive. The value of the encoded item is the argument itself. For example, the integer 10 is denoted as the one byte 0b000_01010 (major type 0, additional information 10). The integer 500 would be 0b000_11001 (major type 0, additional information 25) followed by the two bytes 0x01f4, which is 500 in decimal. -// Major type 1: -// A negative integer in the range -264..-1 inclusive. The value of the item is -1 minus the argument. For example, the integer -500 would be 0b001_11001 (major type 1, additional information 25) followed by the two bytes 0x01f3, which is 499 in decimal. -// Major type 2: -// A byte string. The number of bytes in the string is equal to the argument. For example, a byte string whose length is 5 would have an initial byte of 0b010_00101 (major type 2, additional information 5 for the length), followed by 5 bytes of binary content. A byte string whose length is 500 would have 3 initial bytes of 0b010_11001 (major type 2, additional information 25 to indicate a two-byte length) followed by the two bytes 0x01f4 for a length of 500, followed by 500 bytes of binary content. -// Major type 3: -// A text string (Section 2) encoded as UTF-8 [RFC3629]. The number of bytes in the string is equal to the argument. A string containing an invalid UTF-8 sequence is well-formed but invalid (Section 1.2). This type is provided for systems that need to interpret or display human-readable text, and allows the differentiation between unstructured bytes and text that has a specified repertoire (that of Unicode) and encoding (UTF-8). In contrast to formats such as JSON, the Unicode characters in this type are never escaped. 
Thus, a newline character (U+000A) is always represented in a string as the byte 0x0a, and never as the bytes 0x5c6e (the characters "\" and "n") nor as 0x5c7530303061 (the characters "\", "u", "0", "0", "0", and "a"). -// Major type 4: -// An array of data items. In other formats, arrays are also called lists, sequences, or tuples (a "CBOR sequence" is something slightly different, though [RFC8742]). The argument is the number of data items in the array. Items in an array do not need to all be of the same type. For example, an array that contains 10 items of any type would have an initial byte of 0b100_01010 (major type 4, additional information 10 for the length) followed by the 10 remaining items. -// Major type 5: -// A map of pairs of data items. Maps are also called tables, dictionaries, hashes, or objects (in JSON). A map is comprised of pairs of data items, each pair consisting of a key that is immediately followed by a value. The argument is the number of pairs of data items in the map. For example, a map that contains 9 pairs would have an initial byte of 0b101_01001 (major type 5, additional information 9 for the number of pairs) followed by the 18 remaining items. The first item is the first key, the second item is the first value, the third item is the second key, and so on. Because items in a map come in pairs, their total number is always even: a map that contains an odd number of items (no value data present after the last key data item) is not well-formed. A map that has duplicate keys may be well-formed, but it is not valid, and thus it causes indeterminate decoding; see also Section 5.6. -// Major type 6: -// A tagged data item ("tag") whose tag number, an integer in the range 0..264-1 inclusive, is the argument and whose enclosed data item (tag content) is the single encoded data item that follows the head. See Section 3.4. -// Major type 7: -// Floating-point numbers and simple values, as well as the "break" stop code. See Section 3.3. 
- use crate::ConstVec; +/// Each item in CBOR starts with a leading byte, which determines the type of the item and additional information. +/// +/// The first 3 bits of the leading byte are the major type, which indicates the type of the item. #[repr(u8)] #[derive(PartialEq)] enum MajorType { + // Major type 0: + // An unsigned integer in the range 0..264-1 inclusive. The value of the encoded item is the argument itself. For example, the integer 10 is denoted as the one byte 0b000_01010 (major type 0, additional information 10). The integer 500 would be 0b000_11001 (major type 0, additional information 25) followed by the two bytes 0x01f4, which is 500 in decimal. UnsignedInteger = 0, + // Major type 1: + // A negative integer in the range -264..-1 inclusive. The value of the item is -1 minus the argument. For example, the integer -500 would be 0b001_11001 (major type 1, additional information 25) followed by the two bytes 0x01f3, which is 499 in decimal. NegativeInteger = 1, + // Major type 2: + // A byte string. The number of bytes in the string is equal to the argument. For example, a byte string whose length is 5 would have an initial byte of 0b010_00101 (major type 2, additional information 5 for the length), followed by 5 bytes of binary content. A byte string whose length is 500 would have 3 initial bytes of 0b010_11001 (major type 2, additional information 25 to indicate a two-byte length) followed by the two bytes 0x01f4 for a length of 500, followed by 500 bytes of binary content. Bytes = 2, + // Major type 3: + // A text string (Section 2) encoded as UTF-8 [RFC3629]. The number of bytes in the string is equal to the argument. A string containing an invalid UTF-8 sequence is well-formed but invalid (Section 1.2). This type is provided for systems that need to interpret or display human-readable text, and allows the differentiation between unstructured bytes and text that has a specified repertoire (that of Unicode) and encoding (UTF-8). 
In contrast to formats such as JSON, the Unicode characters in this type are never escaped. Thus, a newline character (U+000A) is always represented in a string as the byte 0x0a, and never as the bytes 0x5c6e (the characters "\" and "n") nor as 0x5c7530303061 (the characters "\", "u", "0", "0", "0", and "a"). Text = 3, + // Major type 4: + // An array of data items. In other formats, arrays are also called lists, sequences, or tuples (a "CBOR sequence" is something slightly different, though [RFC8742]). The argument is the number of data items in the array. Items in an array do not need to all be of the same type. For example, an array that contains 10 items of any type would have an initial byte of 0b100_01010 (major type 4, additional information 10 for the length) followed by the 10 remaining items. Array = 4, + // Major type 5: + // A map of pairs of data items. Maps are also called tables, dictionaries, hashes, or objects (in JSON). A map is comprised of pairs of data items, each pair consisting of a key that is immediately followed by a value. The argument is the number of pairs of data items in the map. For example, a map that contains 9 pairs would have an initial byte of 0b101_01001 (major type 5, additional information 9 for the number of pairs) followed by the 18 remaining items. The first item is the first key, the second item is the first value, the third item is the second key, and so on. Because items in a map come in pairs, their total number is always even: a map that contains an odd number of items (no value data present after the last key data item) is not well-formed. A map that has duplicate keys may be well-formed, but it is not valid, and thus it causes indeterminate decoding; see also Section 5.6. Map = 5, + // Major type 6: + // A tagged data item ("tag") whose tag number, an integer in the range 0..264-1 inclusive, is the argument and whose enclosed data item (tag content) is the single encoded data item that follows the head. 
See Section 3.4. Tagged = 6, + // Major type 7: + // Floating-point numbers and simple values, as well as the "break" stop code. See Section 3.3. Float = 7, } impl MajorType { + /// The bitmask for the major type in the leading byte + const MASK: u8 = 0b0001_1111; + const fn from_byte(byte: u8) -> Self { match byte >> 5 { 0 => MajorType::UnsignedInteger, @@ -52,7 +57,7 @@ const fn item_length(bytes: &[u8]) -> Result { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; match major { MajorType::UnsignedInteger | MajorType::NegativeInteger => { Ok(1 + get_length_of_number(additional_information) as usize) @@ -73,8 +78,8 @@ const fn item_length(bytes: &[u8]) -> Result { else { return Err(()); }; - let mut total_length = length_of_number as usize + length_of_items as usize; - let mut items_left = length_of_items; + let mut total_length = length_of_number as usize; + let mut items_left = length_of_items * if let MajorType::Map = major { 2 } else { 1 }; while items_left > 0 { let Some((_, after)) = rest.split_at_checked(total_length) else { return Err(()); @@ -105,12 +110,33 @@ fn test_item_length_str() { assert_eq!(length, 2); } -const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { +#[test] +fn test_item_length_map() { + #[rustfmt::skip] + let input = [ + /* map(1) */ 0xA1, + /* text(1) */ 0x61, + /* "A" */ 0x41, + /* map(2) */ 0xA2, + /* text(3) */ 0x63, + /* "one" */ 0x6F, 0x6E, 0x65, + /* unsigned(286331153) */ 0x1A, 0x11, 0x11, 0x11, 0x11, + /* text(3) */ 0x63, + /* "two" */ 0x74, 0x77, 0x6F, + /* unsigned(34) */ 0x18, 0x22, + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to calculate length"); + }; + assert_eq!(length, input.len()); +} + +pub(crate) const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { let [head, rest @ ..] 
= bytes else { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; match major { MajorType::UnsignedInteger => { let Ok((number, rest)) = grab_u64(rest, additional_information) else { @@ -128,7 +154,7 @@ const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { } } -const fn write_number( +pub(crate) const fn write_number( vec: ConstVec, number: i64, ) -> ConstVec { @@ -176,12 +202,12 @@ const fn log2_bytes_for_number(number: u64) -> u8 { } } -const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { +pub(crate) const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; if let MajorType::Bytes = major { take_bytes_from(rest, additional_information) } else { @@ -189,7 +215,7 @@ const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { } } -const fn write_bytes( +pub(crate) const fn write_bytes( vec: ConstVec, bytes: &[u8], ) -> ConstVec { @@ -197,12 +223,12 @@ const fn write_bytes( vec.extend(bytes) } -const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { +pub(crate) const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { let [head, rest @ ..] 
= bytes else { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; if let MajorType::Text = major { let Ok((bytes, rest)) = take_bytes_from(rest, additional_information) else { return Err(()); @@ -216,7 +242,7 @@ const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { } } -const fn write_str( +pub(crate) const fn write_str( vec: ConstVec, string: &str, ) -> ConstVec { @@ -224,12 +250,12 @@ const fn write_str( vec.extend(string.as_bytes()) } -const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { +pub(crate) const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; if let MajorType::Array = major { let Ok((length, rest)) = take_len_from(rest, additional_information) else { return Err(()); @@ -240,14 +266,14 @@ const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { } } -const fn write_array( +pub(crate) const fn write_array( vec: ConstVec, len: usize, ) -> ConstVec { write_major_type_and_u64(vec, MajorType::Array, len as u64) } -const fn write_map( +pub(crate) const fn write_map( vec: ConstVec, len: usize, ) -> ConstVec { @@ -255,19 +281,19 @@ const fn write_map( write_major_type_and_u64(vec, MajorType::Map, len as u64) } -const fn write_map_key( +pub(crate) const fn write_map_key( value: ConstVec, key: &str, ) -> ConstVec { write_str(value, key) } -const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { +pub(crate) const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { let [head, rest @ ..] 
= bytes else { return Err(()); }; let major = MajorType::from_byte(*head); - let additional_information = *head & 0b0001_1111; + let additional_information = *head & MajorType::MASK; if let MajorType::Map = major { let Ok((length, rest)) = take_len_from(rest, additional_information) else { return Err(()); @@ -291,9 +317,9 @@ const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { } } -struct MapRef<'a> { - bytes: &'a [u8], - len: usize, +pub(crate) struct MapRef<'a> { + pub(crate) bytes: &'a [u8], + pub(crate) len: usize, } impl<'a> MapRef<'a> { @@ -301,7 +327,7 @@ impl<'a> MapRef<'a> { Self { bytes, len } } - const fn find(&self, key: &str) -> Result, ()> { + pub(crate) const fn find(&self, key: &str) -> Result, ()> { let mut bytes = self.bytes; let mut items_left = self.len; while items_left > 0 { @@ -325,7 +351,7 @@ impl<'a> MapRef<'a> { } } -const fn str_eq(a: &str, b: &str) -> bool { +pub(crate) const fn str_eq(a: &str, b: &str) -> bool { let a_bytes = a.as_bytes(); let b_bytes = b.as_bytes(); let a_len = a_bytes.len(); @@ -358,7 +384,10 @@ const fn take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, } } -const fn take_bytes_from(rest: &[u8], additional_information: u8) -> Result<(&[u8], &[u8]), ()> { +pub(crate) const fn take_bytes_from( + rest: &[u8], + additional_information: u8, +) -> Result<(&[u8], &[u8]), ()> { let Ok((number, rest)) = grab_u64(rest, additional_information) else { return Err(()); }; diff --git a/packages/const-serialize/src/const_buffers.rs b/packages/const-serialize/src/const_buffers.rs deleted file mode 100644 index 4e93ddbdbc..0000000000 --- a/packages/const-serialize/src/const_buffers.rs +++ /dev/null @@ -1,38 +0,0 @@ -/// A buffer that can be read from at compile time. This is very similar to [Cursor](std::io::Cursor) but is -/// designed to be used in const contexts. 
-#[derive(Debug, Clone, Copy, PartialEq)] -pub struct ConstReadBuffer<'a> { - location: usize, - memory: &'a [u8], -} - -impl<'a> ConstReadBuffer<'a> { - /// Create a new buffer from a byte slice - pub const fn new(memory: &'a [u8]) -> Self { - Self { - location: 0, - memory, - } - } - - /// Get the next byte from the buffer. Returns `None` if the buffer is empty. - /// This will return the new version of the buffer with the first byte removed. - pub const fn get(mut self) -> Option<(Self, u8)> { - if self.location >= self.memory.len() { - return None; - } - let value = self.memory[self.location]; - self.location += 1; - Some((self, value)) - } - - /// Get a reference to the underlying byte slice - pub const fn as_ref(&self) -> &[u8] { - self.memory - } - - /// Get a slice of the buffer from the current location to the end of the buffer - pub const fn remaining(&self) -> &[u8] { - self.memory.split_at(self.location).1 - } -} diff --git a/packages/const-serialize/src/const_vec.rs b/packages/const-serialize/src/const_vec.rs index 4c3c9a4a2a..5b618bd80c 100644 --- a/packages/const-serialize/src/const_vec.rs +++ b/packages/const-serialize/src/const_vec.rs @@ -1,8 +1,6 @@ #![allow(dead_code)] use std::{fmt::Debug, hash::Hash, mem::MaybeUninit}; -use crate::ConstReadBuffer; - const DEFAULT_MAX_SIZE: usize = 2usize.pow(10); /// [`ConstVec`] is a version of [`Vec`] that is usable in const contexts. 
It has @@ -327,22 +325,6 @@ impl ConstVec { } } -impl ConstVec { - /// Convert the [`ConstVec`] into a [`ConstReadBuffer`] - /// - /// # Example - /// ```rust - /// # use const_serialize::{ConstVec, ConstReadBuffer}; - /// const EMPTY: ConstVec = ConstVec::new(); - /// const ONE: ConstVec = EMPTY.push(1); - /// const TWO: ConstVec = ONE.push(2); - /// const READ: ConstReadBuffer = TWO.read(); - /// ``` - pub const fn read(&self) -> ConstReadBuffer<'_> { - ConstReadBuffer::new(self.as_ref()) - } -} - #[test] fn test_const_vec() { const VEC: ConstVec = { diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 3487893eb7..2dbbca4215 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -4,24 +4,32 @@ use std::{char, mem::MaybeUninit}; mod cbor; -mod const_buffers; mod const_vec; -pub use const_buffers::ConstReadBuffer; pub use const_serialize_macro::SerializeConst; pub use const_vec::ConstVec; +use crate::cbor::{ + str_eq, take_array, take_map, take_number, take_str, write_array, write_map, write_map_key, + write_number, +}; + /// Plain old data for a field. Stores the offset of the field in the struct and the layout of the field. #[derive(Debug, Copy, Clone)] pub struct StructFieldLayout { + name: &'static str, offset: usize, layout: Layout, } impl StructFieldLayout { /// Create a new struct field layout - pub const fn new(offset: usize, layout: Layout) -> Self { - Self { offset, layout } + pub const fn new(name: &'static str, offset: usize, layout: Layout) -> Self { + Self { + name, + offset, + layout, + } } } @@ -83,6 +91,7 @@ impl EnumLayout { /// The layout for an enum variant. The enum variant layout is just a struct layout with a tag and alignment. 
#[derive(Debug, Copy, Clone)] pub struct EnumVariant { + name: &'static str, // Note: tags may not be sequential tag: u32, data: StructLayout, @@ -91,8 +100,13 @@ pub struct EnumVariant { impl EnumVariant { /// Create a new enum variant layout - pub const fn new(tag: u32, data: StructLayout, align: usize) -> Self { - Self { tag, data, align } + pub const fn new(name: &'static str, tag: u32, data: StructLayout, align: usize) -> Self { + Self { + name, + tag, + data, + align, + } } } @@ -199,7 +213,7 @@ macro_rules! impl_serialize_const_tuple { size: std::mem::size_of::<($($generic,)*)>(), data: &[ $( - StructFieldLayout::new(std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), + StructFieldLayout::new(stringify!($generic_number), std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), )* ], }) @@ -259,6 +273,7 @@ unsafe impl SerializeConst for ConstStr { size: std::mem::size_of::(), data: &[ StructFieldLayout::new( + "bytes", std::mem::offset_of!(Self, bytes), Layout::List(ListLayout { len: MAX_STR_SIZE, @@ -268,6 +283,7 @@ unsafe impl SerializeConst for ConstStr { }), ), StructFieldLayout::new( + "len", std::mem::offset_of!(Self, len), Layout::Primitive(PrimitiveLayout { size: std::mem::size_of::(), @@ -588,13 +604,20 @@ fn fuzz_utf8_byte_to_char_len() { /// Serialize a struct that is stored at the pointer passed in const fn serialize_const_struct( ptr: *const (), - mut to: ConstVec, + to: ConstVec, layout: &StructLayout, ) -> ConstVec { let mut i = 0; - while i < layout.data.len() { + let field_count = layout.data.len(); + let mut to = write_map(to, field_count); + while i < field_count { // Serialize the field at the offset pointer in the struct - let StructFieldLayout { offset, layout } = &layout.data[i]; + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + to = write_map_key(to, name); let field = ptr.wrapping_byte_add(*offset as _); to = serialize_const_ptr(field, to, layout); i += 1; @@ -623,7 
+646,6 @@ const fn serialize_const_enum( } else { unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } }; - to = to.push(byte); discriminant |= (byte as u32) << (offset * 8); offset += 1; } @@ -631,8 +653,12 @@ const fn serialize_const_enum( let mut i = 0; while i < layout.variants.len() { // If the variant is the discriminated one, serialize it - let EnumVariant { tag, data, .. } = &layout.variants[i]; + let EnumVariant { + tag, name, data, .. + } = &layout.variants[i]; if discriminant == *tag { + to = write_map(to, 1); + to = write_map_key(to, name); let data_ptr = ptr.wrapping_byte_offset(layout.variants_offset as _); to = serialize_const_struct(data_ptr, to, data); break; @@ -645,24 +671,27 @@ const fn serialize_const_enum( /// Serialize a primitive type that is stored at the pointer passed in const fn serialize_const_primitive( ptr: *const (), - mut to: ConstVec, + to: ConstVec, layout: &PrimitiveLayout, ) -> ConstVec { let ptr = ptr as *const u8; let mut offset = 0; + let mut i64_bytes = [0u8; 8]; while offset < layout.size { // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - to = to.push(unsafe { + let byte = unsafe { + if cfg!(any(target_endian = "big", feature = "test-big-endian")) { ptr.wrapping_byte_offset((layout.size - offset - 1) as _) .read() - }); - } else { - to = to.push(unsafe { ptr.wrapping_byte_offset(offset as _).read() }); - } + } else { + ptr.wrapping_byte_offset(offset as _).read() + } + }; + i64_bytes[offset as usize] = byte; offset += 1; } - to + let number = i64::from_ne_bytes(i64_bytes); + write_number(to, number) } /// Serialize a constant sized array that is stored at the pointer passed in @@ -673,6 +702,7 @@ const fn serialize_const_list( ) -> ConstVec { let len = layout.len; let mut i = 0; + to = write_array(to, len); while i < len { let field = ptr.wrapping_byte_offset((i * layout.item_layout.size()) as _); to = 
serialize_const_ptr(field, to, layout.item_layout); @@ -711,8 +741,7 @@ const fn serialize_const_ptr(ptr: *const (), to: ConstVec, layout: &Layout) /// b: 0x22, /// c: 0x33333333, /// }, buffer); -/// let buf = buffer.read(); -/// assert_eq!(buf.as_ref(), &[0x11, 0x11, 0x11, 0x11, 0x22, 0x33, 0x33, 0x33, 0x33]); +/// assert_eq!(buffer.as_ref(), &[0xa3, 0x61, 0x61, 0x1a, 0x11, 0x11, 0x11, 0x11, 0x61, 0x62, 0x18, 0x22, 0x61, 0x63, 0x1a, 0x33, 0x33, 0x33, 0x33]); /// ``` #[must_use = "The data is serialized into the returned buffer"] pub const fn serialize_const(data: &T, to: ConstVec) -> ConstVec { @@ -721,96 +750,98 @@ pub const fn serialize_const(data: &T, to: ConstVec) -> C } /// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_primitive<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, +const fn deserialize_const_primitive<'a>( + from: &'a [u8], layout: &PrimitiveLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { let mut offset = 0; + let Ok((number, from)) = take_number(from) else { + return None; + }; + let bytes = number.to_le_bytes(); while offset < layout.size { // If the bytes are reversed, walk backwards from the end of the number when filling in bytes - let (from_new, value) = match from.get() { - Some(data) => data, - None => return None, - }; - from = from_new; + let byte = bytes[offset]; if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - out[start + layout.size - offset - 1] = MaybeUninit::new(value); + out[layout.size - offset - 1] = MaybeUninit::new(byte); } else { - out[start + offset] = MaybeUninit::new(value); + out[offset] = MaybeUninit::new(byte); } offset += 1; } - Some((from, out)) + Some(from) } /// Deserialize a struct type into the out buffer at the offset passed in. 
Returns a new version of the buffer with the data added. -const fn deserialize_const_struct<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, +const fn deserialize_const_struct<'a>( + from: &'a [u8], layout: &StructLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Ok((map, from)) = take_map(from) else { + return None; + }; let mut i = 0; while i < layout.data.len() { // Deserialize the field at the offset pointer in the struct - let StructFieldLayout { offset, layout } = &layout.data[i]; - let (new_from, new_out) = match deserialize_const_ptr(from, layout, (start + *offset, out)) - { - Some(data) => data, - None => return None, + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + let Ok(Some(from)) = map.find(name) else { + return None; }; - from = new_from; - out = new_out; + let Some((_, field_bytes)) = out.split_at_mut_checked(*offset) else { + return None; + }; + if deserialize_const_ptr(from, layout, field_bytes).is_none() { + return None; + } i += 1; } - Some((from, out)) + Some(from) } /// Deserialize an enum type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
-const fn deserialize_const_enum<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, +const fn deserialize_const_enum<'a>( + from: &'a [u8], layout: &EnumLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let mut discriminant = 0; + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + // First, deserialize the map + let Ok((map, remaining)) = take_map(from) else { + return None; + }; - // First, deserialize the discriminant - let mut offset = 0; - while offset < layout.discriminant.size { - // If the bytes are reversed, walk backwards from the end of the number when filling in bytes - let (from_new, value) = match from.get() { - Some(data) => data, - None => return None, - }; - from = from_new; - if cfg!(target_endian = "big") { - out[start + layout.size - offset - 1] = MaybeUninit::new(value); - discriminant |= (value as u32) << ((layout.discriminant.size - offset - 1) * 8); - } else { - out[start + offset] = MaybeUninit::new(value); - discriminant |= (value as u32) << (offset * 8); - } - offset += 1; - } + // Then get the only field which is the tag + let Ok((deserilized_name, from)) = take_str(&map.bytes) else { + return None; + }; // Then, deserialize the variant let mut i = 0; let mut matched_variant = false; while i < layout.variants.len() { // If the variant is the discriminated one, deserialize it - let EnumVariant { tag, data, .. } = &layout.variants[i]; - if discriminant == *tag { - let offset = layout.variants_offset; - let (new_from, new_out) = - match deserialize_const_struct(from, data, (start + offset, out)) { - Some(data) => data, - None => return None, - }; - from = new_from; - out = new_out; + let EnumVariant { + name, data, tag, .. 
+ } = &layout.variants[i]; + if str_eq(deserilized_name, *name) { + // Write the tag to the output buffer + let tag_bytes = tag.to_ne_bytes(); + let mut offset = 0; + while offset < layout.discriminant.size { + out[offset] = MaybeUninit::new(tag_bytes[offset]); + offset += 1; + } + let Some((_, out)) = out.split_at_mut_checked(layout.variants_offset) else { + return None; + }; + if deserialize_const_struct(from, data, out).is_none() { + return None; + } matched_variant = true; break; } @@ -820,38 +851,40 @@ const fn deserialize_const_enum<'a, const N: usize>( return None; } - Some((from, out)) + Some(remaining) } /// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_list<'a, const N: usize>( - mut from: ConstReadBuffer<'a>, +const fn deserialize_const_list<'a>( + from: &'a [u8], layout: &ListLayout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { - let (start, mut out) = out; - let len = layout.len; + mut out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { let item_layout = layout.item_layout; + let Ok((len, mut from)) = take_array(from) else { + return None; + }; let mut i = 0; while i < len { - let (new_from, new_out) = - match deserialize_const_ptr(from, item_layout, (start + i * item_layout.size(), out)) { - Some(data) => data, - None => return None, - }; + let Some(new_from) = deserialize_const_ptr(from, item_layout, out) else { + return None; + }; + let Some((_, item_out)) = out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + out = item_out; from = new_from; - out = new_out; i += 1; } - Some((from, out)) + Some(from) } /// Deserialize a type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
-const fn deserialize_const_ptr<'a, const N: usize>( - from: ConstReadBuffer<'a>, +const fn deserialize_const_ptr<'a>( + from: &'a [u8], layout: &Layout, - out: (usize, [MaybeUninit; N]), -) -> Option<(ConstReadBuffer<'a>, [MaybeUninit; N])> { + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { match layout { Layout::Enum(layout) => deserialize_const_enum(from, layout, out), Layout::Struct(layout) => deserialize_const_struct(from, layout, out), @@ -860,7 +893,7 @@ const fn deserialize_const_ptr<'a, const N: usize>( } } -/// Deserialize a type into the output buffer. Accepts `(type, ConstVec)` as input and returns `Option<(ConstReadBuffer, Instance of type)>` +/// Deserialize a type into the output buffer. Accepts `(type, ConstVec)` as input and returns `Option<(&'a [u8], Instance of type)>` /// /// # Example /// ```rust @@ -880,7 +913,7 @@ const fn deserialize_const_ptr<'a, const N: usize>( /// c: 0x33333333, /// d: 0x44444444, /// }, buffer); -/// let buf = buffer.read(); +/// let buf = buffer.as_ref(); /// assert_eq!(deserialize_const!(Struct, buf).unwrap().1, Struct { /// a: 0x11111111, /// b: 0x22, @@ -902,15 +935,14 @@ macro_rules! 
deserialize_const { /// # Safety /// N must be `std::mem::size_of::()` #[must_use = "The data is deserialized from the input buffer"] -pub const unsafe fn deserialize_const_raw( - from: ConstReadBuffer, -) -> Option<(ConstReadBuffer, T)> { +pub const unsafe fn deserialize_const_raw<'a, const N: usize, T: SerializeConst>( + from: &'a [u8], +) -> Option<(&'a [u8], T)> { // Create uninitized memory with the size of the type - let out = [MaybeUninit::uninit(); N]; + let mut out = [MaybeUninit::uninit(); N]; // Fill in the bytes into the buffer for the type - let (from, out) = match deserialize_const_ptr(from, &T::MEMORY_LAYOUT, (0, out)) { - Some(data) => data, - None => return None, + let Some(from) = deserialize_const_ptr(from, &T::MEMORY_LAYOUT, &mut out) else { + return None; }; // Now that the memory is filled in, transmute it into the type Some((from, unsafe { diff --git a/packages/const-serialize/tests/enum.rs b/packages/const-serialize/tests/enum.rs index a0df9f160c..5b8e286ebd 100644 --- a/packages/const-serialize/tests/enum.rs +++ b/packages/const-serialize/tests/enum.rs @@ -81,7 +81,7 @@ fn test_serialize_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -91,7 +91,7 @@ fn test_serialize_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } @@ -110,7 +110,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -126,7 +126,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = 
ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -139,7 +139,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); let data = [ @@ -152,7 +152,7 @@ fn test_serialize_list_of_lopsided_enums() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); } @@ -171,14 +171,14 @@ fn test_serialize_u8_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B; let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } @@ -198,7 +198,7 @@ fn test_serialize_corrupted_enum() { buf = serialize_const(&data, buf); buf = buf.set(0, 2); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf), None); } @@ -226,7 +226,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -236,7 +236,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = 
buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -249,7 +249,7 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); let data = Enum::B { @@ -262,6 +262,81 @@ fn test_serialize_nested_enum() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } + +#[test] +fn test_adding_enum_field_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum Initial { + A { a: u32, b: u8 }, + } + + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum New { + A { b: u8, a: u32, c: u32 }, + } + + let data = New::A { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial::A { + a: 0x11111111, + b: 0x22, + }, + data2 + ); +} + +#[test] +fn test_adding_enum_variant_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum Initial { + A { a: u32, b: u8 }, + } + + #[derive(Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum New { + #[allow(unused)] + B { + d: u32, + e: u8, + }, + A { + c: u32, + b: u8, + a: u32, + }, + } + + let data = New::A { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial::A { + a: 0x11111111, + b: 0x22, 
+ }, + data2 + ); +} diff --git a/packages/const-serialize/tests/lists.rs b/packages/const-serialize/tests/lists.rs index 84f9fe11b2..4192499150 100644 --- a/packages/const-serialize/tests/lists.rs +++ b/packages/const-serialize/tests/lists.rs @@ -5,7 +5,7 @@ fn test_serialize_const_layout_list() { let mut buf = ConstVec::new(); buf = serialize_const(&[1u8, 2, 3] as &[u8; 3], buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([u8; 3], buf).unwrap().1, [1, 2, 3]) } @@ -17,7 +17,7 @@ fn test_serialize_const_layout_nested_lists() { buf, ); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!([[u8; 3]; 3], buf).unwrap().1, @@ -29,6 +29,6 @@ fn test_serialize_const_layout_nested_lists() { fn test_serialize_list_too_little_data() { let mut buf = ConstVec::new(); buf = buf.push(1); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!([u64; 10], buf), None); } diff --git a/packages/const-serialize/tests/primitive.rs b/packages/const-serialize/tests/primitive.rs index a5e3e803ff..0423dcf219 100644 --- a/packages/const-serialize/tests/primitive.rs +++ b/packages/const-serialize/tests/primitive.rs @@ -4,58 +4,34 @@ use const_serialize::{deserialize_const, serialize_const, ConstVec}; fn test_serialize_const_layout_primitive() { let mut buf = ConstVec::new(); buf = serialize_const(&1234u32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234u32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234u32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); + println!("{:?}", buf); assert_eq!(deserialize_const!(u32, buf).unwrap().1, 1234u32); let mut buf = ConstVec::new(); buf = serialize_const(&1234u64, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234u64.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234u64.to_le_bytes()); - } - let buf = 
buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(u64, buf).unwrap().1, 1234u64); let mut buf = ConstVec::new(); buf = serialize_const(&1234i32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234i32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234i32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(i32, buf).unwrap().1, 1234i32); let mut buf = ConstVec::new(); buf = serialize_const(&1234i64, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 1234i64.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 1234i64.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(i64, buf).unwrap().1, 1234i64); let mut buf = ConstVec::new(); buf = serialize_const(&true, buf); assert_eq!(buf.as_ref(), [1u8]); - let buf = buf.read(); + let buf = buf.as_ref(); assert!(deserialize_const!(bool, buf).unwrap().1); let mut buf = ConstVec::new(); buf = serialize_const(&0.631f32, buf); - if cfg!(feature = "test-big-endian") { - assert_eq!(buf.as_ref(), 0.631f32.to_be_bytes()); - } else { - assert_eq!(buf.as_ref(), 0.631f32.to_le_bytes()); - } - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(f32, buf).unwrap().1, 0.631); } @@ -66,6 +42,6 @@ fn test_serialize_primitive_too_little_data() { buf = buf.push(1); buf = buf.push(1); buf = buf.push(1); - let buf = buf.read(); - assert_eq!(deserialize_const!(u64, buf), None); + let buf = buf.as_ref(); + assert_eq!(deserialize_const!([u64; 10], buf), None); } diff --git a/packages/const-serialize/tests/str.rs b/packages/const-serialize/tests/str.rs index 45371741d5..51ca7c665f 100644 --- a/packages/const-serialize/tests/str.rs +++ b/packages/const-serialize/tests/str.rs @@ -6,7 +6,7 @@ fn test_serialize_const_layout_str() { let str = ConstStr::new("hello"); buf = serialize_const(&str, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + 
let buf = buf.as_ref(); assert_eq!( deserialize_const!(ConstStr, buf).unwrap().1.as_str(), "hello" @@ -19,7 +19,7 @@ fn test_serialize_const_layout_nested_str() { let str = ConstStr::new("hello"); buf = serialize_const(&[str, str, str] as &[ConstStr; 3], buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!([ConstStr; 3], buf).unwrap().1, @@ -35,6 +35,6 @@ fn test_serialize_const_layout_nested_str() { fn test_serialize_str_too_little_data() { let mut buf = ConstVec::new(); buf = buf.push(1); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!(deserialize_const!(ConstStr, buf), None); } diff --git a/packages/const-serialize/tests/structs.rs b/packages/const-serialize/tests/structs.rs index 68ce249381..cb1f9847d2 100644 --- a/packages/const-serialize/tests/structs.rs +++ b/packages/const-serialize/tests/structs.rs @@ -96,7 +96,7 @@ fn test_serialize_const_layout_struct_list() { const _ASSERT: () = { let mut buf = ConstVec::new(); buf = serialize_const(&DATA, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let [first, second, third] = match deserialize_const!([OtherStruct; 3], buf) { Some((_, data)) => data, None => panic!("data mismatch"), @@ -109,7 +109,7 @@ fn test_serialize_const_layout_struct_list() { let mut buf = ConstVec::new(); const DATA_AGAIN: [[OtherStruct; 3]; 3] = [DATA, DATA, DATA]; buf = serialize_const(&DATA_AGAIN, buf); - let buf = buf.read(); + let buf = buf.as_ref(); let [first, second, third] = match deserialize_const!([[OtherStruct; 3]; 3], buf) { Some((_, data)) => data, None => panic!("data mismatch"), @@ -128,7 +128,7 @@ fn test_serialize_const_layout_struct_list() { let mut buf = ConstVec::new(); buf = serialize_const(&DATA, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); let (_, data2) = deserialize_const!([OtherStruct; 3], buf).unwrap(); assert_eq!(DATA, data2); } @@ -158,7 +158,41 @@ fn 
test_serialize_const_layout_struct() { let mut buf = ConstVec::new(); buf = serialize_const(&data, buf); println!("{:?}", buf.as_ref()); - let buf = buf.read(); + let buf = buf.as_ref(); let (_, data2) = deserialize_const!(OtherStruct, buf).unwrap(); assert_eq!(data, data2); } + +#[test] +fn test_adding_struct_field_non_breaking() { + #[derive(Debug, PartialEq, SerializeConst)] + struct Initial { + a: u32, + b: u8, + } + + #[derive(Debug, PartialEq, SerializeConst)] + struct New { + c: u32, + b: u8, + a: u32, + } + + let data = New { + a: 0x11111111, + b: 0x22, + c: 0x33333333, + }; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + let buf = buf.as_ref(); + // The new struct should be able to deserialize into the initial struct + let (_, data2) = deserialize_const!(Initial, buf).unwrap(); + assert_eq!( + Initial { + a: data.a, + b: data.b, + }, + data2 + ); +} diff --git a/packages/const-serialize/tests/tuples.rs b/packages/const-serialize/tests/tuples.rs index 43a036c413..d277d826bf 100644 --- a/packages/const-serialize/tests/tuples.rs +++ b/packages/const-serialize/tests/tuples.rs @@ -4,7 +4,7 @@ use const_serialize::{deserialize_const, serialize_const, ConstVec}; fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234u32, 5678u16), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((u32, u16), buf).unwrap().1, (1234u32, 5678u16) @@ -12,7 +12,7 @@ fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234f64, 5678u16, 90u8), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((f64, u16, u8), buf).unwrap().1, (1234f64, 5678u16, 90u8) @@ -20,7 +20,7 @@ fn test_serialize_const_layout_tuple() { let mut buf = ConstVec::new(); buf = serialize_const(&(1234u32, 5678u16, 90u8, 1000000f64), buf); - let buf = buf.read(); + let buf = buf.as_ref(); assert_eq!( deserialize_const!((u32, u16, u8, f64), 
buf).unwrap().1, (1234u32, 5678u16, 90u8, 1000000f64) From 14a70ae18f0d780e46f399cce298f6dd47178de0 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Fri, 7 Nov 2025 09:20:49 -0600 Subject: [PATCH 05/20] dynamically sized arrays --- packages/const-serialize/src/cbor.rs | 6 + packages/const-serialize/src/lib.rs | 261 ++++++++++++++++++-------- packages/const-serialize/tests/str.rs | 7 +- 3 files changed, 195 insertions(+), 79 deletions(-) diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index cf4162f532..22f6dc0254 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -1,3 +1,9 @@ +//! Const serialization utilities for the CBOR data format. +//! +//! Resources: +//! The spec: https://www.rfc-editor.org/rfc/rfc8949.html +//! A playground to check examples against: https://cbor.me/ + use crate::ConstVec; /// Each item in CBOR starts with a leading byte, which determines the type of the item and additional information. diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 2dbbca4215..384abcbd8a 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -1,7 +1,7 @@ #![doc = include_str!("../README.md")] #![warn(missing_docs)] -use std::{char, mem::MaybeUninit}; +use std::{char, hash::Hash, mem::MaybeUninit}; mod cbor; mod const_vec; @@ -124,6 +124,35 @@ impl ListLayout { } } +/// The layout for a dynamically sized array. The array layout is just a length and an item layout. 
+#[derive(Debug, Copy, Clone)] +pub struct ArrayLayout { + size: usize, + len_offset: usize, + len_layout: PrimitiveLayout, + data_offset: usize, + data_layout: ListLayout, +} + +impl ArrayLayout { + /// Create a new array layout + pub const fn new( + size: usize, + len_offset: usize, + len_layout: PrimitiveLayout, + data_offset: usize, + data_layout: ListLayout, + ) -> Self { + Self { + size, + len_offset, + len_layout, + data_offset, + data_layout, + } + } +} + /// The layout for a primitive type. The bytes will be reversed if the target is big endian. #[derive(Debug, Copy, Clone)] pub struct PrimitiveLayout { @@ -135,6 +164,37 @@ impl PrimitiveLayout { pub const fn new(size: usize) -> Self { Self { size } } + + /// Read the value from the given pointer + pub const unsafe fn read(self, byte_ptr: *const u8) -> u32 { + let mut value = 0; + let mut offset = 0; + while offset < self.size { + // If the bytes are reversed, walk backwards from the end of the number when pushing bytes + let byte = if cfg!(target_endian = "big") { + unsafe { + byte_ptr + .wrapping_byte_add((self.size - offset - 1) as _) + .read() + } + } else { + unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } + }; + value |= (byte as u32) << (offset * 8); + offset += 1; + } + value + } + + /// Write the value to the given buffer + pub const fn write(self, value: u32, out: &mut [MaybeUninit]) { + let bytes = value.to_ne_bytes(); + let mut offset = 0; + while offset < self.size { + out[offset] = MaybeUninit::new(bytes[offset]); + offset += 1; + } + } } /// The layout for a type. This layout defines a sequence of locations and reversed or not bytes. These bytes will be copied from during serialization and copied into during deserialization. 
@@ -148,6 +208,8 @@ pub enum Layout { List(ListLayout), /// A primitive layout Primitive(PrimitiveLayout), + /// A dynamically sized array layout + Array(ArrayLayout), } impl Layout { @@ -157,6 +219,7 @@ impl Layout { Layout::Enum(layout) => layout.size, Layout::Struct(layout) => layout.size, Layout::List(layout) => layout.len * layout.item_layout.size(), + Layout::Array(layout) => layout.size, Layout::Primitive(layout) => layout.size, } } @@ -236,9 +299,9 @@ impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: const MAX_STR_SIZE: usize = 256; /// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time -#[derive(Eq, PartialEq, PartialOrd, Clone, Copy, Hash)] +#[derive(Clone, Copy, Debug)] pub struct ConstStr { - bytes: [u8; MAX_STR_SIZE], + bytes: [MaybeUninit; MAX_STR_SIZE], len: u32, } @@ -269,27 +332,19 @@ mod serde_bytes { } unsafe impl SerializeConst for ConstStr { - const MEMORY_LAYOUT: Layout = Layout::Struct(StructLayout { + const MEMORY_LAYOUT: Layout = Layout::Array(ArrayLayout { size: std::mem::size_of::(), - data: &[ - StructFieldLayout::new( - "bytes", - std::mem::offset_of!(Self, bytes), - Layout::List(ListLayout { - len: MAX_STR_SIZE, - item_layout: &Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::(), - }), - }), - ), - StructFieldLayout::new( - "len", - std::mem::offset_of!(Self, len), - Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::(), - }), - ), - ], + data_offset: std::mem::offset_of!(Self, bytes), + data_layout: ListLayout { + len: MAX_STR_SIZE, + item_layout: &Layout::Primitive(PrimitiveLayout { + size: std::mem::size_of::(), + }), + }, + len_offset: std::mem::offset_of!(Self, len), + len_layout: PrimitiveLayout { + size: std::mem::size_of::(), + }, }); } @@ -297,10 +352,10 @@ impl ConstStr { /// Create a new constant string pub const fn new(s: &str) -> Self { let str_bytes = s.as_bytes(); - let mut bytes = [0; MAX_STR_SIZE]; + let mut 
bytes = [MaybeUninit::uninit(); MAX_STR_SIZE]; let mut i = 0; while i < str_bytes.len() { - bytes[i] = str_bytes[i]; + bytes[i].write(str_bytes[i]); i += 1; } Self { @@ -311,7 +366,10 @@ impl ConstStr { /// Get a reference to the string pub const fn as_str(&self) -> &str { - let str_bytes = self.bytes.split_at(self.len as usize).0; + let str_bytes = unsafe { + &*(self.bytes.split_at(self.len as usize).0 as *const [MaybeUninit] + as *const [u8]) + }; match std::str::from_utf8(str_bytes) { Ok(s) => s, Err(_) => panic!( @@ -352,7 +410,7 @@ impl ConstStr { let new_len = len as usize + str_bytes.len(); let mut i = 0; while i < str_bytes.len() { - bytes[len as usize + i] = str_bytes[i]; + bytes[len as usize + i].write(str_bytes[i]); i += 1; } Self { @@ -363,19 +421,7 @@ impl ConstStr { /// Split the string at a byte index. The byte index must be a char boundary pub const fn split_at(self, index: usize) -> (Self, Self) { - let (left, right) = self.bytes.split_at(index); - let left = match std::str::from_utf8(left) { - Ok(s) => s, - Err(_) => { - panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") - } - }; - let right = match std::str::from_utf8(right) { - Ok(s) => s, - Err(_) => { - panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") - } - }; + let (left, right) = self.as_str().split_at(index); (Self::new(left), Self::new(right)) } @@ -478,9 +524,29 @@ impl ConstStr { } } -impl std::fmt::Debug for ConstStr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self.as_str()) +impl PartialEq for ConstStr { + fn eq(&self, other: &Self) -> bool { + self.as_str() == other.as_str() + } +} + +impl Eq for ConstStr {} + +impl PartialOrd for ConstStr { + fn partial_cmp(&self, other: &Self) -> Option { + self.as_str().partial_cmp(other.as_str()) + } +} + +impl Ord for ConstStr { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl Hash for 
ConstStr { + fn hash(&self, state: &mut H) { + self.as_str().hash(state); } } @@ -602,7 +668,7 @@ fn fuzz_utf8_byte_to_char_len() { } /// Serialize a struct that is stored at the pointer passed in -const fn serialize_const_struct( +const unsafe fn serialize_const_struct( ptr: *const (), to: ConstVec, layout: &StructLayout, @@ -626,29 +692,13 @@ const fn serialize_const_struct( } /// Serialize an enum that is stored at the pointer passed in -const fn serialize_const_enum( +const unsafe fn serialize_const_enum( ptr: *const (), mut to: ConstVec, layout: &EnumLayout, ) -> ConstVec { - let mut discriminant = 0; - let byte_ptr = ptr as *const u8; - let mut offset = 0; - while offset < layout.discriminant.size { - // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - let byte = if cfg!(target_endian = "big") { - unsafe { - byte_ptr - .wrapping_byte_add((layout.discriminant.size - offset - 1) as _) - .read() - } - } else { - unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } - }; - discriminant |= (byte as u32) << (offset * 8); - offset += 1; - } + let discriminant = layout.discriminant.read(byte_ptr); let mut i = 0; while i < layout.variants.len() { @@ -669,7 +719,7 @@ const fn serialize_const_enum( } /// Serialize a primitive type that is stored at the pointer passed in -const fn serialize_const_primitive( +const unsafe fn serialize_const_primitive( ptr: *const (), to: ConstVec, layout: &PrimitiveLayout, @@ -695,7 +745,7 @@ const fn serialize_const_primitive( } /// Serialize a constant sized array that is stored at the pointer passed in -const fn serialize_const_list( +const unsafe fn serialize_const_list( ptr: *const (), mut to: ConstVec, layout: &ListLayout, @@ -711,12 +761,39 @@ const fn serialize_const_list( to } +/// Serialize a dynamically sized array that is stored at the pointer passed in +const unsafe fn serialize_const_array( + ptr: *const (), + mut to: ConstVec, + layout: &ArrayLayout, +) -> ConstVec { + // Read 
the length of the array + let len_ptr = ptr.wrapping_byte_offset(layout.len_offset as _); + let len = layout.len_layout.read(len_ptr as *const u8) as usize; + + let data_ptr = ptr.wrapping_byte_offset(layout.data_offset as _); + let item_layout = layout.data_layout.item_layout; + let mut i = 0; + to = write_array(to, len); + while i < len { + let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); + to = serialize_const_ptr(item, to, item_layout); + i += 1; + } + to +} + /// Serialize a pointer to a type that is stored at the pointer passed in -const fn serialize_const_ptr(ptr: *const (), to: ConstVec, layout: &Layout) -> ConstVec { +const unsafe fn serialize_const_ptr( + ptr: *const (), + to: ConstVec, + layout: &Layout, +) -> ConstVec { match layout { Layout::Enum(layout) => serialize_const_enum(ptr, to, layout), Layout::Struct(layout) => serialize_const_struct(ptr, to, layout), Layout::List(layout) => serialize_const_list(ptr, to, layout), + Layout::Array(layout) => serialize_const_array(ptr, to, layout), Layout::Primitive(layout) => serialize_const_primitive(ptr, to, layout), } } @@ -746,7 +823,8 @@ const fn serialize_const_ptr(ptr: *const (), to: ConstVec, layout: &Layout) #[must_use = "The data is serialized into the returned buffer"] pub const fn serialize_const(data: &T, to: ConstVec) -> ConstVec { let ptr = data as *const T as *const (); - serialize_const_ptr(ptr, to, &T::MEMORY_LAYOUT) + // SAFETY: The pointer is valid and the layout is correct + unsafe { serialize_const_ptr(ptr, to, &T::MEMORY_LAYOUT) } } /// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. @@ -829,13 +907,7 @@ const fn deserialize_const_enum<'a>( name, data, tag, .. 
} = &layout.variants[i]; if str_eq(deserilized_name, *name) { - // Write the tag to the output buffer - let tag_bytes = tag.to_ne_bytes(); - let mut offset = 0; - while offset < layout.discriminant.size { - out[offset] = MaybeUninit::new(tag_bytes[offset]); - offset += 1; - } + layout.discriminant.write(*tag, out); let Some((_, out)) = out.split_at_mut_checked(layout.variants_offset) else { return None; }; @@ -861,11 +933,11 @@ const fn deserialize_const_list<'a>( mut out: &mut [MaybeUninit], ) -> Option<&'a [u8]> { let item_layout = layout.item_layout; - let Ok((len, mut from)) = take_array(from) else { + let Ok((_, mut from)) = take_array(from) else { return None; }; let mut i = 0; - while i < len { + while i < layout.len { let Some(new_from) = deserialize_const_ptr(from, item_layout, out) else { return None; }; @@ -879,6 +951,44 @@ const fn deserialize_const_list<'a>( Some(from) } +/// Deserialize a array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. +const fn deserialize_const_array<'a>( + from: &'a [u8], + layout: &ArrayLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Ok((len, mut from)) = take_array(from) else { + return None; + }; + + let Some((_, len_out)) = out.split_at_mut_checked(layout.len_offset) else { + return None; + }; + + // Write out the length of the array + layout.len_layout.write(len as u32, len_out); + + let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + + let item_layout = layout.data_layout.item_layout; + let mut i = 0; + while i < len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { + return None; + }; + let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + data_out = item_out; + from = new_from; + i += 1; + } + + Some(from) +} + /// Deserialize a type into the out buffer at the offset passed in. 
Returns a new version of the buffer with the data added. const fn deserialize_const_ptr<'a>( from: &'a [u8], @@ -889,6 +999,7 @@ const fn deserialize_const_ptr<'a>( Layout::Enum(layout) => deserialize_const_enum(from, layout, out), Layout::Struct(layout) => deserialize_const_struct(from, layout, out), Layout::List(layout) => deserialize_const_list(from, layout, out), + Layout::Array(layout) => deserialize_const_array(from, layout, out), Layout::Primitive(layout) => deserialize_const_primitive(from, layout, out), } } diff --git a/packages/const-serialize/tests/str.rs b/packages/const-serialize/tests/str.rs index 51ca7c665f..d2608f66cb 100644 --- a/packages/const-serialize/tests/str.rs +++ b/packages/const-serialize/tests/str.rs @@ -7,10 +7,9 @@ fn test_serialize_const_layout_str() { buf = serialize_const(&str, buf); println!("{:?}", buf.as_ref()); let buf = buf.as_ref(); - assert_eq!( - deserialize_const!(ConstStr, buf).unwrap().1.as_str(), - "hello" - ); + let str = deserialize_const!(ConstStr, buf).unwrap().1; + eprintln!("{str:?}"); + assert_eq!(str.as_str(), "hello"); } #[test] From 58a18c2e7ab935f7607f1431c525c39661f225d1 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Fri, 7 Nov 2025 09:28:38 -0600 Subject: [PATCH 06/20] smaller str encoding --- packages/const-serialize/src/lib.rs | 74 ++++++++++++++++++----------- 1 file changed, 46 insertions(+), 28 deletions(-) diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 384abcbd8a..b8b3ff5047 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -10,8 +10,8 @@ pub use const_serialize_macro::SerializeConst; pub use const_vec::ConstVec; use crate::cbor::{ - str_eq, take_array, take_map, take_number, take_str, write_array, write_map, write_map_key, - write_number, + str_eq, take_array, take_bytes, take_map, take_number, take_str, write_array, write_bytes, + write_map, write_map_key, write_number, }; /// Plain old data for a field. 
Stores the offset of the field in the struct and the layout of the field. @@ -773,12 +773,17 @@ const unsafe fn serialize_const_array( let data_ptr = ptr.wrapping_byte_offset(layout.data_offset as _); let item_layout = layout.data_layout.item_layout; - let mut i = 0; - to = write_array(to, len); - while i < len { - let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); - to = serialize_const_ptr(item, to, item_layout); - i += 1; + if item_layout.size() == 1 { + let slice = std::slice::from_raw_parts(data_ptr as *const u8, len); + to = write_bytes(to, slice); + } else { + let mut i = 0; + to = write_array(to, len); + while i < len { + let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); + to = serialize_const_ptr(item, to, item_layout); + i += 1; + } } to } @@ -957,36 +962,49 @@ const fn deserialize_const_array<'a>( layout: &ArrayLayout, out: &mut [MaybeUninit], ) -> Option<&'a [u8]> { - let Ok((len, mut from)) = take_array(from) else { - return None; - }; - let Some((_, len_out)) = out.split_at_mut_checked(layout.len_offset) else { return None; }; - // Write out the length of the array - layout.len_layout.write(len as u32, len_out); - - let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { - return None; - }; - let item_layout = layout.data_layout.item_layout; - let mut i = 0; - while i < len { - let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { + if item_layout.size() == 1 { + let Ok((bytes, new_from)) = take_bytes(from) else { return None; }; - let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { + // Write out the length of the array + layout.len_layout.write(bytes.len() as u32, len_out); + let Some((_, data_out)) = out.split_at_mut_checked(layout.data_offset) else { return None; }; - data_out = item_out; - from = new_from; - i += 1; + let mut offset = 0; + while offset < bytes.len() { + data_out[offset].write(bytes[offset]); + offset += 
1; + } + Some(new_from) + } else { + let Ok((len, mut from)) = take_array(from) else { + return None; + }; + // Write out the length of the array + layout.len_layout.write(len as u32, len_out); + let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + let mut i = 0; + while i < len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { + return None; + }; + let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + data_out = item_out; + from = new_from; + i += 1; + } + Some(from) } - - Some(from) } /// Deserialize a type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. From 4a4260558f7d6b9be118055e587a4caf75eccbdc Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Tue, 11 Nov 2025 13:26:24 -0600 Subject: [PATCH 07/20] swap array and list and add more documentation --- packages/cli/src/build/assets.rs | 2 +- packages/const-serialize/README.md | 2 +- packages/const-serialize/src/cbor.rs | 349 ++++--- packages/const-serialize/src/lib.rs | 950 +----------------- packages/const-serialize/tests/str.rs | 2 + packages/manganis/manganis-core/src/asset.rs | 2 +- .../manganis/manganis/src/macro_helpers.rs | 2 +- 7 files changed, 233 insertions(+), 1076 deletions(-) diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index 4ca40f3a53..4e03320338 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -316,7 +316,7 @@ pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result`. These types are difficult to create in const contexts in general - Only types with a well defined memory layout are supported (see and ). `repr(Rust)` enums don't have a well defined layout, so they are not supported. 
`repr(C, u8)` enums can be used instead -- Const rust does not support mutable references or points, so this crate leans heavily on function data structures for data processing. +- Const rust does not support mutable references or points, so this crate leans heavily on functional data structures for data processing. diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index 22f6dc0254..ba28110530 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -1,5 +1,18 @@ //! Const serialization utilities for the CBOR data format. //! +//! ## Overview of the format +//! +//! Const serialize only supports a subset of the CBOR format, specifically the major types: +//! - UnsignedInteger +//! - NegativeInteger +//! - Bytes +//! - String +//! - Array +//! +//! Each item in CBOR starts with a leading byte, which determines the type of the item and additional information. +//! The additional information is encoded in the lower 5 bits of the leading byte and generally indicates either a +//! small number or how many of the next bytes are part of the first number. +//! //! Resources: //! The spec: https://www.rfc-editor.org/rfc/rfc8949.html //! A playground to check examples against: https://cbor.me/ @@ -12,29 +25,21 @@ use crate::ConstVec; #[repr(u8)] #[derive(PartialEq)] enum MajorType { - // Major type 0: - // An unsigned integer in the range 0..264-1 inclusive. The value of the encoded item is the argument itself. For example, the integer 10 is denoted as the one byte 0b000_01010 (major type 0, additional information 10). The integer 500 would be 0b000_11001 (major type 0, additional information 25) followed by the two bytes 0x01f4, which is 500 in decimal. + /// An unsigned integer in the range 0..2^64. The value of the number is encoded in the remaining bits of the leading byte and any additional bytes. UnsignedInteger = 0, - // Major type 1: - // A negative integer in the range -264..-1 inclusive. 
The value of the item is -1 minus the argument. For example, the integer -500 would be 0b001_11001 (major type 1, additional information 25) followed by the two bytes 0x01f3, which is 499 in decimal. + /// A negative integer in the range -2^64..-1. The value of the number is encoded in the remaining bits of the leading byte and any additional bytes NegativeInteger = 1, - // Major type 2: - // A byte string. The number of bytes in the string is equal to the argument. For example, a byte string whose length is 5 would have an initial byte of 0b010_00101 (major type 2, additional information 5 for the length), followed by 5 bytes of binary content. A byte string whose length is 500 would have 3 initial bytes of 0b010_11001 (major type 2, additional information 25 to indicate a two-byte length) followed by the two bytes 0x01f4 for a length of 500, followed by 500 bytes of binary content. + /// A byte sequence. The number of bytes in the sequence is encoded in the remaining bits of the leading byte and any additional bytes. Bytes = 2, - // Major type 3: - // A text string (Section 2) encoded as UTF-8 [RFC3629]. The number of bytes in the string is equal to the argument. A string containing an invalid UTF-8 sequence is well-formed but invalid (Section 1.2). This type is provided for systems that need to interpret or display human-readable text, and allows the differentiation between unstructured bytes and text that has a specified repertoire (that of Unicode) and encoding (UTF-8). In contrast to formats such as JSON, the Unicode characters in this type are never escaped. Thus, a newline character (U+000A) is always represented in a string as the byte 0x0a, and never as the bytes 0x5c6e (the characters "\" and "n") nor as 0x5c7530303061 (the characters "\", "u", "0", "0", "0", and "a"). + /// A text sequence. The number of bytes in the sequence is encoded in the remaining bits of the leading byte and any additional bytes. 
Text = 3, - // Major type 4: - // An array of data items. In other formats, arrays are also called lists, sequences, or tuples (a "CBOR sequence" is something slightly different, though [RFC8742]). The argument is the number of data items in the array. Items in an array do not need to all be of the same type. For example, an array that contains 10 items of any type would have an initial byte of 0b100_01010 (major type 4, additional information 10 for the length) followed by the 10 remaining items. + /// A dynamically sized array of non-uniform data items. The number of items in the array is encoded in the remaining bits of the leading byte and any additional bytes. Array = 4, - // Major type 5: - // A map of pairs of data items. Maps are also called tables, dictionaries, hashes, or objects (in JSON). A map is comprised of pairs of data items, each pair consisting of a key that is immediately followed by a value. The argument is the number of pairs of data items in the map. For example, a map that contains 9 pairs would have an initial byte of 0b101_01001 (major type 5, additional information 9 for the number of pairs) followed by the 18 remaining items. The first item is the first key, the second item is the first value, the third item is the second key, and so on. Because items in a map come in pairs, their total number is always even: a map that contains an odd number of items (no value data present after the last key data item) is not well-formed. A map that has duplicate keys may be well-formed, but it is not valid, and thus it causes indeterminate decoding; see also Section 5.6. + /// A map of pairs of data items. The first item in each pair is the key and the second item is the value. The number of items in the array is encoded in the remaining bits of the leading byte and any additional bytes. 
Map = 5, - // Major type 6: - // A tagged data item ("tag") whose tag number, an integer in the range 0..264-1 inclusive, is the argument and whose enclosed data item (tag content) is the single encoded data item that follows the head. See Section 3.4. + /// Tagged values - not supported Tagged = 6, - // Major type 7: - // Floating-point numbers and simple values, as well as the "break" stop code. See Section 3.3. + /// Floating point values - not supported Float = 7, } @@ -64,10 +69,15 @@ const fn item_length(bytes: &[u8]) -> Result { }; let major = MajorType::from_byte(*head); let additional_information = *head & MajorType::MASK; - match major { + let length_of_item = match major { + // The length of the number is the total of: + // - The length of the number (which may be 0 if the number is encoded in additional information) MajorType::UnsignedInteger | MajorType::NegativeInteger => { - Ok(1 + get_length_of_number(additional_information) as usize) + get_length_of_number(additional_information) as usize } + // The length of the text or bytes is the total of: + // - The length of the number that denotes the length of the text or bytes + // - The length of the text or bytes themselves MajorType::Text | MajorType::Bytes => { let length_of_number = get_length_of_number(additional_information); let Ok((length_of_bytes, _)) = @@ -75,8 +85,11 @@ const fn item_length(bytes: &[u8]) -> Result { else { return Err(()); }; - Ok(1 + length_of_number as usize + length_of_bytes as usize) + length_of_number as usize + length_of_bytes as usize } + // The length of the map is the total of: + // - The length of the number that denotes the number of items + // - The length of the pairs of items themselves MajorType::Array | MajorType::Map => { let length_of_number = get_length_of_number(additional_information); let Ok((length_of_items, _)) = @@ -96,47 +109,15 @@ const fn item_length(bytes: &[u8]) -> Result { total_length += item_length; items_left -= 1; } - Ok(1 + total_length) + 
total_length } - _ => Err(()), - } -} - -#[test] -fn test_item_length_str() { - let input = [ - 0x61, // text(1) - /**/ 0x31, // "1" - 0x61, // text(1) - /**/ 0x31, // "1" - ]; - let Ok(length) = item_length(&input) else { - panic!("Failed to calculate length"); + _ => return Err(()), }; - assert_eq!(length, 2); -} - -#[test] -fn test_item_length_map() { - #[rustfmt::skip] - let input = [ - /* map(1) */ 0xA1, - /* text(1) */ 0x61, - /* "A" */ 0x41, - /* map(2) */ 0xA2, - /* text(3) */ 0x63, - /* "one" */ 0x6F, 0x6E, 0x65, - /* unsigned(286331153) */ 0x1A, 0x11, 0x11, 0x11, 0x11, - /* text(3) */ 0x63, - /* "two" */ 0x74, 0x77, 0x6F, - /* unsigned(34) */ 0x18, 0x22, - ]; - let Ok(length) = item_length(&input) else { - panic!("Failed to calculate length"); - }; - assert_eq!(length, input.len()); + let length_of_head = 1; + Ok(length_of_head + length_of_item) } +/// Read a number from the buffer, returning the number and the remaining bytes. pub(crate) const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -160,6 +141,7 @@ pub(crate) const fn take_number(bytes: &[u8]) -> Result<(i64, &[u8]), ()> { } } +/// Write a number to the buffer pub(crate) const fn write_number( vec: ConstVec, number: i64, @@ -170,6 +152,9 @@ pub(crate) const fn write_number( } } +/// Write the major type along with a number to the buffer. The first byte +/// contains both the major type and the additional information which contains +/// either the number itself or the number of extra bytes the number occupies. 
const fn write_major_type_and_u64( vec: ConstVec, major: MajorType, @@ -177,11 +162,14 @@ const fn write_major_type_and_u64( ) -> ConstVec { let major = (major as u8) << 5; match number { + // For numbers less than 24, store the number in the lower bits + // of the first byte 0..24 => { let additional_information = number as u8; let byte = major | additional_information; vec.push(byte) } + // For larger numbers, store the number of extra bytes the number occupies 24.. => { let log2_additional_bytes = log2_bytes_for_number(number); let additional_bytes = 1 << log2_additional_bytes; @@ -198,6 +186,8 @@ const fn write_major_type_and_u64( } } +/// Find the number of bytes required to store a number and return the log2 of the number of bytes. +/// This is the number stored in the additional information field if the number is more than 24. const fn log2_bytes_for_number(number: u64) -> u8 { let required_bytes = ((64 - number.leading_zeros()).div_ceil(8)) as u8; match required_bytes { @@ -208,6 +198,7 @@ const fn log2_bytes_for_number(number: u64) -> u8 { } } +/// Take bytes from a slice and return the bytes and the remaining slice. pub(crate) const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -221,6 +212,7 @@ pub(crate) const fn take_bytes(bytes: &[u8]) -> Result<(&[u8], &[u8]), ()> { } } +/// Write bytes to a buffer and return the new buffer. pub(crate) const fn write_bytes( vec: ConstVec, bytes: &[u8], @@ -229,6 +221,7 @@ pub(crate) const fn write_bytes( vec.extend(bytes) } +/// Take a string from a buffer and return the string and the remaining buffer. pub(crate) const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -248,6 +241,7 @@ pub(crate) const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { } } +/// Write a string to a buffer and return the new buffer. 
pub(crate) const fn write_str( vec: ConstVec, string: &str, @@ -256,6 +250,8 @@ pub(crate) const fn write_str( vec.extend(string.as_bytes()) } +/// Take the length and header of an array from a buffer and return the length and the remaining buffer. +/// You must loop over the elements of the array and parse them outside of this method. pub(crate) const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -272,6 +268,7 @@ pub(crate) const fn take_array(bytes: &[u8]) -> Result<(usize, &[u8]), ()> { } } +/// Write the header and length of an array. pub(crate) const fn write_array( vec: ConstVec, len: usize, @@ -279,6 +276,7 @@ pub(crate) const fn write_array( write_major_type_and_u64(vec, MajorType::Array, len as u64) } +/// Write the header and length of a map. pub(crate) const fn write_map( vec: ConstVec, len: usize, @@ -287,6 +285,7 @@ pub(crate) const fn write_map( write_major_type_and_u64(vec, MajorType::Map, len as u64) } +/// Write the key of a map entry. pub(crate) const fn write_map_key( value: ConstVec, key: &str, @@ -294,6 +293,7 @@ pub(crate) const fn write_map_key( write_str(value, key) } +/// Take a map from the byte slice and return the map reference and the remaining bytes. pub(crate) const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8]), ()> { let [head, rest @ ..] = bytes else { return Err(()); @@ -311,7 +311,7 @@ pub(crate) const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8 let Ok(len) = item_length(after_map) else { return Err(()); }; - let Some((_, rest)) = after_map.split_at_checked(len as usize) else { + let Some((_, rest)) = after_map.split_at_checked(len) else { return Err(()); }; after_map = rest; @@ -323,16 +323,21 @@ pub(crate) const fn take_map<'a>(bytes: &'a [u8]) -> Result<(MapRef<'a>, &'a [u8 } } +/// A reference to a CBOR map. pub(crate) struct MapRef<'a> { + /// The bytes of the map. 
pub(crate) bytes: &'a [u8], + /// The length of the map. pub(crate) len: usize, } impl<'a> MapRef<'a> { + /// Create a new map reference. const fn new(bytes: &'a [u8], len: usize) -> Self { Self { bytes, len } } + /// Find a key in the map and return the buffer associated with it. pub(crate) const fn find(&self, key: &str) -> Result, ()> { let mut bytes = self.bytes; let mut items_left = self.len; @@ -347,7 +352,7 @@ impl<'a> MapRef<'a> { let Ok(len) = item_length(rest) else { return Err(()); }; - let Some((_, rest)) = rest.split_at_checked(len as usize) else { + let Some((_, rest)) = rest.split_at_checked(len) else { return Err(()); }; bytes = rest; @@ -357,6 +362,7 @@ impl<'a> MapRef<'a> { } } +/// Compare two strings for equality at compile time. pub(crate) const fn str_eq(a: &str, b: &str) -> bool { let a_bytes = a.as_bytes(); let b_bytes = b.as_bytes(); @@ -375,6 +381,7 @@ pub(crate) const fn str_eq(a: &str, b: &str) -> bool { true } +/// Take the length from the additional information byte and return it along with the remaining bytes. const fn take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { match additional_information { // If additional_information < 24, the argument's value is the value of the additional information. @@ -384,12 +391,14 @@ const fn take_len_from(rest: &[u8], additional_information: u8) -> Result<(u64, let Ok((number, rest)) = grab_u64(rest, additional_information) else { return Err(()); }; - Ok((number as u64, rest)) + Ok((number, rest)) } _ => Err(()), } } +/// Take a list of bytes from the byte slice and the additional information byte +/// and return the bytes and the remaining bytes. pub(crate) const fn take_bytes_from( rest: &[u8], additional_information: u8, @@ -403,6 +412,7 @@ pub(crate) const fn take_bytes_from( Ok((bytes, rest)) } +/// Find the length of the number based on the additional information byte. 
const fn get_length_of_number(additional_information: u8) -> u8 { match additional_information { 0..24 => 0, @@ -411,6 +421,7 @@ const fn get_length_of_number(additional_information: u8) -> u8 { } } +/// Read a u64 from the byte slice and the additional information byte. const fn grab_u64(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8]), ()> { grab_u64_with_byte_length( rest, @@ -419,6 +430,7 @@ const fn grab_u64(rest: &[u8], additional_information: u8) -> Result<(u64, &[u8] ) } +/// Read a u64 from the byte slice and the additional information byte along with the byte length. const fn grab_u64_with_byte_length( mut rest: &[u8], byte_length: u8, @@ -442,106 +454,147 @@ const fn grab_u64_with_byte_length( } } -#[test] -fn test_parse_byte() { - for byte in 0..=255 { - let bytes = if byte < 24 { - [byte | 0b00000000, 0] - } else { - [0b00000000 | 24, byte] - }; - let (item, _) = take_number(&bytes).unwrap(); - assert_eq!(item, byte as _); - } - for byte in 1..=255 { - let bytes = if byte < 24 { - [byte - 1 | 0b0010_0000, 0] - } else { - [0b0010_0000 | 24, byte - 1] - }; - let (item, _) = take_number(&bytes).unwrap(); - assert_eq!(item, -(byte as i64)); - } -} +#[cfg(test)] +mod tests { + use super::*; -#[test] -fn test_byte_roundtrip() { - for byte in 0..=255 { - let vec = write_number(ConstVec::new(), byte as _); - println!("{vec:?}"); - let (item, _) = take_number(vec.as_ref()).unwrap(); - assert_eq!(item, byte as _); - } - for byte in 0..=255 { - let vec = write_number(ConstVec::new(), -(byte as i64)); - let (item, _) = take_number(vec.as_ref()).unwrap(); - assert_eq!(item, -(byte as i64)); + #[test] + fn test_parse_byte() { + for byte in 0..=255 { + let bytes = if byte < 24 { + [byte | 0b00000000, 0] + } else { + [0b00000000 | 24, byte] + }; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 1..=255 { + let bytes = if byte < 24 { + [byte - 1 | 0b0010_0000, 0] + } else { + [0b0010_0000 | 24, byte - 1] + 
}; + let (item, _) = take_number(&bytes).unwrap(); + assert_eq!(item, -(byte as i64)); + } } -} -#[test] -fn test_number_roundtrip() { - for _ in 0..100 { - let value = rand::random::(); - let vec = write_number(ConstVec::new(), value); - let (item, _) = take_number(vec.as_ref()).unwrap(); - assert_eq!(item, value); + #[test] + fn test_byte_roundtrip() { + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), byte as _); + println!("{vec:?}"); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, byte as _); + } + for byte in 0..=255 { + let vec = write_number(ConstVec::new(), -(byte as i64)); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, -(byte as i64)); + } } -} -#[test] -fn test_bytes_roundtrip() { - for _ in 0..100 { - let len = (rand::random::() % 100) as usize; - let bytes = rand::random::<[u8; 100]>(); - let vec = write_bytes(ConstVec::new(), &bytes[..len]); - let (item, _) = take_bytes(vec.as_ref()).unwrap(); - assert_eq!(item, &bytes[..len]); + #[test] + fn test_number_roundtrip() { + for _ in 0..100 { + let value = rand::random::(); + let vec = write_number(ConstVec::new(), value); + let (item, _) = take_number(vec.as_ref()).unwrap(); + assert_eq!(item, value); + } } -} -#[test] -fn test_array_roundtrip() { - for _ in 0..100 { - let len = (rand::random::() % 100) as usize; - let mut vec = write_array(ConstVec::new(), len); - for i in 0..len { - vec = write_number(vec, i as _); - } - let (len, mut remaining) = take_array(vec.as_ref()).unwrap(); - for i in 0..len { - let (item, rest) = take_number(remaining).unwrap(); - remaining = rest; - assert_eq!(item, i as i64); + #[test] + fn test_bytes_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let bytes = rand::random::<[u8; 100]>(); + let vec = write_bytes(ConstVec::new(), &bytes[..len]); + let (item, _) = take_bytes(vec.as_ref()).unwrap(); + assert_eq!(item, &bytes[..len]); } } -} -#[test] -fn test_map_roundtrip() { - 
use rand::prelude::SliceRandom; - for _ in 0..100 { - let len = (rand::random::() % 10) as usize; - let mut vec = write_map(ConstVec::new(), len); - let mut random_order_indexes = (0..len).collect::>(); - random_order_indexes.shuffle(&mut rand::rng()); - for &i in &random_order_indexes { - vec = write_map_key(vec, &i.to_string()); - vec = write_number(vec, i as _); + #[test] + fn test_array_roundtrip() { + for _ in 0..100 { + let len = (rand::random::() % 100) as usize; + let mut vec = write_array(ConstVec::new(), len); + for i in 0..len { + vec = write_number(vec, i as _); + } + let (len, mut remaining) = take_array(vec.as_ref()).unwrap(); + for i in 0..len { + let (item, rest) = take_number(remaining).unwrap(); + remaining = rest; + assert_eq!(item, i as i64); + } } - println!("len: {}", len); - println!("Map: {:?}", vec); - let (map, remaining) = take_map(vec.as_ref()).unwrap(); - println!("remaining: {:?}", remaining); - assert!(remaining.is_empty()); - for i in 0..len { - let key = i.to_string(); - let key_location = map - .find(&key) - .expect("encoding is valid") - .expect("key exists"); - let (value, _) = take_number(key_location).unwrap(); - assert_eq!(value, i as i64); + } + + #[test] + fn test_map_roundtrip() { + use rand::prelude::SliceRandom; + for _ in 0..100 { + let len = (rand::random::() % 10) as usize; + let mut vec = write_map(ConstVec::new(), len); + let mut random_order_indexes = (0..len).collect::>(); + random_order_indexes.shuffle(&mut rand::rng()); + for &i in &random_order_indexes { + vec = write_map_key(vec, &i.to_string()); + vec = write_number(vec, i as _); + } + println!("len: {}", len); + println!("Map: {:?}", vec); + let (map, remaining) = take_map(vec.as_ref()).unwrap(); + println!("remaining: {:?}", remaining); + assert!(remaining.is_empty()); + for i in 0..len { + let key = i.to_string(); + let key_location = map + .find(&key) + .expect("encoding is valid") + .expect("key exists"); + let (value, _) = 
take_number(key_location).unwrap(); + assert_eq!(value, i as i64); + } } } + + #[test] + fn test_item_length_str() { + #[rustfmt::skip] + let input = [ + /* text(1) */ 0x61, + /* "1" */ 0x31, + /* text(1) */ 0x61, + /* "1" */ 0x31, + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to calculate length"); + }; + assert_eq!(length, 2); + } + + #[test] + fn test_item_length_map() { + #[rustfmt::skip] + let input = [ + /* map(1) */ 0xA1, + /* text(1) */ 0x61, + /* "A" */ 0x41, + /* map(2) */ 0xA2, + /* text(3) */ 0x63, + /* "one" */ 0x6F, 0x6E, 0x65, + /* unsigned(286331153) */ 0x1A, 0x11, 0x11, 0x11, 0x11, + /* text(3) */ 0x63, + /* "two" */ 0x74, 0x77, 0x6F, + /* unsigned(34) */ 0x18, 0x22, + ]; + let Ok(length) = item_length(&input) else { + panic!("Failed to calculate length"); + }; + assert_eq!(length, input.len()); + } } diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index b8b3ff5047..1e6608951d 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -1,10 +1,22 @@ #![doc = include_str!("../README.md")] #![warn(missing_docs)] -use std::{char, hash::Hash, mem::MaybeUninit}; +use std::mem::MaybeUninit; mod cbor; mod const_vec; +mod r#enum; +pub use r#enum::*; +mod r#struct; +pub use r#struct::*; +mod primitive; +pub use primitive::*; +mod list; +pub use list::*; +mod array; +pub use array::*; +mod str; +pub use str::*; pub use const_serialize_macro::SerializeConst; pub use const_vec::ConstVec; @@ -14,189 +26,6 @@ use crate::cbor::{ write_map, write_map_key, write_number, }; -/// Plain old data for a field. Stores the offset of the field in the struct and the layout of the field. 
-#[derive(Debug, Copy, Clone)] -pub struct StructFieldLayout { - name: &'static str, - offset: usize, - layout: Layout, -} - -impl StructFieldLayout { - /// Create a new struct field layout - pub const fn new(name: &'static str, offset: usize, layout: Layout) -> Self { - Self { - name, - offset, - layout, - } - } -} - -/// Layout for a struct. The struct layout is just a list of fields with offsets -#[derive(Debug, Copy, Clone)] -pub struct StructLayout { - size: usize, - data: &'static [StructFieldLayout], -} - -impl StructLayout { - /// Create a new struct layout - pub const fn new(size: usize, data: &'static [StructFieldLayout]) -> Self { - Self { size, data } - } -} - -/// The layout for an enum. The enum layout is just a discriminate size and a tag layout. -#[derive(Debug, Copy, Clone)] -pub struct EnumLayout { - size: usize, - discriminant: PrimitiveLayout, - variants_offset: usize, - variants: &'static [EnumVariant], -} - -impl EnumLayout { - /// Create a new enum layout - pub const fn new( - size: usize, - discriminant: PrimitiveLayout, - variants: &'static [EnumVariant], - ) -> Self { - let mut max_align = 1; - let mut i = 0; - while i < variants.len() { - let EnumVariant { align, .. } = &variants[i]; - if *align > max_align { - max_align = *align; - } - i += 1; - } - - let variants_offset_raw = discriminant.size; - let padding = (max_align - (variants_offset_raw % max_align)) % max_align; - let variants_offset = variants_offset_raw + padding; - - assert!(variants_offset % max_align == 0); - - Self { - size, - discriminant, - variants_offset, - variants, - } - } -} - -/// The layout for an enum variant. The enum variant layout is just a struct layout with a tag and alignment. 
-#[derive(Debug, Copy, Clone)] -pub struct EnumVariant { - name: &'static str, - // Note: tags may not be sequential - tag: u32, - data: StructLayout, - align: usize, -} - -impl EnumVariant { - /// Create a new enum variant layout - pub const fn new(name: &'static str, tag: u32, data: StructLayout, align: usize) -> Self { - Self { - name, - tag, - data, - align, - } - } -} - -/// The layout for a constant sized array. The array layout is just a length and an item layout. -#[derive(Debug, Copy, Clone)] -pub struct ListLayout { - len: usize, - item_layout: &'static Layout, -} - -impl ListLayout { - /// Create a new list layout - pub const fn new(len: usize, item_layout: &'static Layout) -> Self { - Self { len, item_layout } - } -} - -/// The layout for a dynamically sized array. The array layout is just a length and an item layout. -#[derive(Debug, Copy, Clone)] -pub struct ArrayLayout { - size: usize, - len_offset: usize, - len_layout: PrimitiveLayout, - data_offset: usize, - data_layout: ListLayout, -} - -impl ArrayLayout { - /// Create a new list layout - pub const fn new( - size: usize, - len_offset: usize, - len_layout: PrimitiveLayout, - data_offset: usize, - data_layout: ListLayout, - ) -> Self { - Self { - size, - len_offset, - len_layout, - data_offset, - data_layout, - } - } -} - -/// The layout for a primitive type. The bytes will be reversed if the target is big endian. 
-#[derive(Debug, Copy, Clone)] -pub struct PrimitiveLayout { - size: usize, -} - -impl PrimitiveLayout { - /// Create a new primitive layout - pub const fn new(size: usize) -> Self { - Self { size } - } - - /// Read the value from the given pointer - pub const unsafe fn read(self, byte_ptr: *const u8) -> u32 { - let mut value = 0; - let mut offset = 0; - while offset < self.size { - // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - let byte = if cfg!(target_endian = "big") { - unsafe { - byte_ptr - .wrapping_byte_add((self.size - offset - 1) as _) - .read() - } - } else { - unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } - }; - value |= (byte as u32) << (offset * 8); - offset += 1; - } - value - } - - /// Write the value to the given buffer - pub const fn write(self, value: u32, out: &mut [MaybeUninit]) { - let bytes = value.to_ne_bytes(); - let mut offset = 0; - while offset < self.size { - out[offset] = MaybeUninit::new(bytes[offset]); - offset += 1; - } - } -} - /// The layout for a type. This layout defines a sequence of locations and reversed or not bytes. These bytes will be copied from during serialization and copied into during deserialization. 
#[derive(Debug, Copy, Clone)] pub enum Layout { @@ -204,12 +33,12 @@ pub enum Layout { Enum(EnumLayout), /// A struct layout Struct(StructLayout), - /// A list layout - List(ListLayout), + /// An array layout + Array(ArrayLayout), /// A primitive layout Primitive(PrimitiveLayout), - /// A dynamically sized array layout - Array(ArrayLayout), + /// A dynamically sized list layout + List(ListLayout), } impl Layout { @@ -218,8 +47,8 @@ impl Layout { match self { Layout::Enum(layout) => layout.size, Layout::Struct(layout) => layout.size, - Layout::List(layout) => layout.len * layout.item_layout.size(), - Layout::Array(layout) => layout.size, + Layout::Array(layout) => layout.len * layout.item_layout.size(), + Layout::List(layout) => layout.size, Layout::Primitive(layout) => layout.size, } } @@ -236,558 +65,6 @@ pub unsafe trait SerializeConst: Sized { const _ASSERT: () = assert!(Self::MEMORY_LAYOUT.size() == std::mem::size_of::()); } -macro_rules! impl_serialize_const { - ($type:ty) => { - unsafe impl SerializeConst for $type { - const MEMORY_LAYOUT: Layout = Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::<$type>(), - }); - } - }; -} - -impl_serialize_const!(u8); -impl_serialize_const!(u16); -impl_serialize_const!(u32); -impl_serialize_const!(u64); -impl_serialize_const!(i8); -impl_serialize_const!(i16); -impl_serialize_const!(i32); -impl_serialize_const!(i64); -impl_serialize_const!(bool); -impl_serialize_const!(f32); -impl_serialize_const!(f64); - -unsafe impl SerializeConst for [T; N] { - const MEMORY_LAYOUT: Layout = Layout::List(ListLayout { - len: N, - item_layout: &T::MEMORY_LAYOUT, - }); -} - -macro_rules! 
impl_serialize_const_tuple { - ($($generic:ident: $generic_number:expr),*) => { - impl_serialize_const_tuple!(@impl ($($generic,)*) = $($generic: $generic_number),*); - }; - (@impl $inner:ty = $($generic:ident: $generic_number:expr),*) => { - unsafe impl<$($generic: SerializeConst),*> SerializeConst for ($($generic,)*) { - const MEMORY_LAYOUT: Layout = { - Layout::Struct(StructLayout { - size: std::mem::size_of::<($($generic,)*)>(), - data: &[ - $( - StructFieldLayout::new(stringify!($generic_number), std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), - )* - ], - }) - }; - } - }; -} - -impl_serialize_const_tuple!(T1: 0); -impl_serialize_const_tuple!(T1: 0, T2: 1); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8); -impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8, T10: 9); - -const MAX_STR_SIZE: usize = 256; - -/// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time -#[derive(Clone, Copy, Debug)] -pub struct ConstStr { - bytes: [MaybeUninit; MAX_STR_SIZE], - len: u32, -} - -#[cfg(feature = "serde")] -mod serde_bytes { - use serde::{Deserialize, Serialize, Serializer}; - - use crate::ConstStr; - - impl Serialize for ConstStr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.as_str()) - } - } - - impl<'de> Deserialize<'de> for ConstStr { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = 
String::deserialize(deserializer)?; - Ok(ConstStr::new(&s)) - } - } -} - -unsafe impl SerializeConst for ConstStr { - const MEMORY_LAYOUT: Layout = Layout::Array(ArrayLayout { - size: std::mem::size_of::(), - data_offset: std::mem::offset_of!(Self, bytes), - data_layout: ListLayout { - len: MAX_STR_SIZE, - item_layout: &Layout::Primitive(PrimitiveLayout { - size: std::mem::size_of::(), - }), - }, - len_offset: std::mem::offset_of!(Self, len), - len_layout: PrimitiveLayout { - size: std::mem::size_of::(), - }, - }); -} - -impl ConstStr { - /// Create a new constant string - pub const fn new(s: &str) -> Self { - let str_bytes = s.as_bytes(); - let mut bytes = [MaybeUninit::uninit(); MAX_STR_SIZE]; - let mut i = 0; - while i < str_bytes.len() { - bytes[i].write(str_bytes[i]); - i += 1; - } - Self { - bytes, - len: str_bytes.len() as u32, - } - } - - /// Get a reference to the string - pub const fn as_str(&self) -> &str { - let str_bytes = unsafe { - &*(self.bytes.split_at(self.len as usize).0 as *const [MaybeUninit] - as *const [u8]) - }; - match std::str::from_utf8(str_bytes) { - Ok(s) => s, - Err(_) => panic!( - "Invalid utf8; ConstStr should only ever be constructed from valid utf8 strings" - ), - } - } - - /// Get the length of the string - pub const fn len(&self) -> usize { - self.len as usize - } - - /// Check if the string is empty - pub const fn is_empty(&self) -> bool { - self.len == 0 - } - - /// Push a character onto the string - pub const fn push(self, byte: char) -> Self { - assert!(byte.is_ascii(), "Only ASCII bytes are supported"); - let (bytes, len) = char_to_bytes(byte); - let (str, _) = bytes.split_at(len); - let Ok(str) = std::str::from_utf8(str) else { - panic!("Invalid utf8; char_to_bytes should always return valid utf8 bytes") - }; - self.push_str(str) - } - - /// Push a str onto the string - pub const fn push_str(self, str: &str) -> Self { - let Self { mut bytes, len } = self; - assert!( - str.len() + len as usize <= MAX_STR_SIZE, - "String is 
too long" - ); - let str_bytes = str.as_bytes(); - let new_len = len as usize + str_bytes.len(); - let mut i = 0; - while i < str_bytes.len() { - bytes[len as usize + i].write(str_bytes[i]); - i += 1; - } - Self { - bytes, - len: new_len as u32, - } - } - - /// Split the string at a byte index. The byte index must be a char boundary - pub const fn split_at(self, index: usize) -> (Self, Self) { - let (left, right) = self.as_str().split_at(index); - (Self::new(left), Self::new(right)) - } - - /// Split the string at the last occurrence of a character - pub const fn rsplit_once(&self, char: char) -> Option<(Self, Self)> { - let str = self.as_str(); - let mut index = str.len() - 1; - // First find the bytes we are searching for - let (char_bytes, len) = char_to_bytes(char); - let (char_bytes, _) = char_bytes.split_at(len); - let bytes = str.as_bytes(); - - // Then walk backwards from the end of the string - loop { - let byte = bytes[index]; - // Look for char boundaries in the string and check if the bytes match - if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { - // Split up the string into three sections: [before_char, in_char, after_char] - let (before_char, after_index) = bytes.split_at(index); - let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); - if in_char.len() != char_boundary_len as usize { - panic!("in_char.len() should always be equal to char_boundary_len as usize") - } - // Check if the bytes for the current char and the target char match - let mut in_char_eq = true; - let mut i = 0; - let min_len = if in_char.len() < char_bytes.len() { - in_char.len() - } else { - char_bytes.len() - }; - while i < min_len { - in_char_eq &= in_char[i] == char_bytes[i]; - i += 1; - } - // If they do, convert the bytes to strings and return the split strings - if in_char_eq { - let Ok(before_char_str) = std::str::from_utf8(before_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when 
the byte is a character boundary") - }; - let Ok(after_char_str) = std::str::from_utf8(after_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - return Some((Self::new(before_char_str), Self::new(after_char_str))); - } - } - match index.checked_sub(1) { - Some(new_index) => index = new_index, - None => return None, - } - } - } - - /// Split the string at the first occurrence of a character - pub const fn split_once(&self, char: char) -> Option<(Self, Self)> { - let str = self.as_str(); - let mut index = 0; - // First find the bytes we are searching for - let (char_bytes, len) = char_to_bytes(char); - let (char_bytes, _) = char_bytes.split_at(len); - let bytes = str.as_bytes(); - - // Then walk forwards from the start of the string - while index < bytes.len() { - let byte = bytes[index]; - // Look for char boundaries in the string and check if the bytes match - if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { - // Split up the string into three sections: [before_char, in_char, after_char] - let (before_char, after_index) = bytes.split_at(index); - let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); - if in_char.len() != char_boundary_len as usize { - panic!("in_char.len() should always be equal to char_boundary_len as usize") - } - // Check if the bytes for the current char and the target char match - let mut in_char_eq = true; - let mut i = 0; - let min_len = if in_char.len() < char_bytes.len() { - in_char.len() - } else { - char_bytes.len() - }; - while i < min_len { - in_char_eq &= in_char[i] == char_bytes[i]; - i += 1; - } - // If they do, convert the bytes to strings and return the split strings - if in_char_eq { - let Ok(before_char_str) = std::str::from_utf8(before_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - let Ok(after_char_str) = 
std::str::from_utf8(after_char) else { - panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") - }; - return Some((Self::new(before_char_str), Self::new(after_char_str))); - } - } - index += 1 - } - None - } -} - -impl PartialEq for ConstStr { - fn eq(&self, other: &Self) -> bool { - self.as_str() == other.as_str() - } -} - -impl Eq for ConstStr {} - -impl PartialOrd for ConstStr { - fn partial_cmp(&self, other: &Self) -> Option { - self.as_str().partial_cmp(other.as_str()) - } -} - -impl Ord for ConstStr { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.as_str().cmp(other.as_str()) - } -} - -impl Hash for ConstStr { - fn hash(&self, state: &mut H) { - self.as_str().hash(state); - } -} - -#[test] -fn test_rsplit_once() { - let str = ConstStr::new("hello world"); - assert_eq!( - str.rsplit_once(' '), - Some((ConstStr::new("hello"), ConstStr::new("world"))) - ); - - let unicode_str = ConstStr::new("hi😀hello😀world😀world"); - assert_eq!( - unicode_str.rsplit_once('😀'), - Some((ConstStr::new("hi😀hello😀world"), ConstStr::new("world"))) - ); - assert_eq!(unicode_str.rsplit_once('❌'), None); - - for _ in 0..100 { - let random_str: String = (0..rand::random::() % 50) - .map(|_| rand::random::()) - .collect(); - let konst = ConstStr::new(&random_str); - let mut seen_chars = std::collections::HashSet::new(); - for char in random_str.chars().rev() { - let (char_bytes, len) = char_to_bytes(char); - let char_bytes = &char_bytes[..len]; - assert_eq!(char_bytes, char.to_string().as_bytes()); - if seen_chars.contains(&char) { - continue; - } - seen_chars.insert(char); - let (correct_left, correct_right) = random_str.rsplit_once(char).unwrap(); - let (left, right) = konst.rsplit_once(char).unwrap(); - println!("splitting {random_str:?} at {char:?}"); - assert_eq!(left.as_str(), correct_left); - assert_eq!(right.as_str(), correct_right); - } - } -} - -const CONTINUED_CHAR_MASK: u8 = 0b10000000; -const 
BYTE_CHAR_BOUNDARIES: [u8; 4] = [0b00000000, 0b11000000, 0b11100000, 0b11110000]; - -// Const version of https://doc.rust-lang.org/src/core/char/methods.rs.html#1765-1797 -const fn char_to_bytes(char: char) -> ([u8; 4], usize) { - let code = char as u32; - let len = char.len_utf8(); - let mut bytes = [0; 4]; - match len { - 1 => { - bytes[0] = code as u8; - } - 2 => { - bytes[0] = ((code >> 6) & 0x1F) as u8 | BYTE_CHAR_BOUNDARIES[1]; - bytes[1] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - 3 => { - bytes[0] = ((code >> 12) & 0x0F) as u8 | BYTE_CHAR_BOUNDARIES[2]; - bytes[1] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[2] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - 4 => { - bytes[0] = ((code >> 18) & 0x07) as u8 | BYTE_CHAR_BOUNDARIES[3]; - bytes[1] = ((code >> 12) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[2] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; - bytes[3] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; - } - _ => panic!( - "encode_utf8: need more than 4 bytes to encode the unicode character, but the buffer has 4 bytes" - ), - }; - (bytes, len) -} - -#[test] -fn fuzz_char_to_bytes() { - use std::char; - for _ in 0..100 { - let char = rand::random::(); - let (bytes, len) = char_to_bytes(char); - let str = std::str::from_utf8(&bytes[..len]).unwrap(); - assert_eq!(char.to_string(), str); - } -} - -const fn utf8_char_boundary_to_char_len(byte: u8) -> Option { - match byte { - 0b00000000..=0b01111111 => Some(1), - 0b11000000..=0b11011111 => Some(2), - 0b11100000..=0b11101111 => Some(3), - 0b11110000..=0b11111111 => Some(4), - _ => None, - } -} - -#[test] -fn fuzz_utf8_byte_to_char_len() { - for _ in 0..100 { - let random_string: String = (0..rand::random::()) - .map(|_| rand::random::()) - .collect(); - let bytes = random_string.as_bytes(); - let chars: std::collections::HashMap<_, _> = random_string.char_indices().collect(); - for (i, byte) in bytes.iter().enumerate() { - match utf8_char_boundary_to_char_len(*byte) { - 
Some(char_len) => { - let char = chars - .get(&i) - .unwrap_or_else(|| panic!("{byte:b} is not a character boundary")); - assert_eq!(char.len_utf8(), char_len as usize); - } - None => { - assert!(!chars.contains_key(&i), "{byte:b} is a character boundary"); - } - } - } - } -} - -/// Serialize a struct that is stored at the pointer passed in -const unsafe fn serialize_const_struct( - ptr: *const (), - to: ConstVec, - layout: &StructLayout, -) -> ConstVec { - let mut i = 0; - let field_count = layout.data.len(); - let mut to = write_map(to, field_count); - while i < field_count { - // Serialize the field at the offset pointer in the struct - let StructFieldLayout { - name, - offset, - layout, - } = &layout.data[i]; - to = write_map_key(to, name); - let field = ptr.wrapping_byte_add(*offset as _); - to = serialize_const_ptr(field, to, layout); - i += 1; - } - to -} - -/// Serialize an enum that is stored at the pointer passed in -const unsafe fn serialize_const_enum( - ptr: *const (), - mut to: ConstVec, - layout: &EnumLayout, -) -> ConstVec { - let byte_ptr = ptr as *const u8; - let discriminant = layout.discriminant.read(byte_ptr); - - let mut i = 0; - while i < layout.variants.len() { - // If the variant is the discriminated one, serialize it - let EnumVariant { - tag, name, data, .. 
- } = &layout.variants[i]; - if discriminant == *tag { - to = write_map(to, 1); - to = write_map_key(to, name); - let data_ptr = ptr.wrapping_byte_offset(layout.variants_offset as _); - to = serialize_const_struct(data_ptr, to, data); - break; - } - i += 1; - } - to -} - -/// Serialize a primitive type that is stored at the pointer passed in -const unsafe fn serialize_const_primitive( - ptr: *const (), - to: ConstVec, - layout: &PrimitiveLayout, -) -> ConstVec { - let ptr = ptr as *const u8; - let mut offset = 0; - let mut i64_bytes = [0u8; 8]; - while offset < layout.size { - // If the bytes are reversed, walk backwards from the end of the number when pushing bytes - let byte = unsafe { - if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - ptr.wrapping_byte_offset((layout.size - offset - 1) as _) - .read() - } else { - ptr.wrapping_byte_offset(offset as _).read() - } - }; - i64_bytes[offset as usize] = byte; - offset += 1; - } - let number = i64::from_ne_bytes(i64_bytes); - write_number(to, number) -} - -/// Serialize a constant sized array that is stored at the pointer passed in -const unsafe fn serialize_const_list( - ptr: *const (), - mut to: ConstVec, - layout: &ListLayout, -) -> ConstVec { - let len = layout.len; - let mut i = 0; - to = write_array(to, len); - while i < len { - let field = ptr.wrapping_byte_offset((i * layout.item_layout.size()) as _); - to = serialize_const_ptr(field, to, layout.item_layout); - i += 1; - } - to -} - -/// Serialize a dynamically sized array that is stored at the pointer passed in -const unsafe fn serialize_const_array( - ptr: *const (), - mut to: ConstVec, - layout: &ArrayLayout, -) -> ConstVec { - // Read the length of the array - let len_ptr = ptr.wrapping_byte_offset(layout.len_offset as _); - let len = layout.len_layout.read(len_ptr as *const u8) as usize; - - let data_ptr = ptr.wrapping_byte_offset(layout.data_offset as _); - let item_layout = layout.data_layout.item_layout; - if item_layout.size() == 1 
{ - let slice = std::slice::from_raw_parts(data_ptr as *const u8, len); - to = write_bytes(to, slice); - } else { - let mut i = 0; - to = write_array(to, len); - while i < len { - let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); - to = serialize_const_ptr(item, to, item_layout); - i += 1; - } - } - to -} - /// Serialize a pointer to a type that is stored at the pointer passed in const unsafe fn serialize_const_ptr( ptr: *const (), @@ -797,8 +74,8 @@ const unsafe fn serialize_const_ptr( match layout { Layout::Enum(layout) => serialize_const_enum(ptr, to, layout), Layout::Struct(layout) => serialize_const_struct(ptr, to, layout), - Layout::List(layout) => serialize_const_list(ptr, to, layout), - Layout::Array(layout) => serialize_const_array(ptr, to, layout), + Layout::Array(layout) => serialize_const_list(ptr, to, layout), + Layout::List(layout) => serialize_const_array(ptr, to, layout), Layout::Primitive(layout) => serialize_const_primitive(ptr, to, layout), } } @@ -832,181 +109,6 @@ pub const fn serialize_const(data: &T, to: ConstVec) -> C unsafe { serialize_const_ptr(ptr, to, &T::MEMORY_LAYOUT) } } -/// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_primitive<'a>( - from: &'a [u8], - layout: &PrimitiveLayout, - out: &mut [MaybeUninit], -) -> Option<&'a [u8]> { - let mut offset = 0; - let Ok((number, from)) = take_number(from) else { - return None; - }; - let bytes = number.to_le_bytes(); - while offset < layout.size { - // If the bytes are reversed, walk backwards from the end of the number when filling in bytes - let byte = bytes[offset]; - if cfg!(any(target_endian = "big", feature = "test-big-endian")) { - out[layout.size - offset - 1] = MaybeUninit::new(byte); - } else { - out[offset] = MaybeUninit::new(byte); - } - offset += 1; - } - Some(from) -} - -/// Deserialize a struct type into the out buffer at the offset passed in. 
Returns a new version of the buffer with the data added. -const fn deserialize_const_struct<'a>( - from: &'a [u8], - layout: &StructLayout, - out: &mut [MaybeUninit], -) -> Option<&'a [u8]> { - let Ok((map, from)) = take_map(from) else { - return None; - }; - let mut i = 0; - while i < layout.data.len() { - // Deserialize the field at the offset pointer in the struct - let StructFieldLayout { - name, - offset, - layout, - } = &layout.data[i]; - let Ok(Some(from)) = map.find(name) else { - return None; - }; - let Some((_, field_bytes)) = out.split_at_mut_checked(*offset) else { - return None; - }; - if deserialize_const_ptr(from, layout, field_bytes).is_none() { - return None; - } - i += 1; - } - Some(from) -} - -/// Deserialize an enum type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_enum<'a>( - from: &'a [u8], - layout: &EnumLayout, - out: &mut [MaybeUninit], -) -> Option<&'a [u8]> { - // First, deserialize the map - let Ok((map, remaining)) = take_map(from) else { - return None; - }; - - // Then get the only field which is the tag - let Ok((deserilized_name, from)) = take_str(&map.bytes) else { - return None; - }; - - // Then, deserialize the variant - let mut i = 0; - let mut matched_variant = false; - while i < layout.variants.len() { - // If the variant is the discriminated one, deserialize it - let EnumVariant { - name, data, tag, .. - } = &layout.variants[i]; - if str_eq(deserilized_name, *name) { - layout.discriminant.write(*tag, out); - let Some((_, out)) = out.split_at_mut_checked(layout.variants_offset) else { - return None; - }; - if deserialize_const_struct(from, data, out).is_none() { - return None; - } - matched_variant = true; - break; - } - i += 1; - } - if !matched_variant { - return None; - } - - Some(remaining) -} - -/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
-const fn deserialize_const_list<'a>( - from: &'a [u8], - layout: &ListLayout, - mut out: &mut [MaybeUninit], -) -> Option<&'a [u8]> { - let item_layout = layout.item_layout; - let Ok((_, mut from)) = take_array(from) else { - return None; - }; - let mut i = 0; - while i < layout.len { - let Some(new_from) = deserialize_const_ptr(from, item_layout, out) else { - return None; - }; - let Some((_, item_out)) = out.split_at_mut_checked(item_layout.size()) else { - return None; - }; - out = item_out; - from = new_from; - i += 1; - } - Some(from) -} - -/// Deserialize a array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -const fn deserialize_const_array<'a>( - from: &'a [u8], - layout: &ArrayLayout, - out: &mut [MaybeUninit], -) -> Option<&'a [u8]> { - let Some((_, len_out)) = out.split_at_mut_checked(layout.len_offset) else { - return None; - }; - - let item_layout = layout.data_layout.item_layout; - if item_layout.size() == 1 { - let Ok((bytes, new_from)) = take_bytes(from) else { - return None; - }; - // Write out the length of the array - layout.len_layout.write(bytes.len() as u32, len_out); - let Some((_, data_out)) = out.split_at_mut_checked(layout.data_offset) else { - return None; - }; - let mut offset = 0; - while offset < bytes.len() { - data_out[offset].write(bytes[offset]); - offset += 1; - } - Some(new_from) - } else { - let Ok((len, mut from)) = take_array(from) else { - return None; - }; - // Write out the length of the array - layout.len_layout.write(len as u32, len_out); - let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { - return None; - }; - let mut i = 0; - while i < len { - let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { - return None; - }; - let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { - return None; - }; - data_out = item_out; - from = new_from; - i += 1; - } - Some(from) - } -} - /// 
Deserialize a type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. const fn deserialize_const_ptr<'a>( from: &'a [u8], @@ -1016,8 +118,8 @@ const fn deserialize_const_ptr<'a>( match layout { Layout::Enum(layout) => deserialize_const_enum(from, layout, out), Layout::Struct(layout) => deserialize_const_struct(from, layout, out), - Layout::List(layout) => deserialize_const_list(from, layout, out), - Layout::Array(layout) => deserialize_const_array(from, layout, out), + Layout::Array(layout) => deserialize_const_list(from, layout, out), + Layout::List(layout) => deserialize_const_array(from, layout, out), Layout::Primitive(layout) => deserialize_const_primitive(from, layout, out), } } @@ -1064,9 +166,9 @@ macro_rules! deserialize_const { /// # Safety /// N must be `std::mem::size_of::()` #[must_use = "The data is deserialized from the input buffer"] -pub const unsafe fn deserialize_const_raw<'a, const N: usize, T: SerializeConst>( - from: &'a [u8], -) -> Option<(&'a [u8], T)> { +pub const unsafe fn deserialize_const_raw( + from: &[u8], +) -> Option<(&[u8], T)> { // Create uninitized memory with the size of the type let mut out = [MaybeUninit::uninit(); N]; // Fill in the bytes into the buffer for the type diff --git a/packages/const-serialize/tests/str.rs b/packages/const-serialize/tests/str.rs index d2608f66cb..4a11deeb41 100644 --- a/packages/const-serialize/tests/str.rs +++ b/packages/const-serialize/tests/str.rs @@ -7,6 +7,7 @@ fn test_serialize_const_layout_str() { buf = serialize_const(&str, buf); println!("{:?}", buf.as_ref()); let buf = buf.as_ref(); + assert!(buf.len() < 10); let str = deserialize_const!(ConstStr, buf).unwrap().1; eprintln!("{str:?}"); assert_eq!(str.as_str(), "hello"); @@ -18,6 +19,7 @@ fn test_serialize_const_layout_nested_str() { let str = ConstStr::new("hello"); buf = serialize_const(&[str, str, str] as &[ConstStr; 3], buf); println!("{:?}", buf.as_ref()); + assert!(buf.len() < 30); let 
buf = buf.as_ref(); assert_eq!( diff --git a/packages/manganis/manganis-core/src/asset.rs b/packages/manganis/manganis-core/src/asset.rs index 92c543599a..fabae8332e 100644 --- a/packages/manganis/manganis-core/src/asset.rs +++ b/packages/manganis/manganis-core/src/asset.rs @@ -140,7 +140,7 @@ impl Asset { let byte = unsafe { std::ptr::read_volatile(ptr.add(byte)) }; bytes = bytes.push(byte); } - let read = bytes.read(); + let read = bytes.as_ref(); deserialize_const!(BundledAsset, read).expect("Failed to deserialize asset. Make sure you built with the matching version of the Dioxus CLI").1 } diff --git a/packages/manganis/manganis/src/macro_helpers.rs b/packages/manganis/manganis/src/macro_helpers.rs index 984461b031..8b7dba7981 100644 --- a/packages/manganis/manganis/src/macro_helpers.rs +++ b/packages/manganis/manganis/src/macro_helpers.rs @@ -36,7 +36,7 @@ pub const fn serialize_asset(asset: &BundledAsset) -> ConstVec { /// Deserialize a const buffer into a BundledAsset pub const fn deserialize_asset(bytes: &[u8]) -> BundledAsset { let bytes = ConstVec::new().extend(bytes); - match const_serialize::deserialize_const!(BundledAsset, bytes.read()) { + match const_serialize::deserialize_const!(BundledAsset, bytes.as_ref()) { Some((_, asset)) => asset, None => panic!("Failed to deserialize asset. 
This may be caused by a mismatch between your dioxus and dioxus-cli versions"), } From d673846a4377eac6138d07ca4fc43a5198b18088 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Tue, 11 Nov 2025 13:26:59 -0600 Subject: [PATCH 08/20] move serialization and deserialization for each struct together --- packages/const-serialize/src/array.rs | 64 ++++ packages/const-serialize/src/enum.rs | 135 ++++++++ packages/const-serialize/src/list.rs | 119 +++++++ packages/const-serialize/src/primitive.rs | 118 +++++++ packages/const-serialize/src/str.rs | 373 ++++++++++++++++++++++ packages/const-serialize/src/struct.rs | 120 +++++++ 6 files changed, 929 insertions(+) create mode 100644 packages/const-serialize/src/array.rs create mode 100644 packages/const-serialize/src/enum.rs create mode 100644 packages/const-serialize/src/list.rs create mode 100644 packages/const-serialize/src/primitive.rs create mode 100644 packages/const-serialize/src/str.rs create mode 100644 packages/const-serialize/src/struct.rs diff --git a/packages/const-serialize/src/array.rs b/packages/const-serialize/src/array.rs new file mode 100644 index 0000000000..3966eaabc2 --- /dev/null +++ b/packages/const-serialize/src/array.rs @@ -0,0 +1,64 @@ +use crate::*; + +/// The layout for a constant sized array. The array layout is just a length and an item layout. 
+#[derive(Debug, Copy, Clone)] +pub struct ArrayLayout { + pub(crate) len: usize, + pub(crate) item_layout: &'static Layout, +} + +impl ArrayLayout { + /// Create a new list layout + pub const fn new(len: usize, item_layout: &'static Layout) -> Self { + Self { len, item_layout } + } +} + +unsafe impl SerializeConst for [T; N] { + const MEMORY_LAYOUT: Layout = Layout::Array(ArrayLayout { + len: N, + item_layout: &T::MEMORY_LAYOUT, + }); +} + +/// Serialize a constant sized array that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_list( + ptr: *const (), + mut to: ConstVec, + layout: &ArrayLayout, +) -> ConstVec { + let len = layout.len; + let mut i = 0; + to = write_array(to, len); + while i < len { + let field = ptr.wrapping_byte_offset((i * layout.item_layout.size()) as _); + to = serialize_const_ptr(field, to, layout.item_layout); + i += 1; + } + to +} + +/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_list<'a>( + from: &'a [u8], + layout: &ArrayLayout, + mut out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let item_layout = layout.item_layout; + let Ok((_, mut from)) = take_array(from) else { + return None; + }; + let mut i = 0; + while i < layout.len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, out) else { + return None; + }; + let Some((_, item_out)) = out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + out = item_out; + from = new_from; + i += 1; + } + Some(from) +} diff --git a/packages/const-serialize/src/enum.rs b/packages/const-serialize/src/enum.rs new file mode 100644 index 0000000000..953af21474 --- /dev/null +++ b/packages/const-serialize/src/enum.rs @@ -0,0 +1,135 @@ +use crate::*; + +/// Serialize an enum that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_enum( + ptr: *const (), + mut to: ConstVec, + layout: &EnumLayout, +) -> ConstVec { + let byte_ptr = ptr as *const u8; + let discriminant = layout.discriminant.read(byte_ptr); + + let mut i = 0; + while i < layout.variants.len() { + // If the variant is the discriminated one, serialize it + let EnumVariant { + tag, name, data, .. + } = &layout.variants[i]; + if discriminant == *tag { + to = write_map(to, 1); + to = write_map_key(to, name); + let data_ptr = ptr.wrapping_byte_offset(layout.variants_offset as _); + to = serialize_const_struct(data_ptr, to, data); + break; + } + i += 1; + } + to +} + +/// The layout for an enum. The enum layout is just a discriminate size and a tag layout. 
+#[derive(Debug, Copy, Clone)] +pub struct EnumLayout { + pub(crate) size: usize, + discriminant: PrimitiveLayout, + variants_offset: usize, + variants: &'static [EnumVariant], +} + +impl EnumLayout { + /// Create a new enum layout + pub const fn new( + size: usize, + discriminant: PrimitiveLayout, + variants: &'static [EnumVariant], + ) -> Self { + let mut max_align = 1; + let mut i = 0; + while i < variants.len() { + let EnumVariant { align, .. } = &variants[i]; + if *align > max_align { + max_align = *align; + } + i += 1; + } + + let variants_offset_raw = discriminant.size; + let padding = (max_align - (variants_offset_raw % max_align)) % max_align; + let variants_offset = variants_offset_raw + padding; + + assert!(variants_offset % max_align == 0); + + Self { + size, + discriminant, + variants_offset, + variants, + } + } +} + +/// The layout for an enum variant. The enum variant layout is just a struct layout with a tag and alignment. +#[derive(Debug, Copy, Clone)] +pub struct EnumVariant { + name: &'static str, + // Note: tags may not be sequential + tag: u32, + data: StructLayout, + align: usize, +} + +impl EnumVariant { + /// Create a new enum variant layout + pub const fn new(name: &'static str, tag: u32, data: StructLayout, align: usize) -> Self { + Self { + name, + tag, + data, + align, + } + } +} + +/// Deserialize an enum type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_enum<'a>( + from: &'a [u8], + layout: &EnumLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + // First, deserialize the map + let Ok((map, remaining)) = take_map(from) else { + return None; + }; + + // Then get the only field which is the tag + let Ok((deserilized_name, from)) = take_str(map.bytes) else { + return None; + }; + + // Then, deserialize the variant + let mut i = 0; + let mut matched_variant = false; + while i < layout.variants.len() { + // If the variant is the discriminated one, deserialize it + let EnumVariant { + name, data, tag, .. + } = &layout.variants[i]; + if str_eq(deserilized_name, name) { + layout.discriminant.write(*tag, out); + let Some((_, out)) = out.split_at_mut_checked(layout.variants_offset) else { + return None; + }; + if deserialize_const_struct(from, data, out).is_none() { + return None; + } + matched_variant = true; + break; + } + i += 1; + } + if !matched_variant { + return None; + } + + Some(remaining) +} diff --git a/packages/const-serialize/src/list.rs b/packages/const-serialize/src/list.rs new file mode 100644 index 0000000000..f5a26b8f2a --- /dev/null +++ b/packages/const-serialize/src/list.rs @@ -0,0 +1,119 @@ +use crate::*; + +/// The layout for a dynamically sized array. The array layout is just a length and an item layout. 
+#[derive(Debug, Copy, Clone)] +pub struct ListLayout { + /// The size of the struct backing the array + pub(crate) size: usize, + /// The byte offset of the length field + len_offset: usize, + /// The layout of the length field + len_layout: PrimitiveLayout, + /// The byte offset of the data field + data_offset: usize, + /// The layout of the data field + data_layout: ArrayLayout, +} + +impl ListLayout { + /// Create a new list layout + pub const fn new( + size: usize, + len_offset: usize, + len_layout: PrimitiveLayout, + data_offset: usize, + data_layout: ArrayLayout, + ) -> Self { + Self { + size, + len_offset, + len_layout, + data_offset, + data_layout, + } + } +} + +/// Serialize a dynamically sized array that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_array( + ptr: *const (), + mut to: ConstVec, + layout: &ListLayout, +) -> ConstVec { + // Read the length of the array + let len_ptr = ptr.wrapping_byte_offset(layout.len_offset as _); + let len = layout.len_layout.read(len_ptr as *const u8) as usize; + + let data_ptr = ptr.wrapping_byte_offset(layout.data_offset as _); + let item_layout = layout.data_layout.item_layout; + // If the item size is 1, deserialize as bytes directly + if item_layout.size() == 1 { + let slice = std::slice::from_raw_parts(data_ptr as *const u8, len); + to = write_bytes(to, slice); + } + // Otherwise, deserialize as a list of items + else { + let mut i = 0; + to = write_array(to, len); + while i < len { + let item = data_ptr.wrapping_byte_offset((i * item_layout.size()) as _); + to = serialize_const_ptr(item, to, item_layout); + i += 1; + } + } + to +} + +/// Deserialize a array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_array<'a>( + from: &'a [u8], + layout: &ListLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Some((_, len_out)) = out.split_at_mut_checked(layout.len_offset) else { + return None; + }; + + // If the list items are only one byte, serialize as bytes directly + let item_layout = layout.data_layout.item_layout; + if item_layout.size() == 1 { + let Ok((bytes, new_from)) = take_bytes(from) else { + return None; + }; + // Write out the length of the array + layout.len_layout.write(bytes.len() as u32, len_out); + let Some((_, data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + let mut offset = 0; + while offset < bytes.len() { + data_out[offset].write(bytes[offset]); + offset += 1; + } + Some(new_from) + } + // Otherwise, serialize as an array of objects + else { + let Ok((len, mut from)) = take_array(from) else { + return None; + }; + // Write out the length of the array + layout.len_layout.write(len as u32, len_out); + let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { + return None; + }; + let mut i = 0; + while i < len { + let Some(new_from) = deserialize_const_ptr(from, item_layout, data_out) else { + return None; + }; + let Some((_, item_out)) = data_out.split_at_mut_checked(item_layout.size()) else { + return None; + }; + data_out = item_out; + from = new_from; + i += 1; + } + Some(from) + } +} diff --git a/packages/const-serialize/src/primitive.rs b/packages/const-serialize/src/primitive.rs new file mode 100644 index 0000000000..5f0a6447b8 --- /dev/null +++ b/packages/const-serialize/src/primitive.rs @@ -0,0 +1,118 @@ +use crate::*; +use std::mem::MaybeUninit; + +/// The layout for a primitive type. The bytes will be reversed if the target is big endian. 
+#[derive(Debug, Copy, Clone)] +pub struct PrimitiveLayout { + pub(crate) size: usize, +} + +impl PrimitiveLayout { + /// Create a new primitive layout + pub const fn new(size: usize) -> Self { + Self { size } + } + + /// Read the value from the given pointer + pub const unsafe fn read(self, byte_ptr: *const u8) -> u32 { + let mut value = 0; + let mut offset = 0; + while offset < self.size { + // If the bytes are reversed, walk backwards from the end of the number when pushing bytes + let byte = if cfg!(target_endian = "big") { + unsafe { + byte_ptr + .wrapping_byte_add((self.size - offset - 1) as _) + .read() + } + } else { + unsafe { byte_ptr.wrapping_byte_add(offset as _).read() } + }; + value |= (byte as u32) << (offset * 8); + offset += 1; + } + value + } + + /// Write the value to the given buffer + pub const fn write(self, value: u32, out: &mut [MaybeUninit]) { + let bytes = value.to_ne_bytes(); + let mut offset = 0; + while offset < self.size { + out[offset] = MaybeUninit::new(bytes[offset]); + offset += 1; + } + } +} + +macro_rules! 
impl_serialize_const { + ($type:ty) => { + unsafe impl SerializeConst for $type { + const MEMORY_LAYOUT: Layout = Layout::Primitive(PrimitiveLayout { + size: std::mem::size_of::<$type>(), + }); + } + }; +} + +impl_serialize_const!(u8); +impl_serialize_const!(u16); +impl_serialize_const!(u32); +impl_serialize_const!(u64); +impl_serialize_const!(i8); +impl_serialize_const!(i16); +impl_serialize_const!(i32); +impl_serialize_const!(i64); +impl_serialize_const!(bool); +impl_serialize_const!(f32); +impl_serialize_const!(f64); + +/// Serialize a primitive type that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_primitive( + ptr: *const (), + to: ConstVec, + layout: &PrimitiveLayout, +) -> ConstVec { + let ptr = ptr as *const u8; + let mut offset = 0; + let mut i64_bytes = [0u8; 8]; + while offset < layout.size { + // If the bytes are reversed, walk backwards from the end of the number when pushing bytes + let byte = unsafe { + if cfg!(any(target_endian = "big", feature = "test-big-endian")) { + ptr.wrapping_byte_offset((layout.size - offset - 1) as _) + .read() + } else { + ptr.wrapping_byte_offset(offset as _).read() + } + }; + i64_bytes[offset] = byte; + offset += 1; + } + let number = i64::from_ne_bytes(i64_bytes); + write_number(to, number) +} + +/// Deserialize a primitive type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_primitive<'a>( + from: &'a [u8], + layout: &PrimitiveLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let mut offset = 0; + let Ok((number, from)) = take_number(from) else { + return None; + }; + let bytes = number.to_le_bytes(); + while offset < layout.size { + // If the bytes are reversed, walk backwards from the end of the number when filling in bytes + let byte = bytes[offset]; + if cfg!(any(target_endian = "big", feature = "test-big-endian")) { + out[layout.size - offset - 1] = MaybeUninit::new(byte); + } else { + out[offset] = MaybeUninit::new(byte); + } + offset += 1; + } + Some(from) +} diff --git a/packages/const-serialize/src/str.rs b/packages/const-serialize/src/str.rs new file mode 100644 index 0000000000..3d553eba85 --- /dev/null +++ b/packages/const-serialize/src/str.rs @@ -0,0 +1,373 @@ +use crate::*; +use std::{char, hash::Hash, mem::MaybeUninit}; + +const MAX_STR_SIZE: usize = 256; + +/// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time +#[derive(Clone, Copy, Debug)] +pub struct ConstStr { + pub(crate) bytes: [MaybeUninit; MAX_STR_SIZE], + pub(crate) len: u32, +} + +#[cfg(feature = "serde")] +mod serde_bytes { + use serde::{Deserialize, Serialize, Serializer}; + + use crate::ConstStr; + + impl Serialize for ConstStr { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.as_str()) + } + } + + impl<'de> Deserialize<'de> for ConstStr { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Ok(ConstStr::new(&s)) + } + } +} + +unsafe impl SerializeConst for ConstStr { + const MEMORY_LAYOUT: Layout = Layout::List(ListLayout::new( + std::mem::size_of::(), + std::mem::offset_of!(Self, len), + PrimitiveLayout { + size: std::mem::size_of::(), + }, + std::mem::offset_of!(Self, bytes), + ArrayLayout { + len: 
MAX_STR_SIZE, + item_layout: &Layout::Primitive(PrimitiveLayout { + size: std::mem::size_of::(), + }), + }, + )); +} + +impl ConstStr { + /// Create a new constant string + pub const fn new(s: &str) -> Self { + let str_bytes = s.as_bytes(); + let mut bytes = [MaybeUninit::uninit(); MAX_STR_SIZE]; + let mut i = 0; + while i < str_bytes.len() { + bytes[i].write(str_bytes[i]); + i += 1; + } + Self { + bytes, + len: str_bytes.len() as u32, + } + } + + /// Get a reference to the string + pub const fn as_str(&self) -> &str { + let str_bytes = unsafe { + &*(self.bytes.split_at(self.len as usize).0 as *const [MaybeUninit] + as *const [u8]) + }; + match std::str::from_utf8(str_bytes) { + Ok(s) => s, + Err(_) => panic!( + "Invalid utf8; ConstStr should only ever be constructed from valid utf8 strings" + ), + } + } + + /// Get the length of the string + pub const fn len(&self) -> usize { + self.len as usize + } + + /// Check if the string is empty + pub const fn is_empty(&self) -> bool { + self.len == 0 + } + + /// Push a character onto the string + pub const fn push(self, byte: char) -> Self { + assert!(byte.is_ascii(), "Only ASCII bytes are supported"); + let (bytes, len) = char_to_bytes(byte); + let (str, _) = bytes.split_at(len); + let Ok(str) = std::str::from_utf8(str) else { + panic!("Invalid utf8; char_to_bytes should always return valid utf8 bytes") + }; + self.push_str(str) + } + + /// Push a str onto the string + pub const fn push_str(self, str: &str) -> Self { + let Self { mut bytes, len } = self; + assert!( + str.len() + len as usize <= MAX_STR_SIZE, + "String is too long" + ); + let str_bytes = str.as_bytes(); + let new_len = len as usize + str_bytes.len(); + let mut i = 0; + while i < str_bytes.len() { + bytes[len as usize + i].write(str_bytes[i]); + i += 1; + } + Self { + bytes, + len: new_len as u32, + } + } + + /// Split the string at a byte index. 
The byte index must be a char boundary + pub const fn split_at(self, index: usize) -> (Self, Self) { + let (left, right) = self.as_str().split_at(index); + (Self::new(left), Self::new(right)) + } + + /// Split the string at the last occurrence of a character + pub const fn rsplit_once(&self, char: char) -> Option<(Self, Self)> { + let str = self.as_str(); + let mut index = str.len() - 1; + // First find the bytes we are searching for + let (char_bytes, len) = char_to_bytes(char); + let (char_bytes, _) = char_bytes.split_at(len); + let bytes = str.as_bytes(); + + // Then walk backwards from the end of the string + loop { + let byte = bytes[index]; + // Look for char boundaries in the string and check if the bytes match + if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { + // Split up the string into three sections: [before_char, in_char, after_char] + let (before_char, after_index) = bytes.split_at(index); + let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); + if in_char.len() != char_boundary_len as usize { + panic!("in_char.len() should always be equal to char_boundary_len as usize") + } + // Check if the bytes for the current char and the target char match + let mut in_char_eq = true; + let mut i = 0; + let min_len = if in_char.len() < char_bytes.len() { + in_char.len() + } else { + char_bytes.len() + }; + while i < min_len { + in_char_eq &= in_char[i] == char_bytes[i]; + i += 1; + } + // If they do, convert the bytes to strings and return the split strings + if in_char_eq { + let Ok(before_char_str) = std::str::from_utf8(before_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + let Ok(after_char_str) = std::str::from_utf8(after_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + return Some((Self::new(before_char_str), 
Self::new(after_char_str))); + } + } + match index.checked_sub(1) { + Some(new_index) => index = new_index, + None => return None, + } + } + } + + /// Split the string at the first occurrence of a character + pub const fn split_once(&self, char: char) -> Option<(Self, Self)> { + let str = self.as_str(); + let mut index = 0; + // First find the bytes we are searching for + let (char_bytes, len) = char_to_bytes(char); + let (char_bytes, _) = char_bytes.split_at(len); + let bytes = str.as_bytes(); + + // Then walk forwards from the start of the string + while index < bytes.len() { + let byte = bytes[index]; + // Look for char boundaries in the string and check if the bytes match + if let Some(char_boundary_len) = utf8_char_boundary_to_char_len(byte) { + // Split up the string into three sections: [before_char, in_char, after_char] + let (before_char, after_index) = bytes.split_at(index); + let (in_char, after_char) = after_index.split_at(char_boundary_len as usize); + if in_char.len() != char_boundary_len as usize { + panic!("in_char.len() should always be equal to char_boundary_len as usize") + } + // Check if the bytes for the current char and the target char match + let mut in_char_eq = true; + let mut i = 0; + let min_len = if in_char.len() < char_bytes.len() { + in_char.len() + } else { + char_bytes.len() + }; + while i < min_len { + in_char_eq &= in_char[i] == char_bytes[i]; + i += 1; + } + // If they do, convert the bytes to strings and return the split strings + if in_char_eq { + let Ok(before_char_str) = std::str::from_utf8(before_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + let Ok(after_char_str) = std::str::from_utf8(after_char) else { + panic!("Invalid utf8; utf8_char_boundary_to_char_len should only return Some when the byte is a character boundary") + }; + return Some((Self::new(before_char_str), Self::new(after_char_str))); + } + } + index += 1 + } + None + } 
+} + +impl PartialEq for ConstStr { + fn eq(&self, other: &Self) -> bool { + self.as_str() == other.as_str() + } +} + +impl Eq for ConstStr {} + +impl PartialOrd for ConstStr { + fn partial_cmp(&self, other: &Self) -> Option { + self.as_str().partial_cmp(other.as_str()) + } +} + +impl Ord for ConstStr { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl Hash for ConstStr { + fn hash(&self, state: &mut H) { + self.as_str().hash(state); + } +} + +#[test] +fn test_rsplit_once() { + let str = ConstStr::new("hello world"); + assert_eq!( + str.rsplit_once(' '), + Some((ConstStr::new("hello"), ConstStr::new("world"))) + ); + + let unicode_str = ConstStr::new("hi😀hello😀world😀world"); + assert_eq!( + unicode_str.rsplit_once('😀'), + Some((ConstStr::new("hi😀hello😀world"), ConstStr::new("world"))) + ); + assert_eq!(unicode_str.rsplit_once('❌'), None); + + for _ in 0..100 { + let random_str: String = (0..rand::random::() % 50) + .map(|_| rand::random::()) + .collect(); + let konst = ConstStr::new(&random_str); + let mut seen_chars = std::collections::HashSet::new(); + for char in random_str.chars().rev() { + let (char_bytes, len) = char_to_bytes(char); + let char_bytes = &char_bytes[..len]; + assert_eq!(char_bytes, char.to_string().as_bytes()); + if seen_chars.contains(&char) { + continue; + } + seen_chars.insert(char); + let (correct_left, correct_right) = random_str.rsplit_once(char).unwrap(); + let (left, right) = konst.rsplit_once(char).unwrap(); + println!("splitting {random_str:?} at {char:?}"); + assert_eq!(left.as_str(), correct_left); + assert_eq!(right.as_str(), correct_right); + } + } +} + +const CONTINUED_CHAR_MASK: u8 = 0b10000000; +const BYTE_CHAR_BOUNDARIES: [u8; 4] = [0b00000000, 0b11000000, 0b11100000, 0b11110000]; + +// Const version of https://doc.rust-lang.org/src/core/char/methods.rs.html#1765-1797 +const fn char_to_bytes(char: char) -> ([u8; 4], usize) { + let code = char as u32; + let len = 
char.len_utf8(); + let mut bytes = [0; 4]; + match len { + 1 => { + bytes[0] = code as u8; + } + 2 => { + bytes[0] = ((code >> 6) & 0x1F) as u8 | BYTE_CHAR_BOUNDARIES[1]; + bytes[1] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + 3 => { + bytes[0] = ((code >> 12) & 0x0F) as u8 | BYTE_CHAR_BOUNDARIES[2]; + bytes[1] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[2] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + 4 => { + bytes[0] = ((code >> 18) & 0x07) as u8 | BYTE_CHAR_BOUNDARIES[3]; + bytes[1] = ((code >> 12) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[2] = ((code >> 6) & 0x3F) as u8 | CONTINUED_CHAR_MASK; + bytes[3] = (code & 0x3F) as u8 | CONTINUED_CHAR_MASK; + } + _ => panic!( + "encode_utf8: need more than 4 bytes to encode the unicode character, but the buffer has 4 bytes" + ), + }; + (bytes, len) +} + +#[test] +fn fuzz_char_to_bytes() { + use std::char; + for _ in 0..100 { + let char = rand::random::(); + let (bytes, len) = char_to_bytes(char); + let str = std::str::from_utf8(&bytes[..len]).unwrap(); + assert_eq!(char.to_string(), str); + } +} + +const fn utf8_char_boundary_to_char_len(byte: u8) -> Option { + match byte { + 0b00000000..=0b01111111 => Some(1), + 0b11000000..=0b11011111 => Some(2), + 0b11100000..=0b11101111 => Some(3), + 0b11110000..=0b11111111 => Some(4), + _ => None, + } +} + +#[test] +fn fuzz_utf8_byte_to_char_len() { + for _ in 0..100 { + let random_string: String = (0..rand::random::()) + .map(|_| rand::random::()) + .collect(); + let bytes = random_string.as_bytes(); + let chars: std::collections::HashMap<_, _> = random_string.char_indices().collect(); + for (i, byte) in bytes.iter().enumerate() { + match utf8_char_boundary_to_char_len(*byte) { + Some(char_len) => { + let char = chars + .get(&i) + .unwrap_or_else(|| panic!("{byte:b} is not a character boundary")); + assert_eq!(char.len_utf8(), char_len as usize); + } + None => { + assert!(!chars.contains_key(&i), "{byte:b} is a character boundary"); + } + } + } + } +} 
diff --git a/packages/const-serialize/src/struct.rs b/packages/const-serialize/src/struct.rs new file mode 100644 index 0000000000..a2db822b6a --- /dev/null +++ b/packages/const-serialize/src/struct.rs @@ -0,0 +1,120 @@ +use crate::*; + +/// Plain old data for a field. Stores the offset of the field in the struct and the layout of the field. +#[derive(Debug, Copy, Clone)] +pub struct StructFieldLayout { + name: &'static str, + offset: usize, + layout: Layout, +} + +impl StructFieldLayout { + /// Create a new struct field layout + pub const fn new(name: &'static str, offset: usize, layout: Layout) -> Self { + Self { + name, + offset, + layout, + } + } +} + +/// Layout for a struct. The struct layout is just a list of fields with offsets +#[derive(Debug, Copy, Clone)] +pub struct StructLayout { + pub(crate) size: usize, + pub(crate) data: &'static [StructFieldLayout], +} + +impl StructLayout { + /// Create a new struct layout + pub const fn new(size: usize, data: &'static [StructFieldLayout]) -> Self { + Self { size, data } + } +} + +/// Serialize a struct that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_struct( + ptr: *const (), + to: ConstVec, + layout: &StructLayout, +) -> ConstVec { + let mut i = 0; + let field_count = layout.data.len(); + let mut to = write_map(to, field_count); + while i < field_count { + // Serialize the field at the offset pointer in the struct + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + to = write_map_key(to, name); + let field = ptr.wrapping_byte_add(*offset as _); + to = serialize_const_ptr(field, to, layout); + i += 1; + } + to +} + +/// Deserialize a struct type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_struct<'a>( + from: &'a [u8], + layout: &StructLayout, + out: &mut [MaybeUninit], +) -> Option<&'a [u8]> { + let Ok((map, from)) = take_map(from) else { + return None; + }; + let mut i = 0; + while i < layout.data.len() { + // Deserialize the field at the offset pointer in the struct + let StructFieldLayout { + name, + offset, + layout, + } = &layout.data[i]; + let Ok(Some(from)) = map.find(name) else { + return None; + }; + let Some((_, field_bytes)) = out.split_at_mut_checked(*offset) else { + return None; + }; + if deserialize_const_ptr(from, layout, field_bytes).is_none() { + return None; + } + i += 1; + } + Some(from) +} + +macro_rules! impl_serialize_const_tuple { + ($($generic:ident: $generic_number:expr),*) => { + impl_serialize_const_tuple!(@impl ($($generic,)*) = $($generic: $generic_number),*); + }; + (@impl $inner:ty = $($generic:ident: $generic_number:expr),*) => { + unsafe impl<$($generic: SerializeConst),*> SerializeConst for ($($generic,)*) { + const MEMORY_LAYOUT: Layout = { + Layout::Struct(StructLayout { + size: std::mem::size_of::<($($generic,)*)>(), + data: &[ + $( + StructFieldLayout::new(stringify!($generic_number), std::mem::offset_of!($inner, $generic_number), $generic::MEMORY_LAYOUT), + )* + ], + }) + }; + } + }; +} + +impl_serialize_const_tuple!(T1: 0); +impl_serialize_const_tuple!(T1: 0, T2: 1); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8); +impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: 7, T9: 8, T10: 9); From 
9d3e6b8121a28e8be2ef3e8fb01aa81dbca09e88 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 09:44:06 -0600 Subject: [PATCH 09/20] support both legacy and new assets --- Cargo.lock | 55 +++- Cargo.toml | 8 +- packages/cli/Cargo.toml | 2 + packages/cli/src/build/assets.rs | 249 +++++++++++++++--- packages/const-serialize-macro/src/lib.rs | 70 +++-- .../manganis/manganis-core/src/options.rs | 2 +- .../manganis/manganis-macro/src/linker.rs | 2 +- 7 files changed, 309 insertions(+), 79 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c801749faa..38b78d7939 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4177,12 +4177,22 @@ dependencies = [ name = "const-serialize" version = "0.7.0" dependencies = [ - "const-serialize", - "const-serialize-macro", + "const-serialize 0.7.0", + "const-serialize-macro 0.7.0", "rand 0.9.2", "serde", ] +[[package]] +name = "const-serialize" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd339aa356cc6452308fad2ee56623f900a8e68bc0ab9360a0ddb8270e5640c8" +dependencies = [ + "const-serialize-macro 0.7.1", + "serde", +] + [[package]] name = "const-serialize-macro" version = "0.7.0" @@ -4192,6 +4202,17 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "const-serialize-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "797d158acb331e2a89d696343a27cd39bf7e36aaef33ba4799a5ef1526e24861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", +] + [[package]] name = "const-str" version = "0.3.2" @@ -5359,7 +5380,8 @@ dependencies = [ "clap", "console 0.16.1", "console-subscriber", - "const-serialize", + "const-serialize 0.7.0", + "const-serialize 0.7.1", "convert_case 0.8.0", "crossterm 0.29.0", "ctrlc", @@ -5403,7 +5425,8 @@ dependencies = [ "local-ip-address", "log", "manganis", - "manganis-core", + "manganis-core 0.7.0", + "manganis-core 0.7.1", "memmap", "memoize", "notify", @@ -5475,13 +5498,13 @@ dependencies = 
[ "browserslist-rs 0.19.0", "built 0.8.0", "codemap", - "const-serialize", + "const-serialize 0.7.0", "grass", "image", "imagequant", "lightningcss", "manganis", - "manganis-core", + "manganis-core 0.7.0", "mozjpeg", "object 0.37.3", "png", @@ -5720,7 +5743,7 @@ name = "dioxus-dx-wire-format" version = "0.7.0" dependencies = [ "cargo_metadata", - "manganis-core", + "manganis-core 0.7.0", "serde", "serde_json", "subsecond-types", @@ -10748,8 +10771,8 @@ dependencies = [ name = "manganis" version = "0.7.0" dependencies = [ - "const-serialize", - "manganis-core", + "const-serialize 0.7.0", + "manganis-core 0.7.0", "manganis-macro", ] @@ -10757,7 +10780,7 @@ dependencies = [ name = "manganis-core" version = "0.7.0" dependencies = [ - "const-serialize", + "const-serialize 0.7.0", "dioxus", "dioxus-cli-config", "dioxus-core-types", @@ -10765,6 +10788,16 @@ dependencies = [ "serde", ] +[[package]] +name = "manganis-core" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fbd1fb8c5aabcc54c6b02dbc968e1c89c28f3e543f2789ef9e3ce45dbdf5df" +dependencies = [ + "const-serialize 0.7.1", + "serde", +] + [[package]] name = "manganis-macro" version = "0.7.0" @@ -10772,7 +10805,7 @@ dependencies = [ "dunce", "macro-string", "manganis", - "manganis-core", + "manganis-core 0.7.0", "proc-macro2", "quote", "syn 2.0.108", diff --git a/Cargo.toml b/Cargo.toml index aefefaccd5..daab6df754 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -193,10 +193,13 @@ dioxus-cli-opt = { path = "packages/cli-opt", version = "0.7.0" } dioxus-cli-telemetry = { path = "packages/cli-telemetry", version = "0.7.0" } dioxus-cli-config = { path = "packages/cli-config", version = "0.7.0" } -# const-serializea +# const-serialize const-serialize = { path = "packages/const-serialize", version = "0.7.0" } const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.7.0" } +# The version of const-serialize published with 0.7.0 and 0.7.1 that the CLI should 
still support +const-serialize-07 = { package = "const-serialize", version = "0.7.1" } + # subsecond subsecond-types = { path = "packages/subsecond/subsecond-types", version = "0.7.0" } subsecond = { path = "packages/subsecond/subsecond", version = "0.7.0" } @@ -206,6 +209,9 @@ manganis = { path = "packages/manganis/manganis", version = "0.7.0" } manganis-core = { path = "packages/manganis/manganis-core", version = "0.7.0" } manganis-macro = { path = "packages/manganis/manganis-macro", version = "0.7.0" } +# The version of assets published with 0.7.0 and 0.7.1 that the CLI should still support +manganis-core-07 = { package = "manganis-core", version = "0.7.1" } + # wasm-split wasm-splitter = { path = "packages/wasm-split/wasm-split", version = "0.7.0" } wasm-split-macro = { path = "packages/wasm-split/wasm-split-macro", version = "0.7.0" } diff --git a/packages/cli/Cargo.toml b/packages/cli/Cargo.toml index ddbe943d19..cc4b1ff10c 100644 --- a/packages/cli/Cargo.toml +++ b/packages/cli/Cargo.toml @@ -98,6 +98,7 @@ brotli = "8.0.1" ignore = "0.4.23" env_logger = { workspace = true } const-serialize = { workspace = true, features = ["serde"] } +const-serialize-07 = { workspace = true, features = ["serde"] } tracing-subscriber = { version = "0.3.19", features = [ "std", @@ -122,6 +123,7 @@ log = { version = "0.4", features = ["max_level_off", "release_max_level_off"] } tempfile = "3.19.1" manganis = { workspace = true } manganis-core = { workspace = true } +manganis-core-07 = { workspace = true } target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] } wasm-encoder = "0.235.0" diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index 4e03320338..bbd24af35e 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -11,10 +11,10 @@ //! process in the build system. //! //! We use the same lessons learned from the hot-patching engine which parses the binary file and its -//! 
symbol table to find symbols that match the `__MANGANIS__` prefix. These symbols are ideally data +//! symbol table to find symbols that match the `__ASSETS__` prefix. These symbols are ideally data //! symbols and contain the BundledAsset data type which implements ConstSerialize and ConstDeserialize. //! -//! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __MANGANIS__ +//! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __ASSETS__ //! symbols, which we process here. After reading the metadata directly from the executable, we then //! hash it and write the hash directly into the binary file. //! @@ -23,7 +23,7 @@ //! can be found relative to the current exe. Unfortunately, on android, the `current_exe` path is wrong, //! so the assets are resolved against the "asset root" - which is covered by the asset loader crate. //! -//! Finding the __MANGANIS__ symbols is not quite straightforward when hotpatching, especially on WASM +//! Finding the __ASSETS__ symbols is not quite straightforward when hotpatching, especially on WASM //! since we build and link the module as relocatable, which is not a stable WASM proposal. In this //! implementation, we handle both the non-PIE *and* PIC cases which are rather bespoke to our whole //! build system. 
@@ -35,9 +35,9 @@ use std::{ use crate::Result; use anyhow::{bail, Context}; -use const_serialize::{ConstVec, SerializeConst}; +use const_serialize::{serialize_const, ConstVec, SerializeConst}; use dioxus_cli_opt::AssetManifest; -use manganis::BundledAsset; +use manganis::{AssetOptions, AssetVariant, BundledAsset, ImageFormat, ImageSize}; use object::{File, Object, ObjectSection, ObjectSymbol, ReadCache, ReadRef, Section, Symbol}; use pdb::FallibleIterator; use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; @@ -45,24 +45,191 @@ use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; /// Extract all manganis symbols and their sections from the given object file. fn manganis_symbols<'a, 'b, R: ReadRef<'a>>( file: &'b File<'a, R>, -) -> impl Iterator, Section<'a, 'b, R>)> + 'b { - file.symbols() - .filter(|symbol| { - if let Ok(name) = symbol.name() { - looks_like_manganis_symbol(name) - } else { - false +) -> impl Iterator, Section<'a, 'b, R>)> + 'b { + file.symbols().filter_map(move |symbol| { + let name = symbol.name().ok()?; + let version = looks_like_manganis_symbol(name)?; + let section_index = symbol.section_index()?; + let section = file.section_by_index(section_index).ok()?; + Some((version, symbol, section)) + }) +} + +#[derive(Copy, Clone)] +enum ManganisVersion { + /// The legacy version of the manganis format published with 0.7.0 and 0.7.1 + Legacy, + /// The new version of the manganis format 0.7.2 onward + New, +} + +impl ManganisVersion { + fn size(&self) -> usize { + match self { + ManganisVersion::Legacy => { + ::MEMORY_LAYOUT.size() + } + ManganisVersion::New => BundledAsset::MEMORY_LAYOUT.size(), + } + } + + fn deserialize(&self, data: &[u8]) -> Option { + match self { + ManganisVersion::Legacy => { + let buffer = const_serialize_07::ConstReadBuffer::new(data); + + let (_, legacy_asset) = + const_serialize_07::deserialize_const!(manganis_core_07::BundledAsset, buffer)?; + + Some(legacy_asset_to_modern_asset(&legacy_asset)) + 
} + ManganisVersion::New => { + let (_, asset) = + const_serialize::deserialize_const!(manganis_core::BundledAsset, data)?; + + Some(asset) + } + } + } + + fn serialize(&self, asset: &BundledAsset) -> Vec { + match self { + ManganisVersion::Legacy => { + let legacy_asset = modern_asset_to_legacy_asset(asset); + let buffer = const_serialize_07::serialize_const( + &legacy_asset, + const_serialize_07::ConstVec::new(), + ); + buffer.as_ref().to_vec() + } + ManganisVersion::New => { + let buffer = serialize_const(asset, ConstVec::new()); + buffer.as_ref().to_vec() } - }) - .filter_map(move |symbol| { - let section_index = symbol.section_index()?; - let section = file.section_by_index(section_index).ok()?; - Some((symbol, section)) - }) + } + } } -fn looks_like_manganis_symbol(name: &str) -> bool { - name.contains("__MANGANIS__") +fn legacy_asset_to_modern_asset( + legacy_asset: &manganis_core_07::BundledAsset, +) -> manganis_core::BundledAsset { + let bundled_path = legacy_asset.bundled_path(); + let absolute_path = legacy_asset.absolute_source_path(); + let legacy_options = legacy_asset.options(); + let add_hash = legacy_options.hash_suffix(); + let options = match legacy_options.variant() { + manganis_core_07::AssetVariant::Image(image) => { + let format = match image.format() { + manganis_core_07::ImageFormat::Png => ImageFormat::Png, + manganis_core_07::ImageFormat::Jpg => ImageFormat::Jpg, + manganis_core_07::ImageFormat::Webp => ImageFormat::Webp, + manganis_core_07::ImageFormat::Avif => ImageFormat::Avif, + manganis_core_07::ImageFormat::Unknown => ImageFormat::Unknown, + }; + let size = match image.size() { + manganis_core_07::ImageSize::Automatic => ImageSize::Automatic, + manganis_core_07::ImageSize::Manual { width, height } => { + ImageSize::Manual { width, height } + } + }; + let preload = image.preloaded(); + + AssetOptions::image() + .with_format(format) + .with_size(size) + .with_preload(preload) + .with_hash_suffix(add_hash) + .into_asset_options() + } + 
manganis_core_07::AssetVariant::Folder(_) => AssetOptions::folder() + .with_hash_suffix(add_hash) + .into_asset_options(), + manganis_core_07::AssetVariant::Css(css) => AssetOptions::css() + .with_hash_suffix(add_hash) + .with_minify(css.minified()) + .with_preload(css.preloaded()) + .with_static_head(css.static_head()) + .into_asset_options(), + manganis_core_07::AssetVariant::CssModule(css_module) => AssetOptions::css_module() + .with_hash_suffix(add_hash) + .with_minify(css_module.minified()) + .with_preload(css_module.preloaded()) + .into_asset_options(), + manganis_core_07::AssetVariant::Js(js) => AssetOptions::js() + .with_hash_suffix(add_hash) + .with_minify(js.minified()) + .with_preload(js.preloaded()) + .with_static_head(js.static_head()) + .into_asset_options(), + _ => AssetOptions::builder().into_asset_options(), + }; + + BundledAsset::new(bundled_path, absolute_path, options) +} + +fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07::BundledAsset { + let bundled_path = modern_asset.bundled_path(); + let absolute_path = modern_asset.absolute_source_path(); + let legacy_options = modern_asset.options(); + let add_hash = legacy_options.hash_suffix(); + let options = match legacy_options.variant() { + AssetVariant::Image(image) => { + let format = match image.format() { + ImageFormat::Png => manganis_core_07::ImageFormat::Png, + ImageFormat::Jpg => manganis_core_07::ImageFormat::Jpg, + ImageFormat::Webp => manganis_core_07::ImageFormat::Webp, + ImageFormat::Avif => manganis_core_07::ImageFormat::Avif, + ImageFormat::Unknown => manganis_core_07::ImageFormat::Unknown, + }; + let size = match image.size() { + ImageSize::Automatic => manganis_core_07::ImageSize::Automatic, + ImageSize::Manual { width, height } => { + manganis_core_07::ImageSize::Manual { width, height } + } + }; + let preload = image.preloaded(); + + manganis_core_07::AssetOptions::image() + .with_format(format) + .with_size(size) + .with_preload(preload) + 
.with_hash_suffix(add_hash) + .into_asset_options() + } + AssetVariant::Folder(_) => manganis_core_07::AssetOptions::folder() + .with_hash_suffix(add_hash) + .into_asset_options(), + AssetVariant::Css(css) => manganis_core_07::AssetOptions::css() + .with_hash_suffix(add_hash) + .with_minify(css.minified()) + .with_preload(css.preloaded()) + .with_static_head(css.static_head()) + .into_asset_options(), + AssetVariant::CssModule(css_module) => manganis_core_07::AssetOptions::css_module() + .with_hash_suffix(add_hash) + .with_minify(css_module.minified()) + .with_preload(css_module.preloaded()) + .into_asset_options(), + AssetVariant::Js(js) => manganis_core_07::AssetOptions::js() + .with_hash_suffix(add_hash) + .with_minify(js.minified()) + .with_preload(js.preloaded()) + .with_static_head(js.static_head()) + .into_asset_options(), + _ => manganis_core_07::AssetOptions::builder().into_asset_options(), + }; + + manganis_core_07::BundledAsset::new(bundled_path, absolute_path, options) +} + +fn looks_like_manganis_symbol(name: &str) -> Option { + if name.contains("__MANGANIS__") { + Some(ManganisVersion::Legacy) + } else if name.contains("__ASSETS__") { + Some(ManganisVersion::New) + } else { + None + } } /// Find the offsets of any manganis symbols in the given file. @@ -70,7 +237,7 @@ fn find_symbol_offsets<'a, R: ReadRef<'a>>( path: &Path, file_contents: &[u8], file: &File<'a, R>, -) -> Result> { +) -> Result> { let pdb_file = find_pdb_file(path); match file.format() { @@ -118,7 +285,7 @@ fn find_pdb_file(path: &Path) -> Option { } /// Find the offsets of any manganis symbols in a pdb file. 
-fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { +fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { let pdb_file_handle = std::fs::File::open(pdb_file)?; let mut pdb_file = pdb::PDB::open(pdb_file_handle).context("Failed to open PDB file")?; let Ok(Some(sections)) = pdb_file.sections() else { @@ -142,26 +309,28 @@ fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { }; let name = data.name.to_string(); - if name.contains("__MANGANIS__") { + if let Some(version) = looks_like_manganis_symbol(&name) { let section = sections .get(rva.section as usize - 1) .expect("Section index out of bounds"); - addresses.push((section.pointer_to_raw_data + rva.offset) as u64); + addresses.push((version, (section.pointer_to_raw_data + rva.offset) as u64)); } } Ok(addresses) } /// Find the offsets of any manganis symbols in a native object file. -fn find_native_symbol_offsets<'a, R: ReadRef<'a>>(file: &File<'a, R>) -> Result> { +fn find_native_symbol_offsets<'a, R: ReadRef<'a>>( + file: &File<'a, R>, +) -> Result> { let mut offsets = Vec::new(); - for (symbol, section) in manganis_symbols(file) { + for (version, symbol, section) in manganis_symbols(file) { let virtual_address = symbol.address(); let Some((section_range_start, _)) = section.file_range() else { tracing::error!( - "Found __MANGANIS__ symbol {:?} in section {}, but the section has no file range", + "Found __ASSETS__ symbol {:?} in section {}, but the section has no file range", symbol.name(), section.index() ); @@ -172,7 +341,7 @@ fn find_native_symbol_offsets<'a, R: ReadRef<'a>>(file: &File<'a, R>) -> Result< .try_into() .expect("Virtual address should be greater than or equal to section address"); let file_offset = section_range_start + section_relative_address; - offsets.push(file_offset); + offsets.push((version, file_offset)); } Ok(offsets) @@ -198,7 +367,7 @@ fn eval_walrus_global_expr(module: &walrus::Module, expr: &walrus::ConstExpr) -> fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( 
file_contents: &[u8], file: &File<'a, R>, -) -> Result> { +) -> Result> { let Some(section) = file .sections() .find(|section| section.name() == Ok("")) @@ -259,9 +428,9 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( eval_walrus_global_expr(&module, &main_memory_offset).unwrap_or_default(); for export in module.exports.iter() { - if !looks_like_manganis_symbol(&export.name) { + let Some(version) = looks_like_manganis_symbol(&export.name) else { continue; - } + }; let walrus::ExportItem::Global(global) = export.item else { continue; @@ -273,7 +442,7 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( let Some(virtual_address) = eval_walrus_global_expr(&module, &pointer) else { tracing::error!( - "Found __MANGANIS__ symbol {:?} in WASM file, but the global expression could not be evaluated", + "Found __ASSETS__ symbol {:?} in WASM file, but the global expression could not be evaluated", export.name ); continue; @@ -285,7 +454,7 @@ fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( .expect("Virtual address should be greater than or equal to section address"); let file_offset = data_start_offset + section_relative_address; - offsets.push(file_offset); + offsets.push((version, file_offset)); } Ok(offsets) @@ -311,15 +480,12 @@ pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result) -> Result, generics: &Generics) { +fn add_bounds(where_clause: &mut Option, generics: &Generics, krate: &Path) { let bounds = generics.params.iter().filter_map(|param| match param { syn::GenericParam::Type(ty) => { - Some::(parse_quote! { #ty: const_serialize::SerializeConst, }) + Some::(parse_quote! 
{ #ty: #krate::SerializeConst, }) } syn::GenericParam::Lifetime(_) => None, syn::GenericParam::Const(_) => None, @@ -19,10 +19,33 @@ fn add_bounds(where_clause: &mut Option, generics: &Generics) { } /// Derive the const serialize trait for a struct -#[proc_macro_derive(SerializeConst)] -pub fn derive_parse(input: TokenStream) -> TokenStream { +#[proc_macro_derive(SerializeConst, attributes(const_serialize))] +pub fn derive_parse(raw_input: TokenStream) -> TokenStream { // Parse the input tokens into a syntax tree - let input = parse_macro_input!(input as DeriveInput); + let input = parse_macro_input!(raw_input as DeriveInput); + let krate = input.attrs.iter().find_map(|attr| { + attr.path() + .is_ident("const_serialize") + .then(|| { + let mut path = None; + if let Err(err) = attr.parse_nested_meta(|meta| { + if meta.path.is_ident("crate") { + let ident: Path = meta.value()?.parse()?; + path = Some(ident); + } + Ok(()) + }) { + return Some(Err(err)); + } + path.map(Ok) + }) + .flatten() + }); + let krate = match krate { + Some(Ok(path)) => path, + Some(Err(err)) => return err.into_compile_error().into(), + None => parse_quote! { const_serialize }, + }; match input.data { syn::Data::Struct(data) => match data.fields { @@ -30,7 +53,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); let field_names = data.fields.iter().enumerate().map(|(i, field)| { field .ident @@ -43,14 +66,14 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { }); let field_types = data.fields.iter().map(|field| &field.ty); quote! 
{ - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Struct(const_serialize::StructLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Struct(#krate::StructLayout::new( std::mem::size_of::(), &[#( - const_serialize::StructFieldLayout::new( + #krate::StructFieldLayout::new( stringify!(#field_names), std::mem::offset_of!(#ty, #field_names), - <#field_types as const_serialize::SerializeConst>::MEMORY_LAYOUT, + <#field_types as #krate::SerializeConst>::MEMORY_LAYOUT, ), )*], )); @@ -61,10 +84,10 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); quote! 
{ - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Struct(const_serialize::StructLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Struct(#krate::StructLayout::new( std::mem::size_of::(), &[], )); @@ -138,7 +161,7 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { let ty = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let mut where_clause = where_clause.cloned(); - add_bounds(&mut where_clause, &input.generics); + add_bounds(&mut where_clause, &input.generics, &krate); let mut last_discriminant = None; let variants = data.variants.iter().map(|variant| { let discriminant = variant @@ -164,18 +187,19 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { quote! { { #[allow(unused)] - #[derive(const_serialize::SerializeConst)] + #[derive(#krate::SerializeConst)] + #[const_serialize(crate = #krate)] #[repr(C)] struct VariantStruct #generics { #( #field_names: #field_types, )* } - const_serialize::EnumVariant::new( + #krate::EnumVariant::new( stringify!(#variant_name), #discriminant as u32, - match VariantStruct::MEMORY_LAYOUT { - const_serialize::Layout::Struct(layout) => layout, + match ::MEMORY_LAYOUT { + #krate::Layout::Struct(layout) => layout, _ => panic!("VariantStruct::MEMORY_LAYOUT must be a struct"), }, ::std::mem::align_of::(), @@ -184,14 +208,14 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { } }); quote! 
{ - unsafe impl #impl_generics const_serialize::SerializeConst for #ty #ty_generics #where_clause { - const MEMORY_LAYOUT: const_serialize::Layout = const_serialize::Layout::Enum(const_serialize::EnumLayout::new( + unsafe impl #impl_generics #krate::SerializeConst for #ty #ty_generics #where_clause { + const MEMORY_LAYOUT: #krate::Layout = #krate::Layout::Enum(#krate::EnumLayout::new( ::std::mem::size_of::(), - const_serialize::PrimitiveLayout::new( + #krate::PrimitiveLayout::new( #discriminant_size as usize, ), { - const DATA: &'static [const_serialize::EnumVariant] = &[ + const DATA: &'static [#krate::EnumVariant] = &[ #( #variants, )* diff --git a/packages/manganis/manganis-core/src/options.rs b/packages/manganis/manganis-core/src/options.rs index dd383ab4d8..bed2cf4651 100644 --- a/packages/manganis/manganis-core/src/options.rs +++ b/packages/manganis/manganis-core/src/options.rs @@ -107,7 +107,7 @@ impl AssetOptionsBuilder<()> { impl AssetOptionsBuilder { /// Create a new asset options builder with the given variant - pub(crate) const fn variant(variant: T) -> Self { + pub const fn variant(variant: T) -> Self { Self { add_hash: true, variant, diff --git a/packages/manganis/manganis-macro/src/linker.rs b/packages/manganis/manganis-macro/src/linker.rs index 116d0c63b2..f2f9a408bb 100644 --- a/packages/manganis/manganis-macro/src/linker.rs +++ b/packages/manganis/manganis-macro/src/linker.rs @@ -8,7 +8,7 @@ use quote::ToTokens; /// After linking, the "manganis" sections of the different object files will be merged. pub fn generate_link_section(asset: impl ToTokens, asset_hash: &str) -> TokenStream2 { let position = proc_macro2::Span::call_site(); - let export_name = syn::LitStr::new(&format!("__MANGANIS__{}", asset_hash), position); + let export_name = syn::LitStr::new(&format!("__ASSETS__{}", asset_hash), position); quote::quote! 
{ // First serialize the asset into a constant sized buffer From 524d25f6ef3d4c4273d4681f8bbc12aaa786e41d Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 11:54:47 -0600 Subject: [PATCH 10/20] bump const serialize version --- Cargo.lock | 32 +++++++++++------------ Cargo.toml | 4 +-- packages/const-serialize-macro/Cargo.toml | 2 +- packages/const-serialize/Cargo.toml | 2 +- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38b78d7939..34374385c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4175,27 +4175,29 @@ dependencies = [ [[package]] name = "const-serialize" -version = "0.7.0" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd339aa356cc6452308fad2ee56623f900a8e68bc0ab9360a0ddb8270e5640c8" dependencies = [ - "const-serialize 0.7.0", - "const-serialize-macro 0.7.0", - "rand 0.9.2", + "const-serialize-macro 0.7.1", "serde", ] [[package]] name = "const-serialize" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd339aa356cc6452308fad2ee56623f900a8e68bc0ab9360a0ddb8270e5640c8" +version = "0.8.0" dependencies = [ - "const-serialize-macro 0.7.1", + "const-serialize 0.8.0", + "const-serialize-macro 0.8.0", + "rand 0.9.2", "serde", ] [[package]] name = "const-serialize-macro" -version = "0.7.0" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "797d158acb331e2a89d696343a27cd39bf7e36aaef33ba4799a5ef1526e24861" dependencies = [ "proc-macro2", "quote", @@ -4204,9 +4206,7 @@ dependencies = [ [[package]] name = "const-serialize-macro" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "797d158acb331e2a89d696343a27cd39bf7e36aaef33ba4799a5ef1526e24861" +version = "0.8.0" dependencies = [ "proc-macro2", "quote", @@ -5380,8 +5380,8 @@ dependencies = [ "clap", "console 0.16.1", "console-subscriber", - "const-serialize 0.7.0", 
"const-serialize 0.7.1", + "const-serialize 0.8.0", "convert_case 0.8.0", "crossterm 0.29.0", "ctrlc", @@ -5498,7 +5498,7 @@ dependencies = [ "browserslist-rs 0.19.0", "built 0.8.0", "codemap", - "const-serialize 0.7.0", + "const-serialize 0.8.0", "grass", "image", "imagequant", @@ -10771,7 +10771,7 @@ dependencies = [ name = "manganis" version = "0.7.0" dependencies = [ - "const-serialize 0.7.0", + "const-serialize 0.8.0", "manganis-core 0.7.0", "manganis-macro", ] @@ -10780,7 +10780,7 @@ dependencies = [ name = "manganis-core" version = "0.7.0" dependencies = [ - "const-serialize 0.7.0", + "const-serialize 0.8.0", "dioxus", "dioxus-cli-config", "dioxus-core-types", diff --git a/Cargo.toml b/Cargo.toml index daab6df754..c89b4604fe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -194,8 +194,8 @@ dioxus-cli-telemetry = { path = "packages/cli-telemetry", version = "0.7.0" } dioxus-cli-config = { path = "packages/cli-config", version = "0.7.0" } # const-serialize -const-serialize = { path = "packages/const-serialize", version = "0.7.0" } -const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.7.0" } +const-serialize = { path = "packages/const-serialize", version = "0.8.0" } +const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.8.0" } # The version of const-serialize published with 0.7.0 and 0.7.1 that the CLI should still support const-serialize-07 = { package = "const-serialize", version = "0.7.1" } diff --git a/packages/const-serialize-macro/Cargo.toml b/packages/const-serialize-macro/Cargo.toml index 8c20662ab1..123efc864b 100644 --- a/packages/const-serialize-macro/Cargo.toml +++ b/packages/const-serialize-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "const-serialize-macro" -version = { workspace = true } +version = "0.8.0" authors = ["Evan Almloff"] edition = "2021" description = "A macro to derive const serialize" diff --git a/packages/const-serialize/Cargo.toml b/packages/const-serialize/Cargo.toml index 
9d4b4e2647..f65863d66e 100644 --- a/packages/const-serialize/Cargo.toml +++ b/packages/const-serialize/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "const-serialize" -version = { workspace = true } +version = "0.8.0" authors = ["Evan Almloff"] edition = "2021" description = "A serialization framework that works in const contexts" From 9d99a876f6f1c52c92e6ed69c7bee96fe118708c Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 12:01:22 -0600 Subject: [PATCH 11/20] fix asset conversion code --- packages/cli/src/build/assets.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index bbd24af35e..bfc2dec5ad 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -164,7 +164,7 @@ fn legacy_asset_to_modern_asset( _ => AssetOptions::builder().into_asset_options(), }; - BundledAsset::new(bundled_path, absolute_path, options) + BundledAsset::new(absolute_path, bundled_path, options) } fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07::BundledAsset { @@ -219,7 +219,7 @@ fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07 _ => manganis_core_07::AssetOptions::builder().into_asset_options(), }; - manganis_core_07::BundledAsset::new(bundled_path, absolute_path, options) + manganis_core_07::BundledAsset::new(absolute_path, bundled_path, options) } fn looks_like_manganis_symbol(name: &str) -> Option { From 896aaba5d1593cfc31c9dc70b02d7087e4633ff0 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 12:08:30 -0600 Subject: [PATCH 12/20] pull out manganis symbol struct from tuble --- packages/cli/src/build/assets.rs | 38 ++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index bfc2dec5ad..13c58601e4 100644 --- a/packages/cli/src/build/assets.rs +++ 
b/packages/cli/src/build/assets.rs @@ -232,12 +232,25 @@ fn looks_like_manganis_symbol(name: &str) -> Option { } } +/// An asset offset in the binary +#[derive(Clone, Copy)] +struct ManganisSymbolOffset { + version: ManganisVersion, + offset: u64, +} + +impl ManganisSymbolOffset { + fn new(version: ManganisVersion, offset: u64) -> Self { + Self { version, offset } + } +} + /// Find the offsets of any manganis symbols in the given file. fn find_symbol_offsets<'a, R: ReadRef<'a>>( path: &Path, file_contents: &[u8], file: &File<'a, R>, -) -> Result> { +) -> Result> { let pdb_file = find_pdb_file(path); match file.format() { @@ -285,7 +298,7 @@ fn find_pdb_file(path: &Path) -> Option { } /// Find the offsets of any manganis symbols in a pdb file. -fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { +fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result> { let pdb_file_handle = std::fs::File::open(pdb_file)?; let mut pdb_file = pdb::PDB::open(pdb_file_handle).context("Failed to open PDB file")?; let Ok(Some(sections)) = pdb_file.sections() else { @@ -314,7 +327,10 @@ fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result Result>( file: &File<'a, R>, -) -> Result> { +) -> Result> { let mut offsets = Vec::new(); for (version, symbol, section) in manganis_symbols(file) { let virtual_address = symbol.address(); @@ -341,7 +357,7 @@ fn find_native_symbol_offsets<'a, R: ReadRef<'a>>( .try_into() .expect("Virtual address should be greater than or equal to section address"); let file_offset = section_range_start + section_relative_address; - offsets.push((version, file_offset)); + offsets.push(ManganisSymbolOffset::new(version, file_offset)); } Ok(offsets) @@ -367,7 +383,7 @@ fn eval_walrus_global_expr(module: &walrus::Module, expr: &walrus::ConstExpr) -> fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( file_contents: &[u8], file: &File<'a, R>, -) -> Result> { +) -> Result> { let Some(section) = file .sections() .find(|section| section.name() == Ok("")) @@ -454,7 +470,7 @@ 
fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>( .expect("Virtual address should be greater than or equal to section address"); let file_offset = data_start_offset + section_relative_address; - offsets.push((version, file_offset)); + offsets.push(ManganisSymbolOffset::new(version, file_offset)); } Ok(offsets) @@ -480,7 +496,9 @@ pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result) -> Result Date: Wed, 12 Nov 2025 12:18:07 -0600 Subject: [PATCH 13/20] test reading old asset versions --- Cargo.lock | 1016 +++++++++++++---- .../playwright-tests/cli-optimization.spec.js | 120 +- .../cli-optimization/Cargo.toml | 9 +- .../cli-optimization/src/main.rs | 3 + .../playwright-tests/playwright.config.js | 10 + 5 files changed, 848 insertions(+), 310 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8107d06dd6..aec6959143 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1584,7 +1584,7 @@ dependencies = [ name = "barebones-template-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -1670,7 +1670,7 @@ version = "0.0.0" dependencies = [ "bevy", "color", - "dioxus", + "dioxus 0.7.1", "dioxus-native", "tracing-subscriber", "wgpu 26.0.1", @@ -3209,7 +3209,7 @@ name = "bluetooth-scanner" version = "0.1.1" dependencies = [ "btleplug", - "dioxus", + "dioxus 0.7.1", "futures", "futures-channel", "tokio", @@ -5367,36 +5367,36 @@ name = "dioxus" version = "0.7.1" dependencies = [ "criterion", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-config-macro", - "dioxus-config-macros", - "dioxus-core", - "dioxus-core-macro", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-config-macro 0.7.1", + "dioxus-config-macros 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", "dioxus-desktop", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack", - "dioxus-fullstack-macro", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", + "dioxus-devtools 0.7.1", + 
"dioxus-document 0.7.1", + "dioxus-fullstack 0.7.1", + "dioxus-fullstack-macro 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-liveview", - "dioxus-logger", + "dioxus-logger 0.7.1", "dioxus-native", "dioxus-router", "dioxus-server", - "dioxus-signals", + "dioxus-signals 0.7.1", "dioxus-ssr", - "dioxus-stores", - "dioxus-web", + "dioxus-stores 0.7.1", + "dioxus-web 0.7.1", "env_logger 0.11.8", "futures-util", - "manganis", + "manganis 0.7.1", "rand 0.9.2", "serde", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tracing", @@ -5404,12 +5404,39 @@ dependencies = [ "wasm-splitter", ] +[[package]] +name = "dioxus" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f76e820919058a685a1fdbb2ef4888c73ac77d623c39a7dfde2aa812947246be" +dependencies = [ + "dioxus-asset-resolver 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-config-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-config-macros 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-stores 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-web 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "manganis 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "warnings", +] + [[package]] name = "dioxus-asset-resolver" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-cli-config", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", "http 1.3.1", "infer", "jni 0.21.1", @@ -5425,11 +5452,32 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-asset-resolver" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6a124667ce5565c39fe2f33af45c21fe459c5bfcf7a8074ad12c9e9da5817c" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "http 1.3.1", + "infer", + "jni 0.21.1", + "js-sys", + "ndk 0.9.0", + "ndk-context", + "ndk-sys 0.6.0+11769913", + "percent-encoding", + "thiserror 2.0.17", + "tokio", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "dioxus-autofmt" version = "0.7.1" dependencies = [ - "dioxus-rsx", + "dioxus-rsx 0.7.1", "pretty_assertions", "prettyplease", "proc-macro2", @@ -5481,17 +5529,17 @@ dependencies = [ "depinfo", "dioxus-autofmt", "dioxus-check", - "dioxus-cli-config", + "dioxus-cli-config 0.7.1", "dioxus-cli-opt", "dioxus-cli-telemetry", "dioxus-component-manifest", - "dioxus-core", - "dioxus-core-types", - "dioxus-devtools-types", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-devtools-types 0.7.1", "dioxus-dx-wire-format", - "dioxus-fullstack", - "dioxus-html", - "dioxus-rsx", + "dioxus-fullstack 0.7.1", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "dioxus-rsx-hotreload", "dioxus-rsx-rosetta", "dircpy", @@ -5517,9 +5565,9 @@ dependencies = [ "krates", "local-ip-address", "log", - "manganis", - 
"manganis-core 0.7.0", + "manganis 0.7.1", "manganis-core 0.7.1", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "memmap", "memoize", "notify", @@ -5545,7 +5593,7 @@ dependencies = [ "serde_json5", "shell-words", "strum 0.27.2", - "subsecond-types", + "subsecond-types 0.7.1", "syn 2.0.108", "tar", "target-lexicon 0.13.3", @@ -5583,6 +5631,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "dioxus-cli-config" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "babc8eaf90379352bc4820830749fd231feb9312433d4094b4e7b79d912b3d96" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "dioxus-cli-opt" version = "0.7.1" @@ -5596,8 +5653,8 @@ dependencies = [ "image", "imagequant", "lightningcss", - "manganis", - "manganis-core 0.7.0", + "manganis 0.7.1", + "manganis-core 0.7.1", "mozjpeg", "object 0.37.3", "png", @@ -5635,7 +5692,8 @@ dependencies = [ name = "dioxus-cli-optimization-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", + "dioxus 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", ] @@ -5669,23 +5727,39 @@ dependencies = [ "quote", ] +[[package]] +name = "dioxus-config-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30018b5b95567cee42febbb444d5e5e47dbe3e91fa6e44b9e571edad0184cd36" +dependencies = [ + "proc-macro2", + "quote", +] + [[package]] name = "dioxus-config-macros" version = "0.7.1" +[[package]] +name = "dioxus-config-macros" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a16b25f8761253ed5ffa4d0789376310fbbc1bbaa8190fc2f374db82c6285a1" + [[package]] name = "dioxus-core" version = "0.7.1" dependencies = [ "anyhow", "const_format", - "dioxus", - "dioxus-core-types", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "dioxus-ssr", "futures-channel", "futures-util", 
- "generational-box", + "generational-box 0.7.1", "longest-increasing-subsequence", "pretty_assertions", "rand 0.9.2", @@ -5695,7 +5769,7 @@ dependencies = [ "serde", "slab", "slotmap", - "subsecond", + "subsecond 0.7.1", "sysinfo 0.35.2", "tokio", "tracing", @@ -5705,14 +5779,37 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-core" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75468d08468919f783b0f7ee826802f4e8e66c5b5a0451245d861c211ca18216" +dependencies = [ + "anyhow", + "const_format", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "longest-increasing-subsequence", + "rustc-hash 2.1.1", + "rustversion", + "serde", + "slab", + "slotmap", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tracing", + "warnings", +] + [[package]] name = "dioxus-core-macro" version = "0.7.1" dependencies = [ "convert_case 0.8.0", - "dioxus", - "dioxus-html", - "dioxus-rsx", + "dioxus 0.7.1", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "proc-macro2", "quote", "rustversion", @@ -5721,9 +5818,28 @@ dependencies = [ "trybuild", ] +[[package]] +name = "dioxus-core-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f145abdb2a3f858456cb4382390863cf0398c228ad0733618f48891da7687be3" +dependencies = [ + "convert_case 0.8.0", + "dioxus-rsx 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "dioxus-core-types" +version = "0.7.1" + [[package]] name = "dioxus-core-types" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f5ecf5a51de06d78aded3b5f7516a258f53117cba718bc5706317a3c04c844" [[package]] name = "dioxus-desktop" @@ -5734,28 +5850,28 @@ dependencies = [ 
"bytes", "cocoa", "core-foundation 0.10.1", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-signals 0.7.1", "dioxus-ssr", "dunce", "exitcode", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "global-hotkey", "http-range", "infer", "jni 0.21.1", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "libc", "muda", "ndk 0.9.0", @@ -5788,15 +5904,15 @@ dependencies = [ name = "dioxus-devtools" version = "0.7.1" dependencies = [ - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools-types", - "dioxus-signals", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools-types 0.7.1", + "dioxus-signals 0.7.1", "futures-channel", "futures-util", "serde", "serde_json", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tracing", @@ -5804,28 +5920,77 @@ dependencies = [ "warnings", ] +[[package]] +name = "dioxus-devtools" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eb2c5019b7fa72e8e6b21ba99e9263bd390c9a30bbf09793b72f4b57ed7c3d7" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde", + "serde_json", + "subsecond 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "thiserror 2.0.17", + "tracing", + 
"tungstenite 0.27.0", + "warnings", +] + +[[package]] +name = "dioxus-devtools-types" +version = "0.7.1" +dependencies = [ + "dioxus-core 0.7.1", + "serde", + "subsecond-types 0.7.1", +] + [[package]] name = "dioxus-devtools-types" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b007cec5b8548281921c4e4678926a3936e9d6757e951380685cc6121a6f974" dependencies = [ - "dioxus-core", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", - "subsecond-types", + "subsecond-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "dioxus-document" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-core-macro", - "dioxus-core-types", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "futures-channel", "futures-util", - "generational-box", - "lazy-js-bundle", + "generational-box 0.7.1", + "lazy-js-bundle 0.7.1", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "dioxus-document" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c55bcae9aaf150d4a141c61b3826da5a7ac23dfff09726568525cd46336e9a2" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "tracing", @@ -5836,10 +6001,10 @@ name = "dioxus-dx-wire-format" version = "0.7.1" dependencies = [ "cargo_metadata", - "manganis-core 
0.7.0", + "manganis-core 0.7.1", "serde", "serde_json", - "subsecond-types", + "subsecond-types 0.7.1", ] [[package]] @@ -5852,10 +6017,10 @@ dependencies = [ "base64 0.22.1", "bytes", "ciborium", - "dioxus", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-html 0.7.1", "dioxus-ssr", - "dioxus-stores", + "dioxus-stores 0.7.1", "form_urlencoded", "futures", "futures-util", @@ -5907,16 +6072,16 @@ dependencies = [ "const_format", "content_disposition", "derive_more 2.0.1", - "dioxus", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-fullstack-core", - "dioxus-fullstack-macro", - "dioxus-hooks", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-fullstack-macro 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-server", - "dioxus-signals", + "dioxus-signals 0.7.1", "form_urlencoded", "futures", "futures-channel", @@ -5957,6 +6122,63 @@ dependencies = [ "xxhash-rust", ] +[[package]] +name = "dioxus-fullstack" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff04cef82d6639eb15186f626298645dbd92978bf66dc3efd2e5984a2ff4a1ff" +dependencies = [ + "anyhow", + "async-stream", + "async-tungstenite", + "axum 0.8.6", + "axum-core 0.5.5", + "base64 0.22.1", + "bytes", + "ciborium", + "const-str 0.7.0", + "const_format", + "content_disposition", + "derive_more 2.0.1", + "dioxus-asset-resolver 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-fullstack-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 
0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "form_urlencoded", + "futures", + "futures-channel", + "futures-util", + "gloo-net", + "headers", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "js-sys", + "mime", + "pin-project", + "reqwest 0.12.24", + "rustversion", + "send_wrapper", + "serde", + "serde_json", + "serde_qs", + "serde_urlencoded", + "thiserror 2.0.17", + "tokio-util", + "tracing", + "tungstenite 0.27.0", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "xxhash-rust", +] + [[package]] name = "dioxus-fullstack-core" version = "0.7.1" @@ -5965,16 +6187,44 @@ dependencies = [ "axum-core 0.5.5", "base64 0.22.1", "ciborium", - "dioxus", - "dioxus-core", - "dioxus-document", - "dioxus-fullstack", - "dioxus-history", - "dioxus-hooks", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-signals 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", + "http 1.3.1", + "inventory", + "parking_lot", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tracing", +] + +[[package]] +name = "dioxus-fullstack-core" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41281c7cd4d311a50933256e19a5d91d0d950ad350dd3232bd4321fdd3a59fb0" +dependencies = [ + "anyhow", + "axum-core 0.5.5", + "base64 0.22.1", + "ciborium", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "http 1.3.1", "inventory", "parking_lot", @@ -5992,7 +6242,7 @@ dependencies = [ "axum 0.8.6", "const_format", "convert_case 0.8.0", - "dioxus", + "dioxus 0.7.1", "proc-macro2", "quote", "serde", @@ -6001,12 +6251,36 @@ dependencies = [ "xxhash-rust", ] +[[package]] +name = "dioxus-fullstack-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae73023c8b8fee2692fc50a28063336f0b6930e86727e30c1047c92d30805b49" +dependencies = [ + "const_format", + "convert_case 0.8.0", + "proc-macro2", + "quote", + "syn 2.0.108", + "xxhash-rust", +] + [[package]] name = "dioxus-history" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "tracing", +] + +[[package]] +name = "dioxus-history" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dac73657da5c7a20629482d774b52f4a4f7cb57a520649f1d855d4073e809c98" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", ] @@ -6014,12 +6288,12 @@ dependencies = [ name = "dioxus-hooks" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-signals 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "reqwest 0.12.24", "rustversion", "slab", @@ -6029,29 +6303,46 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-hooks" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffd445f16d64939e06cd71a1c63a665f383fda6b7882f4c6f8f1bd6efca2046" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustversion", + "slab", + "tracing", + "warnings", +] + [[package]] name = "dioxus-html" version = "0.7.1" dependencies = [ "async-trait", "bytes", - "dioxus", - "dioxus-core", - "dioxus-core-macro", - "dioxus-core-types", - "dioxus-hooks", - "dioxus-html-internal-macro", - "dioxus-rsx", - "dioxus-web", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html-internal-macro 0.7.1", + "dioxus-rsx 0.7.1", + "dioxus-web 0.7.1", "enumset", "euclid", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "js-sys", "keyboard-types", - "lazy-js-bundle", - "manganis", + "lazy-js-bundle 0.7.1", + "manganis 0.7.1", "rustversion", "serde", "serde_json", @@ -6060,6 +6351,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "dioxus-html" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f407fc73a9554a644872fcccc9faf762acad8f45158e3d67e42ab8dd42f4586" +dependencies = [ + "async-trait", + "bytes", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-hooks 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html-internal-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "enumset", + "euclid", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "keyboard-types", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustversion", + "tracing", +] + [[package]] name = "dioxus-html-internal-macro" 
version = "0.7.1" @@ -6071,15 +6386,27 @@ dependencies = [ "trybuild", ] +[[package]] +name = "dioxus-html-internal-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a968aae4bc92de87cbac3d0d043803b25a7c62c187841e61adcc9b49917c2b2a" +dependencies = [ + "convert_case 0.8.0", + "proc-macro2", + "quote", + "syn 2.0.108", +] + [[package]] name = "dioxus-interpreter-js" version = "0.7.1" dependencies = [ - "dioxus-core", - "dioxus-core-types", - "dioxus-html", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-html 0.7.1", "js-sys", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "rustc-hash 2.1.1", "serde", "sledgehammer_bindgen", @@ -6089,22 +6416,38 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-interpreter-js" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83ab170d89308399205f8ad3d43d8d419affe317016b41ca0695186f7593cba2" +dependencies = [ + "js-sys", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-hash 2.1.1", + "sledgehammer_bindgen", + "sledgehammer_utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "dioxus-liveview" version = "0.7.1" dependencies = [ "axum 0.8.6", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-html", - "dioxus-interpreter-js", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "rustc-hash 2.1.1", "serde", "serde_json", @@ -6121,8 +6464,20 @@ dependencies = [ name = "dioxus-logger" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-cli-config", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "tracing", + 
"tracing-subscriber", + "tracing-wasm", +] + +[[package]] +name = "dioxus-logger" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42237934c6a67f5ed9a8c37e47ca980ee7cfec9e783a9a1f8c2e36c8b96ae74b" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", "tracing-subscriber", "tracing-wasm", @@ -6141,13 +6496,13 @@ dependencies = [ "blitz-paint", "blitz-shell", "blitz-traits", - "dioxus-asset-resolver", - "dioxus-cli-config", - "dioxus-core", - "dioxus-devtools", - "dioxus-document", - "dioxus-history", - "dioxus-html", + "dioxus-asset-resolver 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", "dioxus-native-dom", "futures-util", "keyboard-types", @@ -6164,9 +6519,9 @@ version = "0.7.1" dependencies = [ "blitz-dom", "blitz-traits", - "dioxus", - "dioxus-core", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-html 0.7.1", "futures-util", "keyboard-types", "rustc-hash 2.1.1", @@ -6177,21 +6532,21 @@ dependencies = [ name = "dioxus-playwright-default-features-disabled-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-error-codes-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-errors-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6200,7 +6555,7 @@ dependencies = [ name = "dioxus-playwright-fullstack-hydration-order-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6209,7 +6564,7 @@ dependencies = [ name = "dioxus-playwright-fullstack-mounted-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6218,7 +6573,7 @@ dependencies = [ name = 
"dioxus-playwright-fullstack-routing-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -6227,14 +6582,14 @@ dependencies = [ name = "dioxus-playwright-fullstack-spread-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-fullstack-test" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "futures", "serde", "tokio", @@ -6245,7 +6600,7 @@ name = "dioxus-playwright-liveview-test" version = "0.0.1" dependencies = [ "axum 0.8.6", - "dioxus", + "dioxus 0.7.1", "dioxus-liveview", "tokio", ] @@ -6254,21 +6609,21 @@ dependencies = [ name = "dioxus-playwright-web-hash-routing-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-web-routing-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "dioxus-playwright-web-test" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde_json", "tracing", "tracing-wasm", @@ -6280,7 +6635,7 @@ dependencies = [ name = "dioxus-pwa-example" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -6291,17 +6646,17 @@ dependencies = [ "base64 0.22.1", "ciborium", "criterion", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-macro", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", "dioxus-router", "dioxus-router-macro", - "dioxus-signals", + "dioxus-signals 0.7.1", "dioxus-ssr", "percent-encoding", "rustversion", @@ -6317,7 +6672,7 @@ version = "0.7.1" dependencies = [ "base16", "digest", - "dioxus", + "dioxus 0.7.1", "proc-macro2", "quote", "sha2", @@ -6337,13 +6692,25 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "dioxus-rsx" 
+version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f026380dfda8b93ad995c0a90a62a17b8afeb246baff1b781a52c7b1b3ebd791" +dependencies = [ + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.108", +] + [[package]] name = "dioxus-rsx-hotreload" version = "0.7.1" dependencies = [ - "dioxus-core", - "dioxus-core-types", - "dioxus-rsx", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-rsx 0.7.1", "internment", "proc-macro2", "proc-macro2-diagnostics", @@ -6358,8 +6725,8 @@ version = "0.7.1" dependencies = [ "convert_case 0.8.0", "dioxus-autofmt", - "dioxus-html", - "dioxus-rsx", + "dioxus-html 0.7.1", + "dioxus-rsx 0.7.1", "html_parser", "htmlentity", "pretty_assertions", @@ -6380,26 +6747,26 @@ dependencies = [ "chrono", "ciborium", "dashmap 6.1.0", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-macro", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-hooks", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-logger", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-macro 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-hooks 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-logger 0.7.1", "dioxus-router", - "dioxus-signals", + "dioxus-signals 0.7.1", "dioxus-ssr", "enumset", "futures", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "http 1.3.1", "http-body-util", "hyper 1.7.0", @@ -6416,7 +6783,7 @@ dependencies = [ "serde", "serde_json", "serde_qs", - "subsecond", + "subsecond 0.7.1", "thiserror 2.0.17", "tokio", "tokio-tungstenite 0.27.0", @@ -6434,11 +6801,11 @@ dependencies = [ name = "dioxus-signals" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", + "dioxus 0.7.1", + "dioxus-core 0.7.1", "futures-channel", "futures-util", - 
"generational-box", + "generational-box 0.7.1", "parking_lot", "rand 0.9.2", "reqwest 0.12.24", @@ -6450,14 +6817,30 @@ dependencies = [ "warnings", ] +[[package]] +name = "dioxus-signals" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3895cc17ff5b43ada07743111be586e7a927ed7ec511457020e4235e13e63fe6" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot", + "rustc-hash 2.1.1", + "tracing", + "warnings", +] + [[package]] name = "dioxus-ssr" version = "0.7.1" dependencies = [ "askama_escape", - "dioxus", - "dioxus-core", - "dioxus-core-types", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", "rustc-hash 2.1.1", ] @@ -6465,10 +6848,21 @@ dependencies = [ name = "dioxus-stores" version = "0.7.1" dependencies = [ - "dioxus", - "dioxus-core", - "dioxus-signals", - "dioxus-stores-macro", + "dioxus 0.7.1", + "dioxus-core 0.7.1", + "dioxus-signals 0.7.1", + "dioxus-stores-macro 0.7.1", +] + +[[package]] +name = "dioxus-stores" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8521729ac35f362476ac4eb7d1c4ab79e7e92a0facfdea3ee978c0ddf7108d37" +dependencies = [ + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-stores-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -6476,8 +6870,20 @@ name = "dioxus-stores-macro" version = "0.7.1" dependencies = [ "convert_case 0.8.0", - "dioxus", - "dioxus-stores", + "dioxus 0.7.1", + "dioxus-stores 0.7.1", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "dioxus-stores-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "23a733d2684dc843e81954f6176b3353e4cfc71b6978a8e464591bb5536f610b" +dependencies = [ + "convert_case 0.8.0", "proc-macro2", "quote", "syn 2.0.108", @@ -6487,8 +6893,8 @@ dependencies = [ name = "dioxus-tailwind" version = "0.0.0" dependencies = [ - "dioxus", - "manganis", + "dioxus 0.7.1", + "manganis 0.7.1", ] [[package]] @@ -6496,26 +6902,26 @@ name = "dioxus-web" version = "0.7.1" dependencies = [ "ciborium", - "dioxus", - "dioxus-cli-config", - "dioxus-core", - "dioxus-core-types", - "dioxus-devtools", - "dioxus-document", - "dioxus-fullstack-core", - "dioxus-history", - "dioxus-html", - "dioxus-interpreter-js", - "dioxus-signals", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core 0.7.1", + "dioxus-core-types 0.7.1", + "dioxus-devtools 0.7.1", + "dioxus-document 0.7.1", + "dioxus-fullstack-core 0.7.1", + "dioxus-history 0.7.1", + "dioxus-html 0.7.1", + "dioxus-interpreter-js 0.7.1", + "dioxus-signals 0.7.1", "dioxus-ssr", - "dioxus-web", + "dioxus-web 0.7.1", "futures-channel", "futures-util", - "generational-box", + "generational-box 0.7.1", "gloo-dialogs", "gloo-timers", "js-sys", - "lazy-js-bundle", + "lazy-js-bundle 0.7.1", "rustc-hash 2.1.1", "send_wrapper", "serde", @@ -6530,6 +6936,39 @@ dependencies = [ "web-sys", ] +[[package]] +name = "dioxus-web" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76155ecd44535e7c096ec8c5aac4a945899e47567ead4869babdaa74f3f9bca0" +dependencies = [ + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-devtools 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-document 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-history 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-html 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-interpreter-js 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-signals 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel", + "futures-util", + "generational-box 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "gloo-timers", + "js-sys", + "lazy-js-bundle 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-hash 2.1.1", + "send_wrapper", + "serde", + "serde-wasm-bindgen", + "serde_json", + "tracing", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] + [[package]] name = "dircpy" version = "0.3.19" @@ -6770,7 +7209,7 @@ name = "ecommerce-site" version = "0.1.1" dependencies = [ "chrono", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", ] @@ -7284,7 +7723,7 @@ dependencies = [ name = "file-explorer" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "open", ] @@ -7569,8 +8008,8 @@ dependencies = [ "axum_session", "axum_session_auth", "axum_session_sqlx", - "dioxus", - "dioxus-web", + "dioxus 0.7.1", + "dioxus-web 0.7.1", "execute", "http 1.3.1", "serde", @@ -7584,7 +8023,7 @@ dependencies = [ name = "fullstack-desktop-example" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", ] @@ -7593,7 +8032,7 @@ name = "fullstack-hackernews-example" version = "0.1.0" dependencies = [ "chrono", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", ] @@ -7603,7 +8042,7 @@ name = "fullstack-hello-world-example" version = "0.1.0" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "serde", "serde_json", @@ -7615,7 +8054,7 @@ name = "fullstack-router-example" version = "0.1.0" dependencies = [ "axum 0.8.6", - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -7853,6 +8292,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "generational-box" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3c1ae09dfd2d455484a54b56129b9821241c4b0e412227806b6c3730cd18a29" +dependencies = [ + "parking_lot", + "tracing", +] + [[package]] name = "generic-array" version = "0.14.9" @@ -8820,14 +9269,14 @@ dependencies = [ name = "harness-default-to-non-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-desktop" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8835,7 +9284,7 @@ name = "harness-fullstack-desktop-with-default" version = "0.0.1" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8843,28 +9292,28 @@ name = "harness-fullstack-desktop-with-features" version = "0.0.1" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-multi-target" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-multi-target-no-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-fullstack-with-optional-tokio" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -8880,14 +9329,14 @@ dependencies = [ name = "harness-renderer-swap" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-dedicated-client" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -8898,42 +9347,42 @@ version = "0.0.1" name = "harness-simple-desktop" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack-native-with-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-fullstack-with-default" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", 
] [[package]] name = "harness-simple-mobile" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] name = "harness-simple-web" version = "0.0.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -9175,7 +9624,7 @@ name = "hotdog" version = "0.1.0" dependencies = [ "anyhow", - "dioxus", + "dioxus 0.7.1", "reqwest 0.12.24", "rusqlite", "serde", @@ -10448,6 +10897,12 @@ dependencies = [ name = "lazy-js-bundle" version = "0.7.1" +[[package]] +name = "lazy-js-bundle" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "409273b42d0e3ae7c8ce6b8cfbc6a27b7c7d83bbb94fc7f93f22cc9b90eea078" + [[package]] name = "lazy_static" version = "1.5.0" @@ -10945,8 +11400,19 @@ name = "manganis" version = "0.7.1" dependencies = [ "const-serialize 0.8.0", - "manganis-core 0.7.0", - "manganis-macro", + "manganis-core 0.7.1", + "manganis-macro 0.7.1", +] + +[[package]] +name = "manganis" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "124f8f094eb75783b38209ce4d534b9617da4efac652802d9bafe05043a3ec95" +dependencies = [ + "const-serialize 0.7.1", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "manganis-macro 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -10954,10 +11420,10 @@ name = "manganis-core" version = "0.7.1" dependencies = [ "const-serialize 0.8.0", - "dioxus", - "dioxus-cli-config", - "dioxus-core-types", - "manganis", + "dioxus 0.7.1", + "dioxus-cli-config 0.7.1", + "dioxus-core-types 0.7.1", + "manganis 0.7.1", "serde", ] @@ -10968,6 +11434,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fbd1fb8c5aabcc54c6b02dbc968e1c89c28f3e543f2789ef9e3ce45dbdf5df" dependencies = [ "const-serialize 0.7.1", + "dioxus-cli-config 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dioxus-core-types 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", "serde", ] @@ -10977,8 +11445,22 @@ version = "0.7.1" dependencies = [ "dunce", "macro-string", - "manganis", - "manganis-core 0.7.0", + "manganis 0.7.1", + "manganis-core 0.7.1", + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "manganis-macro" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d6fec2a8249739bb30b53a08ecbb217f76096c08f1053f38ec3981ba424c11" +dependencies = [ + "dunce", + "macro-string", + "manganis-core 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2", "quote", "syn 2.0.108", @@ -11429,7 +11911,7 @@ dependencies = [ "blitz-paint", "blitz-traits", "bytemuck", - "dioxus", + "dioxus 0.7.1", "dioxus-native-dom", "futures-util", "pollster 0.4.0", @@ -11453,9 +11935,9 @@ dependencies = [ "bytes", "crossbeam-channel", "data-url 0.3.2", - "dioxus", - "dioxus-asset-resolver", - "dioxus-devtools", + "dioxus 0.7.1", + "dioxus-asset-resolver 0.7.1", + "dioxus-devtools 0.7.1", "dioxus-native-dom", "paste", "rustc-hash 1.1.0", @@ -11563,7 +12045,7 @@ dependencies = [ name = "nested-suspense" version = "0.1.0" dependencies = [ - "dioxus", + "dioxus 0.7.1", "serde", "tokio", ] @@ -16112,7 +16594,7 @@ dependencies = [ name = "ssr-only" version = "0.7.1" dependencies = [ - "dioxus", + "dioxus 0.7.1", ] [[package]] @@ -16437,7 +16919,26 @@ dependencies = [ "memfd", "memmap2", "serde", - "subsecond-types", + "subsecond-types 0.7.1", + "thiserror 2.0.17", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "subsecond" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "834e8caec50249083ee6972a2f7645c4baadcb39d49ea801da1dc1d5e1c2ccb9" +dependencies = [ + "js-sys", + "libc", + "libloading 0.8.9", + "memfd", + "memmap2", + "serde", + "subsecond-types 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.17", 
"wasm-bindgen", "wasm-bindgen-futures", @@ -16450,12 +16951,21 @@ version = "0.1.0" dependencies = [ "cross-tls-crate", "cross-tls-crate-dylib", - "dioxus-devtools", + "dioxus-devtools 0.7.1", +] + +[[package]] +name = "subsecond-types" +version = "0.7.1" +dependencies = [ + "serde", ] [[package]] name = "subsecond-types" version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6beffea67e72a7a530990b270fd0277971eae564fdc10c1e0080e928b477fab" dependencies = [ "serde", ] @@ -16490,7 +17000,7 @@ name = "suspense-carousel" version = "0.7.1" dependencies = [ "async-std", - "dioxus", + "dioxus 0.7.1", "serde", ] @@ -18995,7 +19505,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-compression", - "dioxus", + "dioxus 0.7.1", "dioxus-router", "futures", "getrandom 0.3.4", @@ -19598,7 +20108,7 @@ version = "0.0.0" dependencies = [ "bytemuck", "color", - "dioxus", + "dioxus 0.7.1", "dioxus-native", "tracing-subscriber", "wgpu 26.0.1", diff --git a/packages/playwright-tests/cli-optimization.spec.js b/packages/playwright-tests/cli-optimization.spec.js index 28b83d84c7..6f3e18e1d6 100644 --- a/packages/playwright-tests/cli-optimization.spec.js +++ b/packages/playwright-tests/cli-optimization.spec.js @@ -1,59 +1,67 @@ // @ts-check const { test, expect } = require("@playwright/test"); -test("optimized scripts run", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // // Expect the page to load the script after optimizations have been applied. 
The script - // // should add an editor to the page that shows a main function - // const main = page.locator("#main"); - // await expect(main).toContainText("hi"); - - // Expect the page to contain an image with the id "some_image" - const image = page.locator("#some_image"); - await expect(image).toBeVisible(); - // Get the image src - const src = await image.getAttribute("src"); - - // Expect the page to contain an image with the id "some_image_with_the_same_url" - const image2 = page.locator("#some_image_with_the_same_url"); - await expect(image2).toBeVisible(); - // Get the image src - const src2 = await image2.getAttribute("src"); - - // Expect the urls to be different - expect(src).not.toEqual(src2); - - // Expect the page to contain an image with the id "some_image_without_hash" - const image3 = page.locator("#some_image_without_hash"); - await expect(image3).toBeVisible(); - // Get the image src - const src3 = await image3.getAttribute("src"); - // Expect the src to be without a hash - expect(src3).toEqual("/assets/toasts.avif"); -}); - -test("unused external assets are bundled", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // Assert http://localhost:8989/assets/toasts.png is found even though it is not used in the page - const response = await page.request.get( - "http://localhost:8989/assets/toasts.png" - ); - // Expect the response to be ok - expect(response.status()).toBe(200); - // make sure the response is an image - expect(response.headers()["content-type"]).toBe("image/png"); -}); - -test("assets are resolved", async ({ page }) => { - await page.goto("http://localhost:8989"); - - // Expect the page to contain an element with the id "resolved-data" - const resolvedData = page.locator("#resolved-data"); - await expect(resolvedData).toBeVisible(); - // Expect the element to contain the text "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" - await expect(resolvedData).toContainText( - "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" - ); -}); 
+const test_variants = [ + { port: 9090, name: "0.7.1" }, + { port: 8989, name: "current version" }, +]; + +for (let { port, name } of test_variants) { + test(`optimized scripts run in ${name}`, async ({ page }) => { + await page.goto(`http://localhost:${port}`); + + // // Expect the page to load the script after optimizations have been applied. The script + // // should add an editor to the page that shows a main function + // const main = page.locator("#main"); + // await expect(main).toContainText("hi"); + + // Expect the page to contain an image with the id "some_image" + const image = page.locator("#some_image"); + await expect(image).toBeVisible(); + // Get the image src + const src = await image.getAttribute("src"); + + // Expect the page to contain an image with the id "some_image_with_the_same_url" + const image2 = page.locator("#some_image_with_the_same_url"); + await expect(image2).toBeVisible(); + // Get the image src + const src2 = await image2.getAttribute("src"); + + // Expect the urls to be different + expect(src).not.toEqual(src2); + + // Expect the page to contain an image with the id "some_image_without_hash" + const image3 = page.locator("#some_image_without_hash"); + await expect(image3).toBeVisible(); + // Get the image src + const src3 = await image3.getAttribute("src"); + // Expect the src to be without a hash + expect(src3).toEqual("/assets/toasts.avif"); + }); + + test(`unused external assets are bundled in ${name}`, async ({ page }) => { + await page.goto(`http://localhost:${port}`); + + // Assert http://localhost:9090/assets/toasts.png is found even though it is not used in the page + const response = await page.request.get( + "http://localhost:9090/assets/toasts.png" + ); + // Expect the response to be ok + expect(response.status()).toBe(200); + // make sure the response is an image + expect(response.headers()["content-type"]).toBe("image/png"); + }); + + test(`assets are resolved in ${name}`, async ({ page }) => { + await 
page.goto(`http://localhost:${port}`); + + // Expect the page to contain an element with the id "resolved-data" + const resolvedData = page.locator("#resolved-data"); + await expect(resolvedData).toBeVisible(); + // Expect the element to contain the text "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" + await expect(resolvedData).toContainText( + "List: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]" + ); + }); + +} diff --git a/packages/playwright-tests/cli-optimization/Cargo.toml b/packages/playwright-tests/cli-optimization/Cargo.toml index 29d7ae2e47..e7519c8abb 100644 --- a/packages/playwright-tests/cli-optimization/Cargo.toml +++ b/packages/playwright-tests/cli-optimization/Cargo.toml @@ -7,7 +7,9 @@ license = "MIT OR Apache-2.0" publish = false [dependencies] -dioxus = { workspace = true, features = ["web"] } +dioxus = { workspace = true, features = ["web"], optional = true } +# We test both if the current version of dioxus works and if the CLI can understand assets from the old asset format +dioxus_07 = { package = "dioxus", version = "=0.7.1", features = ["web"], optional = true } serde = { workspace = true, features = ["derive"] } serde_json.workspace = true @@ -15,3 +17,8 @@ serde_json.workspace = true # reqwest = { workspace = true, features = ["blocking"] } # flate2 = "1.1.2" # tar = "0.4.44" + +[features] +default = ["dioxus"] +dioxus = ["dep:dioxus"] +dioxus_07 = ["dep:dioxus_07"] diff --git a/packages/playwright-tests/cli-optimization/src/main.rs b/packages/playwright-tests/cli-optimization/src/main.rs index cd7f590c35..252c3946cd 100644 --- a/packages/playwright-tests/cli-optimization/src/main.rs +++ b/packages/playwright-tests/cli-optimization/src/main.rs @@ -1,5 +1,8 @@ // This test checks the CLI optimizes assets correctly without breaking them +#[cfg(feature = "dioxus_07")] +use dioxus_07 as dioxus; + use dioxus::prelude::*; const SOME_IMAGE: Asset = asset!("/images/toasts.png", AssetOptions::image().with_avif()); diff --git 
a/packages/playwright-tests/playwright.config.js b/packages/playwright-tests/playwright.config.js index c90c80df3f..7e394ce6ce 100644 --- a/packages/playwright-tests/playwright.config.js +++ b/packages/playwright-tests/playwright.config.js @@ -172,6 +172,16 @@ module.exports = defineConfig({ reuseExistingServer: !process.env.CI, stdout: "pipe", }, + { + cwd: path.join(process.cwd(), "cli-optimization"), + // Remove the cache folder for the cli-optimization build to force a full cache reset + command: + 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9090 --no-default-features --features dioxus_07', + port: 9090, + timeout: 50 * 60 * 1000, + reuseExistingServer: !process.env.CI, + stdout: "pipe", + }, { cwd: path.join(process.cwd(), "wasm-split-harness"), command: From ca9089c2870242483ea81c04ec3fcb639e3c5381 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 12:45:41 -0600 Subject: [PATCH 14/20] more list/array cleanup and bump rust version --- packages/cli/src/build/assets.rs | 2 +- packages/const-serialize/Cargo.toml | 2 +- packages/const-serialize/src/array.rs | 8 +++--- packages/const-serialize/src/cbor.rs | 3 +- packages/const-serialize/src/lib.rs | 8 +++--- packages/const-serialize/src/list.rs | 22 +++++++-------- packages/const-serialize/src/primitive.rs | 3 ++ packages/const-serialize/src/str.rs | 34 +++++++++++++++++------ packages/dioxus/Cargo.toml | 2 +- packages/generational-box/Cargo.toml | 2 +- 10 files changed, 54 insertions(+), 32 deletions(-) diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index 13c58601e4..2b39a4da50 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -524,7 +524,7 @@ pub(crate) async fn extract_assets_from_file(path: impl AsRef) -> Result Self { Self { len, item_layout } } @@ -22,7 +22,7 @@ unsafe impl SerializeConst for [T; N] { } /// Serialize a constant sized array that is stored at the pointer passed in -pub(crate) 
const unsafe fn serialize_const_list( +pub(crate) const unsafe fn serialize_const_array( ptr: *const (), mut to: ConstVec, layout: &ArrayLayout, @@ -38,8 +38,8 @@ pub(crate) const unsafe fn serialize_const_list( to } -/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -pub(crate) const fn deserialize_const_list<'a>( +/// Deserialize an array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. +pub(crate) const fn deserialize_const_array<'a>( from: &'a [u8], layout: &ArrayLayout, mut out: &mut [MaybeUninit], diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index ba28110530..30e284e8ef 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -190,6 +190,7 @@ const fn write_major_type_and_u64( /// This is the number stored in the additional information field if the number is more than 24. 
const fn log2_bytes_for_number(number: u64) -> u8 { let required_bytes = ((64 - number.leading_zeros()).div_ceil(8)) as u8; + #[allow(clippy::match_overlapping_arm)] match required_bytes { ..=1 => 0, ..=2 => 1, @@ -232,7 +233,7 @@ pub(crate) const fn take_str(bytes: &[u8]) -> Result<(&str, &[u8]), ()> { let Ok((bytes, rest)) = take_bytes_from(rest, additional_information) else { return Err(()); }; - let Ok(string) = str::from_utf8(bytes) else { + let Ok(string) = std::str::from_utf8(bytes) else { return Err(()); }; Ok((string, rest)) diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 1e6608951d..fa89945ea5 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -74,8 +74,8 @@ const unsafe fn serialize_const_ptr( match layout { Layout::Enum(layout) => serialize_const_enum(ptr, to, layout), Layout::Struct(layout) => serialize_const_struct(ptr, to, layout), - Layout::Array(layout) => serialize_const_list(ptr, to, layout), - Layout::List(layout) => serialize_const_array(ptr, to, layout), + Layout::Array(layout) => serialize_const_array(ptr, to, layout), + Layout::List(layout) => serialize_const_list(ptr, to, layout), Layout::Primitive(layout) => serialize_const_primitive(ptr, to, layout), } } @@ -118,8 +118,8 @@ const fn deserialize_const_ptr<'a>( match layout { Layout::Enum(layout) => deserialize_const_enum(from, layout, out), Layout::Struct(layout) => deserialize_const_struct(from, layout, out), - Layout::Array(layout) => deserialize_const_list(from, layout, out), - Layout::List(layout) => deserialize_const_array(from, layout, out), + Layout::Array(layout) => deserialize_const_array(from, layout, out), + Layout::List(layout) => deserialize_const_list(from, layout, out), Layout::Primitive(layout) => deserialize_const_primitive(from, layout, out), } } diff --git a/packages/const-serialize/src/list.rs b/packages/const-serialize/src/list.rs index f5a26b8f2a..1b94a2100b 100644 --- 
a/packages/const-serialize/src/list.rs +++ b/packages/const-serialize/src/list.rs @@ -1,9 +1,9 @@ use crate::*; -/// The layout for a dynamically sized array. The array layout is just a length and an item layout. +/// The layout for a dynamically sized list. The list layout is just a length and an item layout. #[derive(Debug, Copy, Clone)] pub struct ListLayout { - /// The size of the struct backing the array + /// The size of the struct backing the list pub(crate) size: usize, /// The byte offset of the length field len_offset: usize, @@ -34,13 +34,13 @@ impl ListLayout { } } -/// Serialize a dynamically sized array that is stored at the pointer passed in -pub(crate) const unsafe fn serialize_const_array( +/// Serialize a dynamically sized list that is stored at the pointer passed in +pub(crate) const unsafe fn serialize_const_list( ptr: *const (), mut to: ConstVec, layout: &ListLayout, ) -> ConstVec { - // Read the length of the array + // Read the length of the list let len_ptr = ptr.wrapping_byte_offset(layout.len_offset as _); let len = layout.len_layout.read(len_ptr as *const u8) as usize; @@ -64,8 +64,8 @@ pub(crate) const unsafe fn serialize_const_array( to } -/// Deserialize a array type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. -pub(crate) const fn deserialize_const_array<'a>( +/// Deserialize a list type into the out buffer at the offset passed in. Returns a new version of the buffer with the data added. 
+pub(crate) const fn deserialize_const_list<'a>( from: &'a [u8], layout: &ListLayout, out: &mut [MaybeUninit], @@ -80,24 +80,24 @@ pub(crate) const fn deserialize_const_array<'a>( let Ok((bytes, new_from)) = take_bytes(from) else { return None; }; - // Write out the length of the array + // Write out the length of the list layout.len_layout.write(bytes.len() as u32, len_out); let Some((_, data_out)) = out.split_at_mut_checked(layout.data_offset) else { return None; }; let mut offset = 0; while offset < bytes.len() { - data_out[offset].write(bytes[offset]); + data_out[offset] = MaybeUninit::new(bytes[offset]); offset += 1; } Some(new_from) } - // Otherwise, serialize as an array of objects + // Otherwise, serialize as an list of objects else { let Ok((len, mut from)) = take_array(from) else { return None; }; - // Write out the length of the array + // Write out the length of the list layout.len_layout.write(len as u32, len_out); let Some((_, mut data_out)) = out.split_at_mut_checked(layout.data_offset) else { return None; diff --git a/packages/const-serialize/src/primitive.rs b/packages/const-serialize/src/primitive.rs index 5f0a6447b8..0c511c3887 100644 --- a/packages/const-serialize/src/primitive.rs +++ b/packages/const-serialize/src/primitive.rs @@ -14,6 +14,9 @@ impl PrimitiveLayout { } /// Read the value from the given pointer + /// + /// # Safety + /// The pointer must be valid for reads of `self.size` bytes. 
pub const unsafe fn read(self, byte_ptr: *const u8) -> u32 { let mut value = 0; let mut offset = 0; diff --git a/packages/const-serialize/src/str.rs b/packages/const-serialize/src/str.rs index 3d553eba85..f838e23505 100644 --- a/packages/const-serialize/src/str.rs +++ b/packages/const-serialize/src/str.rs @@ -60,7 +60,7 @@ impl ConstStr { let mut bytes = [MaybeUninit::uninit(); MAX_STR_SIZE]; let mut i = 0; while i < str_bytes.len() { - bytes[i].write(str_bytes[i]); + bytes[i] = MaybeUninit::new(str_bytes[i]); i += 1; } Self { @@ -69,12 +69,18 @@ impl ConstStr { } } - /// Get a reference to the string - pub const fn as_str(&self) -> &str { - let str_bytes = unsafe { + /// Get the bytes of the initialized portion of the string + const fn bytes(&self) -> &[u8] { + // Safety: All bytes up to the pointer are initialized + unsafe { &*(self.bytes.split_at(self.len as usize).0 as *const [MaybeUninit] as *const [u8]) - }; + } + } + + /// Get a reference to the string + pub const fn as_str(&self) -> &str { + let str_bytes = self.bytes(); match std::str::from_utf8(str_bytes) { Ok(s) => s, Err(_) => panic!( @@ -115,7 +121,7 @@ impl ConstStr { let new_len = len as usize + str_bytes.len(); let mut i = 0; while i < str_bytes.len() { - bytes[len as usize + i].write(str_bytes[i]); + bytes[len as usize + i] = MaybeUninit::new(str_bytes[i]); i += 1; } Self { @@ -126,7 +132,19 @@ impl ConstStr { /// Split the string at a byte index. 
The byte index must be a char boundary pub const fn split_at(self, index: usize) -> (Self, Self) { - let (left, right) = self.as_str().split_at(index); + let (left, right) = self.bytes().split_at(index); + let left = match std::str::from_utf8(left) { + Ok(s) => s, + Err(_) => { + panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") + } + }; + let right = match std::str::from_utf8(right) { + Ok(s) => s, + Err(_) => { + panic!("Invalid utf8; you cannot split at a byte that is not a char boundary") + } + }; (Self::new(left), Self::new(right)) } @@ -239,7 +257,7 @@ impl Eq for ConstStr {} impl PartialOrd for ConstStr { fn partial_cmp(&self, other: &Self) -> Option { - self.as_str().partial_cmp(other.as_str()) + Some(self.cmp(other)) } } diff --git a/packages/dioxus/Cargo.toml b/packages/dioxus/Cargo.toml index 8644885374..94e605feec 100644 --- a/packages/dioxus/Cargo.toml +++ b/packages/dioxus/Cargo.toml @@ -8,7 +8,7 @@ license = "MIT OR Apache-2.0" repository = "https://github.com/DioxusLabs/dioxus/" homepage = "https://dioxuslabs.com" keywords = ["web", "desktop", "mobile", "gui", "wasm"] -rust-version = "1.80.0" +rust-version = "1.83.0" [dependencies] dioxus-core = { workspace = true } diff --git a/packages/generational-box/Cargo.toml b/packages/generational-box/Cargo.toml index 686c8c7c6c..3fb9180dbc 100644 --- a/packages/generational-box/Cargo.toml +++ b/packages/generational-box/Cargo.toml @@ -7,7 +7,7 @@ description = "A box backed by a generational runtime" license = "MIT OR Apache-2.0" repository = "https://github.com/DioxusLabs/dioxus/" keywords = ["generational", "box", "memory", "allocator"] -rust-version = "1.80.0" +rust-version = "1.83.0" [dependencies] parking_lot = { workspace = true } From 7395459f42f96eb5dba2eda238143632487c8423 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Wed, 12 Nov 2025 12:53:20 -0600 Subject: [PATCH 15/20] fix clippy --- packages/const-serialize/src/cbor.rs | 12 ++++-------- 1 file changed, 4 
insertions(+), 8 deletions(-) diff --git a/packages/const-serialize/src/cbor.rs b/packages/const-serialize/src/cbor.rs index 30e284e8ef..bc37cc1759 100644 --- a/packages/const-serialize/src/cbor.rs +++ b/packages/const-serialize/src/cbor.rs @@ -14,8 +14,8 @@ //! small number or how many of the next bytes are part of the first number. //! //! Resources: -//! The spec: https://www.rfc-editor.org/rfc/rfc8949.html -//! A playground to check examples against: https://cbor.me/ +//! The spec: +//! A playground to check examples against: use crate::ConstVec; @@ -462,17 +462,13 @@ mod tests { #[test] fn test_parse_byte() { for byte in 0..=255 { - let bytes = if byte < 24 { - [byte | 0b00000000, 0] - } else { - [0b00000000 | 24, byte] - }; + let bytes = if byte < 24 { [byte, 0] } else { [24, byte] }; let (item, _) = take_number(&bytes).unwrap(); assert_eq!(item, byte as _); } for byte in 1..=255 { let bytes = if byte < 24 { - [byte - 1 | 0b0010_0000, 0] + [(byte - 1) | 0b0010_0000, 0] } else { [0b0010_0000 | 24, byte - 1] }; From 68cdb26909239b04e24f8ca962bea66b0dac25ec Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Thu, 13 Nov 2025 11:27:57 -0600 Subject: [PATCH 16/20] switch cli opt port --- .../playwright-tests/cli-optimization.spec.js | 6 ++--- packages/playwright-tests/package-lock.json | 24 +++++++++---------- packages/playwright-tests/package.json | 2 +- .../playwright-tests/playwright.config.js | 4 ++-- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/packages/playwright-tests/cli-optimization.spec.js b/packages/playwright-tests/cli-optimization.spec.js index 6f3e18e1d6..48e6a06f3a 100644 --- a/packages/playwright-tests/cli-optimization.spec.js +++ b/packages/playwright-tests/cli-optimization.spec.js @@ -2,7 +2,7 @@ const { test, expect } = require("@playwright/test"); const test_variants = [ - { port: 9090, name: "0.7.1" }, + { port: 9191, name: "0.7.1" }, { port: 8989, name: "current version" }, ]; @@ -42,9 +42,9 @@ for (let { port, name } of 
test_variants) { test(`unused external assets are bundled in ${name}`, async ({ page }) => { await page.goto(`http://localhost:${port}`); - // Assert http://localhost:9090/assets/toasts.png is found even though it is not used in the page + // Assert http://localhost:{port}/assets/toasts.png is found even though it is not used in the page const response = await page.request.get( - "http://localhost:9090/assets/toasts.png" + `http://localhost:${port}/assets/toasts.png` ); // Expect the response to be ok expect(response.status()).toBe(200); diff --git a/packages/playwright-tests/package-lock.json b/packages/playwright-tests/package-lock.json index 69bd204d00..f7e3523c8d 100644 --- a/packages/playwright-tests/package-lock.json +++ b/packages/playwright-tests/package-lock.json @@ -9,17 +9,17 @@ "version": "1.0.0", "license": "ISC", "devDependencies": { - "@playwright/test": "^1.53.1" + "@playwright/test": "^1.56.1" } }, "node_modules/@playwright/test": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.53.1.tgz", - "integrity": "sha512-Z4c23LHV0muZ8hfv4jw6HngPJkbbtZxTkxPNIg7cJcTc9C28N/p2q7g3JZS2SiKBBHJ3uM1dgDye66bB7LEk5w==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz", + "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright": "1.53.1" + "playwright": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -44,13 +44,13 @@ } }, "node_modules/playwright": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.53.1.tgz", - "integrity": "sha512-LJ13YLr/ocweuwxyGf1XNFWIU4M2zUSo149Qbp+A4cpwDjsxRPj7k6H25LBrEHiEwxvRbD8HdwvQmRMSvquhYw==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz", + "integrity": 
"sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.53.1" + "playwright-core": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -63,9 +63,9 @@ } }, "node_modules/playwright-core": { - "version": "1.53.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.53.1.tgz", - "integrity": "sha512-Z46Oq7tLAyT0lGoFx4DOuB1IA9D1TPj0QkYxpPVUnGDqHHvDpCftu1J2hM2PiWsNMoZh8+LQaarAWcDfPBc6zg==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz", + "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==", "dev": true, "license": "Apache-2.0", "bin": { diff --git a/packages/playwright-tests/package.json b/packages/playwright-tests/package.json index 595cfc4c61..b52c1fdee7 100644 --- a/packages/playwright-tests/package.json +++ b/packages/playwright-tests/package.json @@ -12,6 +12,6 @@ "author": "", "license": "ISC", "devDependencies": { - "@playwright/test": "^1.53.1" + "@playwright/test": "^1.56.1" } } diff --git a/packages/playwright-tests/playwright.config.js b/packages/playwright-tests/playwright.config.js index 7e394ce6ce..e8f8f99790 100644 --- a/packages/playwright-tests/playwright.config.js +++ b/packages/playwright-tests/playwright.config.js @@ -176,8 +176,8 @@ module.exports = defineConfig({ cwd: path.join(process.cwd(), "cli-optimization"), // Remove the cache folder for the cli-optimization build to force a full cache reset command: - 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9090 --no-default-features --features dioxus_07', - port: 9090, + 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9191 --no-default-features --features dioxus_07', + port: 9191, timeout: 50 * 60 * 1000, reuseExistingServer: !process.env.CI, stdout: "pipe", From 
46fe5277dfa3a606c3fde2261f88335f62b214b2 Mon Sep 17 00:00:00 2001 From: Evan Almloff Date: Thu, 13 Nov 2025 11:49:43 -0600 Subject: [PATCH 17/20] install playwright deps --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 215ee2015c..2bd1c235eb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -261,6 +261,7 @@ jobs: run: | npm ci npm install -D @playwright/test + sudo npx playwright install-deps npx playwright install npx playwright test - uses: actions/upload-artifact@v4 From 03d81fcc778fdf6759bb04a356904fc8ec0e24ed Mon Sep 17 00:00:00 2001 From: ealmloff Date: Mon, 17 Nov 2025 13:05:52 -0600 Subject: [PATCH 18/20] split cli-optimization package to avoid two dioxus' in the same crate --- Cargo.lock | 8 ++ Cargo.toml | 1 + .../cli-optimization-07/.gitignore | 4 + .../cli-optimization-07/Cargo.toml | 18 ++++ .../cli-optimization-07/assets/data.json | 3 + .../cli-optimization-07/build.rs | 16 +++ .../cli-optimization-07/images/toasts.png | Bin 0 -> 20366 bytes .../cli-optimization-07/src/main.rs | 96 ++++++++++++++++++ .../cli-optimization/Cargo.toml | 8 +- .../playwright-tests/playwright.config.js | 4 +- 10 files changed, 149 insertions(+), 9 deletions(-) create mode 100644 packages/playwright-tests/cli-optimization-07/.gitignore create mode 100644 packages/playwright-tests/cli-optimization-07/Cargo.toml create mode 100644 packages/playwright-tests/cli-optimization-07/assets/data.json create mode 100644 packages/playwright-tests/cli-optimization-07/build.rs create mode 100644 packages/playwright-tests/cli-optimization-07/images/toasts.png create mode 100644 packages/playwright-tests/cli-optimization-07/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index aec6959143..594d28618a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5693,6 +5693,14 @@ name = "dioxus-cli-optimization-test" version = "0.0.1" dependencies = [ "dioxus 0.7.1", + "serde", + 
"serde_json", +] + +[[package]] +name = "dioxus-cli-optimization-test-07" +version = "0.0.1" +dependencies = [ "dioxus 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index daa27cecd5..0651d1ce3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -136,6 +136,7 @@ members = [ "packages/playwright-tests/suspense-carousel", "packages/playwright-tests/nested-suspense", "packages/playwright-tests/cli-optimization", + "packages/playwright-tests/cli-optimization-07", "packages/playwright-tests/wasm-split-harness", "packages/playwright-tests/default-features-disabled", "packages/playwright-tests/fullstack-error-codes", diff --git a/packages/playwright-tests/cli-optimization-07/.gitignore b/packages/playwright-tests/cli-optimization-07/.gitignore new file mode 100644 index 0000000000..0fdbab7369 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/.gitignore @@ -0,0 +1,4 @@ +dist +target +monaco-editor +partial-monaco-editor \ No newline at end of file diff --git a/packages/playwright-tests/cli-optimization-07/Cargo.toml b/packages/playwright-tests/cli-optimization-07/Cargo.toml new file mode 100644 index 0000000000..3e47929b99 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "dioxus-cli-optimization-test-07" +version = "0.0.1" +edition = "2021" +description = "Playwright test for Dioxus CLI optimization" +license = "MIT OR Apache-2.0" +publish = false + +[dependencies] +# We test both if the current version of dioxus works and if the CLI can understand assets from the old asset format +dioxus = { version = "=0.7.1", features = ["web"] } +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true + +# [build-dependencies] +# reqwest = { workspace = true, features = ["blocking"] } +# flate2 = "1.1.2" +# tar = "0.4.44" diff --git a/packages/playwright-tests/cli-optimization-07/assets/data.json 
b/packages/playwright-tests/cli-optimization-07/assets/data.json new file mode 100644 index 0000000000..f28e5498c8 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/assets/data.json @@ -0,0 +1,3 @@ +{ + "list": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} diff --git a/packages/playwright-tests/cli-optimization-07/build.rs b/packages/playwright-tests/cli-optimization-07/build.rs new file mode 100644 index 0000000000..bc050c1025 --- /dev/null +++ b/packages/playwright-tests/cli-optimization-07/build.rs @@ -0,0 +1,16 @@ +fn main() { + // use std::path::PathBuf; + + // // If the monaco editor folder doesn't exist, download it + // let monaco_path = PathBuf::from("monaco-editor"); + // if monaco_path.exists() { + // return; + // } + + // let url = "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz"; + // let bytes = reqwest::blocking::get(url).unwrap().bytes().unwrap(); + // let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(bytes.as_ref())); + // let monaco_path_partial = PathBuf::from("partial-monaco-editor"); + // archive.unpack(&monaco_path_partial).unwrap(); + // std::fs::rename(monaco_path_partial, monaco_path).unwrap(); +} diff --git a/packages/playwright-tests/cli-optimization-07/images/toasts.png b/packages/playwright-tests/cli-optimization-07/images/toasts.png new file mode 100644 index 0000000000000000000000000000000000000000..382298f9c7c658635ad6e2635e98ba265148cace GIT binary patch literal 20366 zcmdSAbx@p9)aDr>f#3w!#$5vhhv313yF+kopz#jw5ZocS1a~JmG|;$vkjC8`8S>3+ z?NrUq?(BZGTeW|@)zznOS6!*M?m6f8JYnAyrO}XykYBxeg(mY=Lgm$~*FrCkCDQws z2+ch6!pp}8$FJJJSFcd8|2?l?rDYJj1YZMHq{UuUKuPvqz93kLDu}*%RUL!!VEpdY zD_wZv&hx)IeVuY-9%K}s8Oq3zr2 zgOP;0y{3m_l=pEZnK+%C^$W;Gs)?n&4;dn~VOX@BGEs=^L)5rqUwJnnk#t`hen%F~ zesnWov!%Ax@*P~f;9D7+X)f=$xo)=(W4gcl);Qk2;&Y!d1@ngU!CXX?Mn_UP5hHWy zWu=<>>BDo)kZ^@0l|KcAXXVnTib*OdC&=gI7SYQVew$Fmp;0Z~WeAUBP`kvD9KvDr zd>M}XpB?U0PWZYX`~duZ4)$KwgHJ3ej0Tnz+Q8}l%}J#{24G=LC(j;lJ&;}}qkoCn 
zI`n+TB&Cc~pu(!Z-!JupQ>gvNj_mPI9fnPr_sNFDXWq)4vhk<;nR42Sj8%1xXKJFDryRs^8e}zIVjN)C)atFo>w15 zjz=6u>pJyR96Ni!MkAx~{+%a+-*F_B!&T~Yd5%X~3!n3NQv77*QcGL`3{7im8OJpQ&?VDSviIp(sW*=-Km{#&C+fI1sfGU zS!y18-8@K-TBWb7WqOS1fu4kF_mXZ#II}(HmU(m5ykTa0mzK;IuoVkDHqUWxqU`?u z_q7X8{S9SP=?D3Vom)a|Ww4zcfe}YhF6ZpkjSqvCxY@YZ zJLNb<)+|EBLKDV?B#$;1Ps?oSa#wTdYZdb*-vP_k9X>tjJ3Hg4jQ*Ms$Xou43L*Gt z3-|Bkm3Rt0uK_vxB^@~NcvZv6yj2adtIOP>m!1V_MNk|b-QlQp(kQl?T$WV)b@b-W zb)mq@S|5b1XRW2BM2bOiMx=;mJLn{RsPk6$2J|d#&qbHyJ@v41hdya&anUF2Y#<~369`TB$|jBj2;8^9B~DI#h3Aty7h={Tw8;+} z*LD(35jt6^qw<0BzGQPjJotd>8uM(+#c9I3jVa70R`Ym%dW{Cl1-|TQ3ZS_%!QWvw z`^c>*=i!d65SO@VNgp5mV|SS`HcdTYjlk{LiLZKho99f-=u5f*(ARkRr7S`@P;}r) z+GWSxT^U>Y>Y3`uy#N`fhz~g2<7oZ{B8w>Zy{Est;&oVkBR$Ea8|Y@MOfb-*d`{GA z`mobv;UiCFH)*mlYNw=?UFOj>4E-n??S{%Hu*Z&b!1O}eG!x716$m|ffIR!t*lzaJ zbNXi5QHGg)Cp*9iK5+)nx^=WAGh2IW*X2#tLyu*3EqwQFQ7BmB80}~Y}FTPIGRslcihIAvRR4l{k_D5G2ltJ?x{5%T|+P0 zo24-eA(N2ym=tH@znTbpR!hZwkfnxAs~ZtCDTJ6c0-a?CVrz02v}?t-UbQy^lVQos zx~T%m7^KzxPf7#Ep+g=Z-C^v%J0ZLBTPRpb?+~veD%vI~=3^-SHhi0w{2EUle;#7j z9!n?upx{&2fTt7?&FYa*a|`QFGX?}#<$J02&@KBS*|@B7qj#d4`2}1SRBZ7(_ryLz z%xBVEujfM)lx`S6Q%(vCIf4RP5}hvr|m_9 zqx1)A?Yk82hRxi!@C_bcv$X6LYwJTw#5S;@c{d$i<}#KQuN@*rb7w*PP4I;m*DSi1 zg)m7)9E*Y0oe3gSZdsFSATUhWMTdVt-}o(hS!~+!b@48#@VUQS$Yzy6(C*JD?^;0S zkBxShj^@?VdV-oNj=Yi~w06w0f?+b?o#41z@ou$NM4U9Z0>=i!IG%!w@>7pCFDM;a zA$rEhEpc&Rl%6c?x-GKPr9i8xnck3=f4JH@R=4oUP0P3^H|~;u(a`zM&Nd~hHA$;T zc@X;w>XE9Hv4dy7ga`~A{z(59S1FK1bQoctt1uTIdaqig(T z?!bMFdllgB3+6tyTN`P^CZuYju>+GUux$Qxc5Mf0*GWa5Fev4tvgWq>Wa z(-1`HKYmdETBN-wn~0x|SToyq1r9D^@s#yYtKaoGn$noQujGhnyLY=t$#i!p^uL%zD zBes)}9X>s|p1&(m={lP-9bB>tX)XApg8Q4vs{J>4wBXBq^Ef%$FO>Ajd^zO1#t|W~ zf#;d?46r!T%cIofqdn8kp4XTEx<{$_W>x+d1>|&lFxDvL0E5D8J7GhoKndCy2)n0a z>eu?VMa5_<%#?&yL0fsLVV{R_66Y{9CF}5Ru@NVtYu~hr;G5~^#T*LkYiC;q8>gq` zTVT92vm%zrV8Tl^X!#~5`5Dh$k5 zIr2``9IOuT8Ik`L zWq>&c*kHqh2jPrPIvWI+ko?^sVz(q6md_&cc5)wMsuTmhRqa+(`U#1E$cSs)%)$MANkhjCaX~ms z`%;hk10XBQ1Um0zca)!#eGgJhbHaxmQ 
zC;n_#(lk*s_*0j;nsl84zR*;CnlAvfraR2<3=P|2LymtuWP)?(>01vcyVyr;FuD3k z29QtXRe{Wk^+l{966KDhq$1t-R@_v;gvW9&q8yLKQK?{C;8wNr}6>6 zTZ__Ef-e*2*IFCE-3h}?K>ueaLhfe(%E`^@+%u&4mw!yr@Tn_LtZ8;E6 z_9Qb440@gt-Te}DJy6pX?)b(;K%&GYeg_4jKsD5CUZ7#XC-)Xgm?_;$)GhX&%v8qu zOe*$)wglyxMWaGxX?Qd_J4vs$`swp98bHpWqSiX3QHIoC5k4$E`ZczWqVO*COLLUS z-c!$!S`VM_P&L(AN|0b+^i`BO@_7bH=N*dHwu+*!+)?`Xg*+?svzd04$S|HyI0YZ8 z99M_|Zi;r#tD-Pmq}Sr!xPi90xjg+%A{p3B=A>oFu-=-7UyQRPF-r(BiwM7ek64*( zl&qU4mu60>;z(jmW=u_4TK?xUb(4|MtR{HH_d&ygqq=$z^svd%CNSIMk5ryu;c1gvo+$qPx=$auH~u%uqCPO z*Ri=`UG2$ft9yM$>o9TTewSFRz2#RGYA0zz`&Ii{+$S@2#kmnhUV#LutZ#eRzim^s zLd|4B7F4Bf?F@gl`>_F7teuAHl1hN|7+$~KlAN7Vr5MPJL>T?!2m#$6% zV;kT5k4JJzg$b*NeqXX|SlqzPOn4vkZG(FbgW+o+s~BrdCcE!pXE#dTAm8~QdKc+7o{Lq6IP|~Y*J=_ykpKZgyYJC5; zcDfZ^$rI@#=^KX*XME|A4{G8IgMhWYZ+3PeA2TCr?#)GiE-uAjB(0HrYWK0wr;A@h zNENxf08Z<0xbO~PWF|zrQ$`j=U_RGu_APU--(JlkC$~_pRf4t!;`?N8SdMlIY4Iaw zFiuCglXk81MSa*B=7foyMuYO|-ng7Bv+eBj9^c=YN6zwf{lH`KCU3;x2}L7r^h{EZ ziZEx29X=I{Djv|E(j(-t8|@=;yE@;p1_@fJO8a6`mYDjbGhU^GeXR8aW3E$>K#O2m0t1>RdKxLu|k-AWHPU>U-FRxC~ zjFG+s({_;GT)M!H=O5TMkbqVo#M#VHXA)n;*MVR7^|%~3)BHCm{3*CTEAo`rT42cF zpT)BKy~$zuz#ARmdM>`|`N7g|%CuUvo3G`o=~WlRf@{{k&UYbND>jDKOA%kZQRtaS za`^af(`U4LMXYpk+S81-wPs%@<-L?Qp|gBfz;2ZAV??r$j*eA9CTx_bgMeWVD1|Y5 z-W{p@iQ2J4G*zeXGjHv}lPfnuugDc-hCpLP-9#C%`5Z5k8_Np{UjP~LbDMHe5q6-Uon$*U0##w@sr${(=&)|gWRgEtA9M$Opvhtdi5sk z*J!mBlGB*ps!92m&J157VIQ~R=v%3f{h-j_Kq%=bkRl_<`oha=DzkYl#rG?t=ch4Q zq7Lr8MZJM_;b~>PKaMm$rEU}fQlbG%?V!#B)bkuHPQ6s!?nQcpy?4L!HzewgUdd0# zBowTRckm6g3{p2TPyf(*l_<oNzT@9H!~C0%7e$WprtCeX ztP3kdTE~f5`$?_9 z>1&kNZ+a^EW4`_d@h(U$Y5%!p>@z`>3dz5?DN-SlS?CyNWLtZU*@7%?8&hwJJDP%W z|EHB$58Z7@Zqk3>f#X4r%*P%Pi+oq~dq$oJD(R8sXt29SC*!G^n`wrH+phd0MEfS@ zJfN@)T{fh3QJa`Q=PNb>(>)Z}ylx;*hO88~iU*6Oy9*eTHGk>hnUVz-5V7RH6jz)# z2XCZiIA|aC&Mw;pl>v6kZFqst{0*B^%>04t?!FcR@Vli(iH-}}FVo2+V``Fy!1Jld z+D{KhP2U*b4FIEkHhxP#am3L<7_9p5qOk!#yk*<*Wt$&)kli1L*cub`uNjg$>7DNS z{w8p3@!z@PwcAe|eA|6^^6e|5h754D`Szckw`i6$P{D7nj$un+SLO=)1&tBD06oqU 
zDsz`&>xsOasrBAFO0*t)z}fn1TORIyzLA7}xd2ly&@L+1enuSV3o%gPwxJhng3B~4ZjJE<@zep+bk2;AO; z$EWi+L<@v8{rL!RJnt5%l%jE6BphYK*#6s_$RF!zYnPds48CS_BW(zbR@uz>>5NWp z=-@gaa`TzQ;&;N^F~&pe%tA}2lgZWC#!eemv?(BrWebUGUkj&loilAZaoX}*{n*|F zftA_wbNqp=IEFz#J}d$WBWlj0)lMYK3o#kLajs&|$#=8{C2<(UXxnHH= zuU0p6Ry5}BP?5@I_%~DzIb>{i{5cUw$H5ovT{|TeuE7Ago^SD6=5t<%c~s+1=P7>1 z$}~}AeC!;;xSAw%4PVgLdNn_XM^Ptcoe@EAj~YtH+K8#=gb>ahaC@gHkok7vjiSKj z`EJJd9lky06CaZ@3WHgg=Q)VRADGLu<|LU>s~&1lWC_0@Ez<~Z`W-H(OXyL09~H?9 zi*VV=_?>s(EzY|zyOiF+=~rAkP|TC!3=R8vymS#s>=_XcAsd?l?rj#)Ijaz^xbfB< zo+?ROP(7p0@Udk&C)QVqE{jDaO+k52e+2LvD@3${+j@FE))^$~q9INV{2wn3Uen>r zewhi+8fldv)q{K+IWq109^49t2~_3#fkJw+d0`3mUbpEYH|nyM`J3zUV<(QjWC0W# zD{huVP6XPof}^*(Kq?H~@qyTG#rT>^eb5hD@}C@<>;|SPu(L2j(z8U`kF!+G`Hsp0 zJI3E>u%5)e3uoWdwaUdukHr(6H zV3rsR?1@lowKpzJBNGnlYCmHB zl57}pCVh}Z@7>65A2j+@cg@1HU$s1F>;%@lSLw@GkXqk8tgxdUta9-~|o$n7&Z-`@_Y>wL>B}Y3b&nJmNT_Wyr(r z(0Y;)7v@5Qw*p&m^KVZmz$>R)!`iL2 z8$<2Bnj;WBW{D$f#5RP32nQ%2120dkdNP+NrhJmywG;uxHI+@mpqb%@K+gl+kGL z0kLr=lpjm|A6UUwegOi=|H};Ge+Fer!dL(F)&2t{X)@}Utl1tbFHk3cDDZ5r(Q=hqiqq@ZBD|vK z1vgXWywaJPaU6MB2z3y>Wj!`+icI}pFgS2p(&2m)?fbcA;XmXe+w4-Z<_k@K^7G|1 zS1Ff?R|{#ZH%u$^e_>qK83LdQy*8JHe3{Z*9;dFWmu;j`5atDU{R<4}y{hw|t9Z;rf&@Lk%c0@99S&YjT}!h9H|MYWhCKgCNvC@UUMulM z!;xeVoe5uU;yg)vidi*!PpfZHIP3E=2n=jl6Py^gSQU17VN0?q^*u{AYr=a^&yJHg zz_W(26r27i;`5T1<5@kz&>L%Wn|;}agnzK*gTuq3UdqJZC*K>@Y;J%@fzP0@VaH5L zVkf@4(d|O3RLPJ2;P*FR2gB^WS^!P-Rj-z%S z%J_B)<114RRy~CrvOh0Lc~vZGY1~^8_yVDL7I)$Axx(?|IQN zkeJT@P++I(O(gbPyTrcqJ~najr56a9=yU@cmUgZ_xb@)yhs)7+1V9E{;8Or=n$oO+ zvw!k5Wa)+K(Ah3GYnE$U95*J79t**jKO;Due;xf)KJU_L zYu{6}*v1WD@R!1E;}D-gcL-u&^Il{5o&kTzPI-!1wp3)kE4*vv>9EjviUFklZ&)db zYvk>0sTGQVM+oVx6XFf=QEm_h|7)||R+k0S<~0n`2T{j1Aq{3KLe{ZNfdL;2gON2> zBP?daj@~e)T(52&O)uM`^N_Eoe96+xRlDqvwW1DT%+uYw+&zg43BK=vxjZl3d7q*H zRs3ZFqZh(={Lr_bN1&o99Zfd8qV?mFkX8zu%-@Y1GGo)RAG}81b8Jc4KU4ch7*(=a zc*2uejSMQiBGXa;QSdcuLWv}Rlu&I$zFp-ay1J!!&E?;Wj#>CDTt;V`%&wiK-0i_t zM{EWzt$dPPN>_i#s4(nuGHS6H!4K3@mcq#;-d 
z^%RG@V|-`HcCN0fa-P;%w}HVc7IpiE;ZbpjradV`-dKt|)#jO!&cXiW&FJi0*3jeig?3EJ4NTYs zBFe8rjyje$*J|LD86n_FW3(YN&;s5Nx2LBj&5&`Fhy}`6^mUs740p}G9+jb{*7wj^-yH z-{bb$wmy><%ku3FogS)61&KPFC(^2>J=&A+Z4@AN$D%QfsCDr5$(m-?DMw&l?0phZ zW}NX5n}7kcmw}W7!p1M`sSkuw6C8KsT2J}+*GB>?!bng?8~qxLH{T3-ER885IS(P9 zUTQwE?q80mo zzDGAOr6rCZwfDS9u5zLr2QVgQXw(_y=C0V>v{SS8kbuXF0GV6yfe4&1fD{W_O7=I7 z@C(t9R~Lr+nY|WLrYr5TYFqcD_f=a9> z;EP4&#aD)=WFA+WI{_Frk^fVig9B(8-@s!*4SoSkz~p{wM-;*a&Jm2DT#|`APGeFj zfp{%4I)1~^$A0hB7~Qj4px8_6>!OS~a7aWE6UF~dI*_I6y@kMr>)5hT{irt3XG4Nt zOw~49Wx9wQlNm4U3&{1Ix$LL9C`t{6XSOhH>d+)Q>+0dFuw(po+AGHJ)L$ z=QD!Q3omZIOfMfyTr5hd@A;A9)W*2N09&>KH!gJ4m9Y#xZEt)#V##-j&1t*l*h4_tH(=pPHdP8Egtlu!Dtm{&K z%J^t}F7Q4k!-#a!LklxDQRNpfUKqf~hVs!x<(h(YatXVG@6W?3o`?5>~09?(gQ># z;3`#j+-9G&$8KN^RP(O$M_6!7mOKm?@t*h%TGI{)TaPh##7g3Iz`KJiZIMEjx0qUD z1WhO+aGcPb8E)S+a&Jl!jYDo=J^XO|0^WgRa=cx)sS_ zps@%a&)<{e_# z2ac0FE~wQ^u2L5H7EfF47lg)&^d6p6dQewZ6@0qzwWPLg{~O0na7+QunofOgQKTJ# zuke-H=^+IkK4~`W{S}Q*!N*CQc8)Wt;7`|UL`Xsnw7}S@C@|{?SHM*ZGm4sGLw2G|T%huD7=8h3Y?QfPdfOAw2zX9Ks z3E}HTbwA-Hc=)ge zI2x5+bYk%Tqjq=o=cfMM;^U00dW1nG%HTyE=Y$oK9!ai(OvXFb>wg*IRCcSej}K;| zJQ1~%c^-cALZ3fc@JN31pqn$1Q%FP;E+8gxgKPHvYjE`9m_|Cls;O5|lK%R{qu<5N zHK?lzFi4!`#BBbP%dxciqQvf9AL8?4MEeF@uhkH99X4Z4<;~ zFUm3YTy3VdjLk?El#2ao!dkj~*8OzXB&B*#*m4eJiCH+AME^VKc4tL`O8JDn6#eI& zV=0Hn0HU!F;xJfF3lIlN1#Y(479HB+Hwo*B?{~5+v(By7XSRzjQT)NQgY6Y{Br>&1 zRK?fGd0muTh-u>m5DZV`u&&katk>^nhJK_OS=qA{C--IOA@@_1qQalbUZ3Rpj6h*E zZhxF1!OFA)@$Tto@eu$00$AvX?2H3B=gluDOuR1#tCzja6u*`WZzGmgIAzk<4@$wl zuv3p?g1p3Joee;~{vfZO_MV)1w4|K=nPds&yeBfP+i$FFD=S4NZ^KcJjt2DW*!>iT zMlC=D9m@NJSV?P`=wYa^$tsxG*N0-~#4iKM8!WJ>0AQ_8D~J^`mwwvj%$fMP$7A-T z?Z%@3=~4J$Y3+OW_Q1DgZx211F0>^<$nz1lz$e>@hQ)AH?&uqx(w?C^S%{u4=Jy@k zOSlnV!}D3A;|I)M>T~5}h5zlJ+6&4Xb3cv^zSMsA5!YWu7rkL94HT62NE0uoN{1&z zkK5|kKhSKsrJ&(%8HyzS~XJ9U{tPw zijYZ)(*F(K`%E${6JE%*=NK|oc@fB#^F^Z9NLs;(pD$?T^OKCj)kiK~Fak-Ot1ZvF z2(>5bhX%5QACRIZp;`#_eHIP`KPr=nXY&43#&4JYi#;;Q@RhY)pEFrq)*VX=rv_YSS64%gF0;6; 
zD8&UpzDzEt;`}`i3EhAvToe?-#&RLlV$N~X>msf8zFM)kFQ8}zj2u{2a`A5 z%6VK^Xkpk6`~~);zmB&^q?kQtj z*fRoEn2K0xFJMeoOdoaPh&hjf+nRG2G@j1~oGB%QwR8H;r3o6B(%}tQI!%tVd83HA z1J-<031u9Gre@>6Easu}@fMFO`gNSG8={1wY?<7qH=Vz?vz8s4#y(7H`EX01Hs<$5gURgn;&`%8-h=)4Jb;^_KT5h^zz(gS`A#4-Xo@vzlcY+!2HaiZbb8ZTi=C+C&jVORv<+v zbIqVwzys}E6s|tF3z6evPLz1V&@>Xv@G%{vvEuh`z0+2yLdGm>64FDZMmV#$IQj-CZ6)E zIxZQFW``8Zfq!`Y@1HYvr2rGcGRKp;*{<5=M{kZq{#%(Qh{A%^<+k7rZn*{jb4BR3 zbXplYWfiMogR027kYV%6_8DR`KPCF{-xO(qzw4gPc@#%9!y4$-id6|!ySaDqZa)&k zXXs#sQMb7<#XK)f%X+Hl zKa4)(+hIp{CSpq|R z$TNP~Az8?DP&rb8Ww)POx714!iKKrU@Ru|~We-%cP%evXOEZaulU9GbXi6?GsgUxa z`N>e?$y}Gz^(32cK{KUDQRy%WqDk!qzB)fRHsB9~X|7a`Il3HEVC+0ivCDcW;ABfMtD;KuM=y_I~vHg&oGT~0ojv0JdG7CV3%&c~^4 zQ53G?W2SgH>0GLHLguWW4I6Q>yG70V(_4LFXPO_HrKz0Iy?fdq=Kj-V_R#B`?0j1x zl8HiCbH6S86CNRGGm6Ll69dQH*=^3Um%u7l;uq;BHYLGhJ;}#~Y+)1kg)scacVtzE zdwk|u?thu=MGU0xXDXj&^qL$g8ZjKW9{LQuM@h#_sTxJf z;2|v=C-Ve~x~&gY>=Btf=nX*h{jd;K4{kZKyblIz>893nvuUHVAG0ithYP{oI(D*tD*F0dPn*b0M5_H&c{xkNmky4~K{6BQJ{kB=6L z{|New6h#7(>$b7Q$gG@{ynXYWPc*drX~8~nslNm`bgAdUtd&}4 z$0fY4k7_7sp^SZ9`yxS=f5OD&*R+eDR~hbnAhUNr9zp9ypACU3rRk-%e~)3G)Z|C!F%K;~`vCU|lk9LQl!^J%(oX!Ab+ zlo$H5jK>YxPJg)vy08ag@~6-%^^w=+9O0RB17Q^~g1S)>=aZrPEMZK(HQac2Q78LY zK@4)`7}mCIh>d%PCg!U8Yzn$;y~<2(NRS_QO%u0c^w2jBe$dRG$E&2|cF{JJ0lpjN zz!RzBCG-Qw_JB1`l9az~*b$W}771cj_YSs~+DyujA4`nqTMjY67bQh8;`xTa>%Nxs z4Zhbbm$RWKtiSs{ICV_POqv7Mw0FY-pWfqc%YK{59rzh}$LJKju+jD*`GBZY5R=<` z*44E=Ekb#{4S|P;PmWHZDB|i$6@`CM9%yVb4$9=I0}S5G#wGAtq-{5qs3b5cyyWk1 z0>-Pv81X`H^W?;0ddWjyHf#iiwTC;*o>qhOnX^+`$BJK{I{Ix7r1)m5`0ZrztJ!3R zP%VQ@^OpGVfDK@V0KL8Z(x)9~J;K%(0wh%l^TY2IcjOx|ExF5i(we8=u1+>h5qNlH zXGOS$3dngZq$mgDFG0UW&Pw8a#^mE`1pcrZi&_tVJJNK47jFOF%Mq=^dt8Qg?+`@6 zrkU={YXU<%!y_7H>$pJfbN$tkYB3{ionW0~Ejb249YUl_*@hNzIK|vL38K?$y9#C) zeH{>f(#y~a_tgvM)lw6cPnl<uEM6r`nv?`F{fL;-*MMTh$5Gz9SCFeFn`WRfU7wIkuO8Eqq=F+^g882{uEdt^ zqGhz(zVzcuQ`IC-&JSS!0aoM*vMT~^UINP?#ed+0t0a?m^&dX~?QIJKk6RhNUr5Va z(T*v+_UX58GvSIcdm>kz3P=7{-ydLHtp2(5+sdk{ru~KjP(2>2TtX4w44HH_Av&h; 
zTs49F!52Ho|0lMb{~oyHlcAvr^Z2%Tc%PdA;q`NACEVOQg>!3MFO!j>K-M`5{P11H zu4My2;W{CH*kK~_5>IsAQ|Wxn;CX!e_#Kk36Hi-$?$D{G`}=?3ER9BgCH{YD&k4|6 zGZ3)(uQ(=`XCulG_Fox`1>BcstM=Cu==;SP(LryX@7rT-e!2EusWwJ5+@_z z@~(5)n?V!t|As#&UhwFj0mgkJ;J*SEVwV?W!{@w}VkK~!(L6a~Y5!?B`5iE233SM{ zA|&LoG<33~a3EI*+}h=Udvpo5Z|WX?(wr@7c#J|9l^x)vV!rI@%hpV+~sL3qp zbFBwr!vBmE2Ku)1bu*U>2RI>ybWW~Ga^R`=wI&`v$>tuW6hLXz4=G0)zzmGMpd0UjGyi5G)>apNm1 zn-Ve4ISZ@Ie1Nqn%yy59W0XmZ%G6IaYUR74M`7rk+q;J8H!$A?z5hmSIN|FW7e~aa zTs$FAwNBHbr#KxCEq5gmTrFWPtvzxVy^ z4&4#nn>!JXM?mvLhSs^l1<1jBtn2|G@=3&ZDdi*y%U3+u6NwB>8=C?GqIP5zQtr1r5-FW@I-c6Om~ zE{oGX+hp8osPae9kKI`C^9?WkfS8v(uQdyD867~xAt7FAvd!smk^8sFutNJM66zDV zWT7O1o$)R}(=(O;Gs!~CW`y=Pllq}%w-Xk7b7vpkei61gS;!cB`}|eGN)^k!mmVo| zxAVG?5X&)E3Ck64#kbkQT^A438U)EHSx-BY%C(2_R~KMSc1fIS zEC`${pk)tgBl?AIRqjpLS+y@!6S4nEqc-)okR&Fj2hfyGTGbW`Kng2 z_FwgtNp%9@;>%q}eJoj)kKZtSo2R;!#s-|VkW zR^A(;;^o8`k|A&*1VCGpEf0y<{))KS=IVL{tr6cs4;eGR%WNuwC5#S~C}Rtejo|H< z&vau(jj^B!_CEd~F2&7LYc?M|o+HK6Xufp0+1l^Tj$!78X@w>^lBoy7Q2FFo!h0vM zSUsK4;N29g*<vT;7=VUtPKQ+zy7w17cjT!eJ%2AX-K95;)tPbZkzDE? zy+C{v*P5+z!U6=U_P+7XRJ4>TB7FC4OZ|@N&%@P|gD}?g9e%G1a)-`CcdVW=li`|p z3qpMKRRkIEQyUzjh&W*j0(#Wy3v4)aK4}OM3To~Z@}NWAmfVLo%a4BvNgnupCZ7Tb z39U5KeMoRA){#58HcIP#IK9LH_D9>;(4jrPxI`x-TOFZCpuHVk?1Ev&!-f=#v8@@p zXBnSB1u_~el;C~_xYUN$WFf^|5XeVkZ%dEs{_Tc8l1a#IHHP@_s0}kgJb(E%P}14# zA4g-3FRPy94hi*a-s>J-TOJ+sAu(<2`O7XY44~E<`HVlK1#x$Bc(T1Fpq}p6*aG?T zV(FQ3i3eTv?t{M6N6s5};*(M?GDad#KZ&wPh!$*-=o~d2?>(_6^ zt_+Z$Nth^OryiU?IP2$=wyu4y?4-;KwJs>d|AbUWHL*d{f)iWsUu@<_a8CSmOoIwg zF06XM6fNrmEcUBd($i3oeRwtPD|k!1k;5^uE})n2n_S=51}VT~3X`TZ8RfkyKteA5 z{g4~s-)d4O=t-S&Xu(gET+<7RYFx8<=;z618bJq&jl5-DySENCNlb8TJB4#bj}1k! 
z)dzHXli}u)MpqlnZT`soCCm?RWeCJeFdBGx26TsAp6Zn?)LVH_ED580C&{xrx?Du#knNyK*12pUC8uzQkV}F;Wj;;8gm;Bx6Yn8J|`HKLE zKSk5aAU9F+qGowV-8T=o`KY}g>$xFl*N}%s_dxzvln}|0)pcleD9E1q6-*BwDo%m( zF(~>#x%g!hl-^J(CA5lVRajXNAM!nX?ip?D>k}o6bRsV2glx|d?hV&la)xw2RDM1( z?^VvpBt^S$dZ9@fj!7-75?XooZpZWAE1#>?-q7o_s5S4l|4a{+eCYkO=|5Z*zlKrw z&iG+xv9q};)PH<#PS<5uJ=CPyfN*u(<6>+onYd-t(%LzSyOOzKgTh*df<@vJWB;@$ zP<4aC1hf^cxPT=jQmOJjhv%bx9M>&^@5PVx-2`RIUi9k-0z zFD1epsZ^Z>J=28;CbP5PX75jEYtF&UG6z>nQfN~sec?NpU5&?ih__bCWOFrjZa%I^ z-W>KEwNWfNo`*QZuPdj0Sl7Yu(1VCL-H2%+2>=2k{hDPt~0 zn`<7IndL5u5Sh8;wkcN2&fcB#`J8v>&3XHO`G52O{QlqH_lsXww0JOEv?^z1bMp!B z{fF0dSDFd7{9$eP?}|S%sqkcBR|-w;&_>*~8XlaNgh- zD&2j9iezO}$Y#nplfx~Omju+bYga4xG9{IEV$I^CENT)On&;R_(Fzj4NAHb`dY_** z6ub15UgtA$|4KnaRr{1D$jdae7ayy1B2vFTk>Q;tG(0ioR9*7zdkJ3Qi3QE6?HGtp zdAg9>^x?dpl>xwviREN#NR0qW@b7$GeK`8BYfp|pz9$9DW5zR;DG3f(4Hd?>`_C^` zQgskdUWE;wpeC{CyFkkASMEDw%mA+q6u4Nfu1qQ%G})P990pS_&T$#o`e}4tqkBa% zxHL}Lud+v9E1AeZMW-WD^(OIArG1u|6>55QL0Ql;q*>sOa0^>DBC!)2?MpgW!)M*u z{f~xR2|gJ}5lRkY$K^U~!ft+GhJkblt*i1odOotpZq}VjB+%$uQ?-hT4arguE!Jj6 zNmEE0yo8DiRo4^Q*J~s)cyXLRW*>!K)BwWgE7c|R9C(g<0=1Ll|D-%06Z5k)xMUT~ zCPtwTI3yvvOl+L~jz(#5>C1WmPJKye47Rr+BPlZCQ^KEvQvBPz#im5O_2ehggxPh-o`h|!T}vCgvH&gO75CbXcyXmJW6Ig$i#KUg+s-&&>ZmMr>d zIRrwFtQ#*v?Q@p`VYu2>(mA8dvJ5NtwN11uiIQrP`sUj5f5vn=pOS2x7hfET=AJte zwCy?!NjmkGJL}J8{|b->y%IL5gU~%n7F3z){dS5@FP8Geq!Xb~UNX)C=lz#-GIt0| z$3ND7-$hdnYr*9@+-HDYvVPtExhxFQP4NE`A3cxb9K70*D7+5!ruogl%xSWEHQ#Hc z70zNf6goq^wr{T;+!insvl0I=ERLtVPROdRCKn=}i}XIhin}l;3JEuv5`J}2HG-9{ zepR~R)oqx^p?I7@;(mkfz2}FzAP}+@V4-cN#nMinV5A%jj4#(?6ogqP73X?s-UN9 z>*nn5_s4bc>Wuup(y@-vH@A&z0I|iIw55bk6x1-!33OC%H{ROi&kl-b{BE*vO3)qu z>mkDIr)F!Tid+lIj8HA^GnEs85_)_(3iZ}o%~W^>|2Ij|RUFTQMSS9qH5SYDz&M2r zAA;2eaMZ4ha8J&DOXXeX6G2bySkyYRIlV=<#}p^2V{<;5ctIuWhe7@*uENZDkenJT z2|q%%+J%NMJsyj)QtVIKxRD#N^-ihe%pvn-$(a+t8#%gLx(lKE8WH{gp4U6(wU>hI zGcF16%`HnoQ&b??0|2-$WA|}p9%Kt$+kHLMkQa-cZm|;CE>ukpDS}&B7AdZbOR2p4 zK!jKJ*ZBo&m*@~Wlm>yPl2lhzMrl!Q-H4RPZ5l3QNuEvQv$sCZ#nf?!LjD?XcnLeO 
z=_?`2AhxvkBFMiN@^BLhTezdRV|~$Q-w-i!Guhj|N{q^RGK%X&U-#WThi!&siRehM zTZdaxdFKW=dq>Ni&~g%7w8QoNMYev`vm!-C<%@ZR^yoJG}&!wZ>k zhHK)OjKRjqCQQ(J;pHqB316?zzXG^DUMjc=StlG#j7Xv+RW@!fo%*?B@dY0A85uk@ zdWhV+;~@rHDV{U`kP*|**ukYFxh8CXY;X96<-E<2Za*+^I{i05T1h!c`S@L|p>$H* zYHc53a#mF-?Mtt<-+}ak!u)feYty>b?rdC^clG;Sn=PS|4v@svuM|{xyPti_U%X|c z)2OyIp4Rna0WtB`AaKm=lIBEXMFoO`c9=|SxBPL52^Enw|Lh^7$PS-_2V0%HEVUBv zbeV5D@5|q33nYTRdn>ygpZ7Y93kj9dyC}YI$0>l%_qlSWlc|e7ZnIz+v(vnKH#`DR z<)X(%`JhK=jVta*9b30_P^Rz>a8l*>RFtHPM&o{2|G4`)mHgQr9bf2vn9O&1ad zl<+F><6O%$nklD~i1se3sj~^%J1S9Ou)Y4Z0H;x0)f8cPk`_KRtEQlp{s+m$!tW>| z4mP3S)TB1QaNfHg;wV}UU*6r7;el&zhjl^WWA)E$icQva=ZngZAun9xl1=CDq zxx^3h#}gzL16bX}w)b{T*COP&&#Dx%viWcsd(nMKXNt-*A#JZ7{stCYgy~?Y>GEEE zzEyy(f4kf?yJhRP$yN2LNHnvVl+Eu8K%&bHQ+xZ>HjiA|RSS|ao$YQXprCe2 z6AB^uA#C)*8h(K=+o8pd3m#5_OAh=I_&3YZh*OXUThX(Vt;j4lA57Iu@z9XM72b^? z-#Vk12|qT_kjYY27cSN3(fxVBzV^=|j5)TiJs8gCzvetQ&jH$b$DDYevxRt)E_2n; z+u&WfnnEYN(IWm{UNa=tg4tbQDDK_oYwlj3+LnS2+MHB+@W`-=B~@n@I?T}TyE~`< zZAtpeuU%Qko37v|B79@HVtYX(=2-tv`81K(^D$v;CRo&TP?SN*w@w)Nnu<8fRb&6y zrh&(zgN Element { + // todo: test monaco more.... + // const MONACO_FOLDER: Asset = asset!("/monaco-editor/package/min/vs"); + // let script = format!("(() => {{ + // require.config({{ paths: {{ vs: '{MONACO_FOLDER}' }} }}); + + // require(['vs/editor/editor.main'], () => {{ + // var model = monaco.editor.createModel('fn main() {{\\n\\tprintln!(\\\"hi\\\")\\n}}', 'rust'); + // var editor = monaco.editor.create(document.getElementById('editor')); + // editor.setModel(model); + // }}) + // }})()"); + + rsx! 
{ + div { + id: "editor", + width: "100vw", + height: "100vw", + } + // // Monaco script + // script { + // src: "{MONACO_FOLDER}/loader.js", + // "onload": script + // } + img { + id: "some_image", + src: "{SOME_IMAGE}" + } + img { + id: "some_image_with_the_same_url", + src: "{SOME_IMAGE_WITH_THE_SAME_URL}" + } + img { + id: "some_image_without_hash", + src: "{SOME_IMAGE_WITHOUT_HASH}" + } + LoadsAsset {} + } +} + +const JSON: Asset = asset!("/assets/data.json"); + +#[derive(Debug, Clone, serde::Deserialize)] +struct Data { + list: Vec, +} + +#[component] +fn LoadsAsset() -> Element { + let data = use_resource(|| async { + let bytes = dioxus::asset_resolver::read_asset_bytes(&JSON) + .await + .unwrap(); + serde_json::from_slice::(&bytes).unwrap() + }); + match data() { + Some(data) => rsx! { + div { + id: "resolved-data", + "List: {data.list:?}" + } + }, + None => rsx! { + div { + "Loading..." + } + }, + } +} diff --git a/packages/playwright-tests/cli-optimization/Cargo.toml b/packages/playwright-tests/cli-optimization/Cargo.toml index e7519c8abb..cb6d6c1ed9 100644 --- a/packages/playwright-tests/cli-optimization/Cargo.toml +++ b/packages/playwright-tests/cli-optimization/Cargo.toml @@ -7,9 +7,7 @@ license = "MIT OR Apache-2.0" publish = false [dependencies] -dioxus = { workspace = true, features = ["web"], optional = true } -# We test both if the current version of dioxus works and if the CLI can understand assets from the old asset format -dioxus_07 = { package = "dioxus", version = "=0.7.1", features = ["web"], optional = true } +dioxus = { workspace = true, features = ["web"] } serde = { workspace = true, features = ["derive"] } serde_json.workspace = true @@ -18,7 +16,3 @@ serde_json.workspace = true # flate2 = "1.1.2" # tar = "0.4.44" -[features] -default = ["dioxus"] -dioxus = ["dep:dioxus"] -dioxus_07 = ["dep:dioxus_07"] diff --git a/packages/playwright-tests/playwright.config.js b/packages/playwright-tests/playwright.config.js index 
e8f8f99790..edf9d2a861 100644 --- a/packages/playwright-tests/playwright.config.js +++ b/packages/playwright-tests/playwright.config.js @@ -173,10 +173,10 @@ module.exports = defineConfig({ stdout: "pipe", }, { - cwd: path.join(process.cwd(), "cli-optimization"), + cwd: path.join(process.cwd(), "cli-optimization-07"), // Remove the cache folder for the cli-optimization build to force a full cache reset command: - 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9191 --no-default-features --features dioxus_07', + 'cargo run --package dioxus-cli --release -- run --addr "127.0.0.1" --port 9191', port: 9191, timeout: 50 * 60 * 1000, reuseExistingServer: !process.env.CI, From 4705c220a297f5c8c9cc7041e2cf1b01db7729ac Mon Sep 17 00:00:00 2001 From: ealmloff Date: Mon, 17 Nov 2025 13:17:51 -0600 Subject: [PATCH 19/20] fix clippy --- packages/playwright-tests/cli-optimization-07/src/main.rs | 3 --- packages/playwright-tests/cli-optimization/src/main.rs | 3 --- 2 files changed, 6 deletions(-) diff --git a/packages/playwright-tests/cli-optimization-07/src/main.rs b/packages/playwright-tests/cli-optimization-07/src/main.rs index 252c3946cd..cd7f590c35 100644 --- a/packages/playwright-tests/cli-optimization-07/src/main.rs +++ b/packages/playwright-tests/cli-optimization-07/src/main.rs @@ -1,8 +1,5 @@ // This test checks the CLI optimizes assets correctly without breaking them -#[cfg(feature = "dioxus_07")] -use dioxus_07 as dioxus; - use dioxus::prelude::*; const SOME_IMAGE: Asset = asset!("/images/toasts.png", AssetOptions::image().with_avif()); diff --git a/packages/playwright-tests/cli-optimization/src/main.rs b/packages/playwright-tests/cli-optimization/src/main.rs index 252c3946cd..cd7f590c35 100644 --- a/packages/playwright-tests/cli-optimization/src/main.rs +++ b/packages/playwright-tests/cli-optimization/src/main.rs @@ -1,8 +1,5 @@ // This test checks the CLI optimizes assets correctly without breaking them -#[cfg(feature = "dioxus_07")] -use 
dioxus_07 as dioxus; - use dioxus::prelude::*; const SOME_IMAGE: Asset = asset!("/images/toasts.png", AssetOptions::image().with_avif()); From 0282686492418deb81cd3023e879788276286d52 Mon Sep 17 00:00:00 2001 From: ealmloff Date: Tue, 18 Nov 2025 12:04:19 -0600 Subject: [PATCH 20/20] force the toast.png asset to be included in the linker stage #[used] has platform depedant behavior --- packages/cli/src/build/assets.rs | 8 ++++++-- packages/playwright-tests/cli-optimization-07/src/main.rs | 2 ++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/build/assets.rs b/packages/cli/src/build/assets.rs index 2b39a4da50..be9ec74d60 100644 --- a/packages/cli/src/build/assets.rs +++ b/packages/cli/src/build/assets.rs @@ -161,7 +161,9 @@ fn legacy_asset_to_modern_asset( .with_preload(js.preloaded()) .with_static_head(js.static_head()) .into_asset_options(), - _ => AssetOptions::builder().into_asset_options(), + _ => AssetOptions::builder() + .with_hash_suffix(add_hash) + .into_asset_options(), }; BundledAsset::new(absolute_path, bundled_path, options) @@ -216,7 +218,9 @@ fn modern_asset_to_legacy_asset(modern_asset: &BundledAsset) -> manganis_core_07 .with_preload(js.preloaded()) .with_static_head(js.static_head()) .into_asset_options(), - _ => manganis_core_07::AssetOptions::builder().into_asset_options(), + _ => manganis_core_07::AssetOptions::builder() + .with_hash_suffix(add_hash) + .into_asset_options(), }; manganis_core_07::BundledAsset::new(absolute_path, bundled_path, options) diff --git a/packages/playwright-tests/cli-optimization-07/src/main.rs b/packages/playwright-tests/cli-optimization-07/src/main.rs index cd7f590c35..5f8ef6f3f8 100644 --- a/packages/playwright-tests/cli-optimization-07/src/main.rs +++ b/packages/playwright-tests/cli-optimization-07/src/main.rs @@ -23,6 +23,8 @@ fn main() { #[component] fn App() -> Element { + // Force asset to be included in the final bundle. 
We shouldn't need this once #[used(linker)] is stable + _ = _ASSET; // todo: test monaco more.... // const MONACO_FOLDER: Asset = asset!("/monaco-editor/package/min/vs"); // let script = format!("(() => {{