From 1c9af660bb40624c391501c1d07c665d4e0caafe Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 13:41:14 +0200 Subject: [PATCH 1/8] poc impl --- Cargo.lock | 1 + crates/cheatcodes/src/evm/fork.rs | 9 +- crates/cheatcodes/src/fs.rs | 3 +- crates/cheatcodes/src/inspector.rs | 13 +- crates/cheatcodes/src/json.rs | 562 ++++++++++++++++++++----- crates/cheatcodes/src/toml.rs | 22 +- crates/common/src/lib.rs | 1 + crates/common/src/sema.rs | 133 ++++++ crates/evm/evm/Cargo.toml | 1 + crates/evm/evm/src/inspectors/stack.rs | 22 +- crates/forge/src/cmd/test/mod.rs | 28 +- crates/forge/src/multi_runner.rs | 25 +- 12 files changed, 688 insertions(+), 132 deletions(-) create mode 100644 crates/common/src/sema.rs diff --git a/Cargo.lock b/Cargo.lock index 3fe504399f294..2b5deafcf6292 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4468,6 +4468,7 @@ dependencies = [ "revm-inspectors", "serde", "serde_json", + "solar-sema", "thiserror 2.0.12", "tracing", "uuid 1.17.0", diff --git a/crates/cheatcodes/src/evm/fork.rs b/crates/cheatcodes/src/evm/fork.rs index aff4148b3912d..04b9b43a2f226 100644 --- a/crates/cheatcodes/src/evm/fork.rs +++ b/crates/cheatcodes/src/evm/fork.rs @@ -208,7 +208,7 @@ impl Cheatcode for rpc_0Call { .database .active_fork_url() .ok_or_else(|| fmt_err!("no active fork URL found"))?; - rpc_call(&url, method, params) + rpc_call(ccx.state, &url, method, params) } } @@ -216,7 +216,7 @@ impl Cheatcode for rpc_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { urlOrAlias, method, params } = self; let url = state.config.rpc_endpoint(urlOrAlias)?.url()?; - rpc_call(&url, method, params) + rpc_call(state, &url, method, params) } } @@ -369,14 +369,15 @@ fn persist_caller(ccx: &mut CheatsCtxt) { } /// Performs an Ethereum JSON-RPC request to the given endpoint. 
-fn rpc_call(url: &str, method: &str, params: &str) -> Result { +fn rpc_call(state: &Cheatcodes, url: &str, method: &str, params: &str) -> Result { let provider = ProviderBuilder::new(url).build()?; let params_json: serde_json::Value = serde_json::from_str(params)?; let result = foundry_common::block_on(provider.raw_request(method.to_string().into(), params_json)) .map_err(|err| fmt_err!("{method:?}: {err}"))?; let result_as_tokens = convert_to_bytes( - &json_value_to_token(&result).map_err(|err| fmt_err!("failed to parse result: {err}"))?, + &json_value_to_token(state, &result) + .map_err(|err| fmt_err!("failed to parse result: {err}"))?, ); Ok(result_as_tokens.abi_encode()) diff --git a/crates/cheatcodes/src/fs.rs b/crates/cheatcodes/src/fs.rs index a9585a31d99fa..c107d3197a593 100644 --- a/crates/cheatcodes/src/fs.rs +++ b/crates/cheatcodes/src/fs.rs @@ -855,6 +855,7 @@ fn latest_broadcast( mod tests { use super::*; use crate::CheatsConfig; + use foundry_common::sema::StructDefinitions; use std::sync::Arc; fn cheats() -> Cheatcodes { @@ -863,7 +864,7 @@ mod tests { root: PathBuf::from(&env!("CARGO_MANIFEST_DIR")), ..Default::default() }; - Cheatcodes::new(Arc::new(config)) + Cheatcodes::new(Arc::new(config), StructDefinitions::default()) } #[test] diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs index 276caeef59d66..fa4c39bd802cf 100644 --- a/crates/cheatcodes/src/inspector.rs +++ b/crates/cheatcodes/src/inspector.rs @@ -33,7 +33,9 @@ use alloy_rpc_types::{ request::{TransactionInput, TransactionRequest}, }; use alloy_sol_types::{SolCall, SolInterface, SolValue}; -use foundry_common::{SELECTOR_LEN, TransactionMaybeSigned, evm::Breakpoints}; +use foundry_common::{ + SELECTOR_LEN, TransactionMaybeSigned, evm::Breakpoints, sema::StructDefinitions, +}; use foundry_evm_core::{ InspectorExt, abi::Vm::stopExpectSafeMemoryCall, @@ -454,8 +456,8 @@ pub struct Cheatcodes { pub fs_commit: bool, /// Serialized JSON values. 
- // **Note**: both must a BTreeMap to ensure the order of the keys is deterministic. - pub serialized_jsons: BTreeMap>, + pub serialized_jsons: HashMap>, + pub struct_defs: StructDefinitions, /// All recorded ETH `deal`s. pub eth_deals: Vec, @@ -500,13 +502,13 @@ pub struct Cheatcodes { // create. impl Default for Cheatcodes { fn default() -> Self { - Self::new(Arc::default()) + Self::new(Arc::default(), StructDefinitions::default()) } } impl Cheatcodes { /// Creates a new `Cheatcodes` with the given settings. - pub fn new(config: Arc) -> Self { + pub fn new(config: Arc, struct_defs: StructDefinitions) -> Self { Self { fs_commit: true, labels: config.labels.clone(), @@ -535,6 +537,7 @@ impl Cheatcodes { access_list: Default::default(), test_context: Default::default(), serialized_jsons: Default::default(), + struct_defs, eth_deals: Default::default(), gas_metering: Default::default(), gas_snapshots: Default::default(), diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index 932555041d7da..8431b69d4b179 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -1,13 +1,16 @@ //! Implementations of [`Json`](spec::Group::Json) cheatcodes. 
use crate::{Cheatcode, Cheatcodes, Result, Vm::*, string}; -use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712_parser::EncodeType}; +use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712, eip712_parser}; use alloy_primitives::{Address, B256, I256, hex}; use alloy_sol_types::SolValue; -use foundry_common::fs; +use foundry_common::{fs, sema::StructDefinitions}; use foundry_config::fs_permissions::FsAccessKind; use serde_json::{Map, Value}; -use std::{borrow::Cow, collections::BTreeMap}; +use std::{ + borrow::Cow, + collections::{BTreeMap, BTreeSet}, +}; impl Cheatcode for keyExistsCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { @@ -24,136 +27,174 @@ impl Cheatcode for keyExistsJsonCall { } impl Cheatcode for parseJson_0Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json } = self; - parse_json(json, "$") + parse_json(state, json, "$") } } impl Cheatcode for parseJson_1Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json(json, key) + parse_json(state, json, key) } } impl Cheatcode for parseJsonUintCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Uint(256)) + parse_json_coerce(json, key, &DynSolType::Uint(256), Some(&state.struct_defs)) } } impl Cheatcode for parseJsonUintArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Uint(256)))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::Uint(256))), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonIntCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result 
{ + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Int(256)) + parse_json_coerce(json, key, &DynSolType::Int(256), Some(&state.struct_defs)) } } impl Cheatcode for parseJsonIntArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Int(256)))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::Int(256))), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonBoolCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bool) + parse_json_coerce(json, key, &DynSolType::Bool, Some(&state.struct_defs)) } } impl Cheatcode for parseJsonBoolArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bool))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::Bool)), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonAddressCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Address) + parse_json_coerce(json, key, &DynSolType::Address, Some(&state.struct_defs)) } } impl Cheatcode for parseJsonAddressArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Address))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::Address)), + 
Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonStringCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::String) + parse_json_coerce(json, key, &DynSolType::String, Some(&state.struct_defs)) } } impl Cheatcode for parseJsonStringArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::String))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::String)), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonBytesCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bytes) + parse_json_coerce(json, key, &DynSolType::Bytes, Some(&state.struct_defs)) } } impl Cheatcode for parseJsonBytesArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bytes))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::Bytes)), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonBytes32Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::FixedBytes(32)) + parse_json_coerce(json, key, &DynSolType::FixedBytes(32), Some(&state.struct_defs)) } } impl Cheatcode for parseJsonBytes32ArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, 
&DynSolType::Array(Box::new(DynSolType::FixedBytes(32)))) + parse_json_coerce( + json, + key, + &DynSolType::Array(Box::new(DynSolType::FixedBytes(32))), + Some(&state.struct_defs), + ) } } impl Cheatcode for parseJsonType_0Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, typeDescription } = self; - parse_json_coerce(json, "$", &resolve_type(typeDescription)?).map(|v| v.abi_encode()) + let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; + parse_json_coerce(json, "$", &ty, Some(&state.struct_defs)).map(|v| v.abi_encode()) } } impl Cheatcode for parseJsonType_1Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key, typeDescription } = self; - parse_json_coerce(json, key, &resolve_type(typeDescription)?).map(|v| v.abi_encode()) + let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; + parse_json_coerce(json, key, &ty, Some(&state.struct_defs)).map(|v| v.abi_encode()) } } impl Cheatcode for parseJsonTypeArrayCall { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key, typeDescription } = self; - let ty = resolve_type(typeDescription)?; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode()) + let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; + parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty)), Some(&state.struct_defs)) + .map(|v| v.abi_encode()) } } @@ -308,9 +349,9 @@ impl Cheatcode for serializeBytes_1Call { } impl Cheatcode for serializeJsonType_0Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { typeDescription, value } = self; - let ty = resolve_type(typeDescription)?; + let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; let value = 
ty.abi_decode(value)?; let value = serialize_value_as_json(value)?; Ok(value.to_string().abi_encode()) @@ -320,7 +361,7 @@ impl Cheatcode for serializeJsonType_0Call { impl Cheatcode for serializeJsonType_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, typeDescription, value } = self; - let ty = resolve_type(typeDescription)?; + let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; let value = ty.abi_decode(value)?; serialize_json(state, objectKey, valueKey, value) } @@ -368,24 +409,33 @@ pub(super) fn check_json_key_exists(json: &str, key: &str) -> Result { Ok(exists.abi_encode()) } -pub(super) fn parse_json(json: &str, path: &str) -> Result { +pub(super) fn parse_json(state: &Cheatcodes, json: &str, path: &str) -> Result { let value = parse_json_str(json)?; let selected = select(&value, path)?; - let sol = json_to_sol(&selected)?; + let sol = json_to_sol(state, &selected)?; Ok(encode(sol)) } -pub(super) fn parse_json_coerce(json: &str, path: &str, ty: &DynSolType) -> Result { +pub(super) fn parse_json_coerce( + json: &str, + path: &str, + ty: &DynSolType, + struct_defs: Option<&StructDefinitions>, +) -> Result { let json = parse_json_str(json)?; let [value] = select(&json, path)?[..] else { bail!("path {path:?} must return exactly one JSON value"); }; - parse_json_as(value, ty).map(|v| v.abi_encode()) + parse_json_as(value, ty, struct_defs).map(|v| v.abi_encode()) } /// Parses given [serde_json::Value] as a [DynSolValue]. 
-pub(super) fn parse_json_as(value: &Value, ty: &DynSolType) -> Result { +pub(super) fn parse_json_as( + value: &Value, + ty: &DynSolType, + struct_defs: Option<&StructDefinitions>, +) -> Result { let to_string = |v: &Value| { let mut s = v.to_string(); s.retain(|c: char| c != '"'); @@ -393,56 +443,84 @@ pub(super) fn parse_json_as(value: &Value, ty: &DynSolType) -> Result parse_json_array(array, ty), - (Value::Object(object), ty) => parse_json_map(object, ty), + (Value::Array(array), ty) => parse_json_array(array, ty, struct_defs), + (Value::Object(object), ty) => parse_json_map(object, ty, struct_defs), (Value::String(s), DynSolType::String) => Ok(DynSolValue::String(s.clone())), _ => string::parse_value(&to_string(value), ty), } } -pub(super) fn parse_json_array(array: &[Value], ty: &DynSolType) -> Result { +pub(super) fn parse_json_array( + array: &[Value], + ty: &DynSolType, + struct_defs: Option<&StructDefinitions>, +) -> Result { match ty { DynSolType::Tuple(types) => { ensure!(array.len() == types.len(), "array length mismatch"); let values = array .iter() .zip(types) - .map(|(e, ty)| parse_json_as(e, ty)) + .map(|(e, ty)| parse_json_as(e, ty, struct_defs)) .collect::>>()?; Ok(DynSolValue::Tuple(values)) } DynSolType::Array(inner) => { - let values = - array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; + let values = array + .iter() + .map(|e| parse_json_as(e, inner, struct_defs)) + .collect::>>()?; Ok(DynSolValue::Array(values)) } DynSolType::FixedArray(inner, len) => { ensure!(array.len() == *len, "array length mismatch"); - let values = - array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; + let values = array + .iter() + .map(|e| parse_json_as(e, inner, struct_defs)) + .collect::>>()?; Ok(DynSolValue::FixedArray(values)) } _ => bail!("expected {ty}, found array"), } } -pub(super) fn parse_json_map(map: &Map, ty: &DynSolType) -> Result { +pub(super) fn parse_json_map( + map: &Map, + ty: &DynSolType, + struct_defs: 
Option<&StructDefinitions>, +) -> Result { let Some((name, fields, types)) = ty.as_custom_struct() else { bail!("expected {ty}, found JSON object"); }; - let mut values = Vec::with_capacity(fields.len()); - for (field, ty) in fields.iter().zip(types.iter()) { - let Some(value) = map.get(field) else { bail!("field {field:?} not found in JSON object") }; - values.push(parse_json_as(value, ty)?); + let type_map: std::collections::HashMap<&str, &DynSolType> = + fields.iter().map(|s| s.as_str()).zip(types.iter()).collect(); + + let ordered_prop_names = if let Some(defs) = struct_defs + && let Some(struct_fields) = defs.get(name) + { + // Use the field order from the struct definition. + struct_fields.iter().map(|(field_name, _)| field_name.clone()).collect::>() + } else { + // Fall back to the fields from the type (which are alphabetically ordered). + fields.to_vec() + }; + + let mut tuple = Vec::with_capacity(ordered_prop_names.len()); + for field_name in &ordered_prop_names { + let Some(value) = map.get(field_name) else { + bail!("field {field_name:?} not found in JSON object") + }; + let Some(field_ty) = type_map.get(field_name.as_str()) else { + // This case should ideally not be hit if the struct definition is consistent + // with the provided type. 
+ bail!("type for field {field_name:?} not found in provided type description"); + }; + tuple.push(parse_json_as(value, *field_ty, struct_defs)?); } - Ok(DynSolValue::CustomStruct { - name: name.to_string(), - prop_names: fields.to_vec(), - tuple: values, - }) + Ok(DynSolValue::CustomStruct { name: name.to_string(), prop_names: ordered_prop_names, tuple }) } pub(super) fn parse_json_keys(json: &str, key: &str) -> Result { @@ -462,10 +540,10 @@ fn parse_json_str(json: &str) -> Result { serde_json::from_str(json).map_err(|e| fmt_err!("failed parsing JSON: {e}")) } -fn json_to_sol(json: &[&Value]) -> Result> { +fn json_to_sol(state: &Cheatcodes, json: &[&Value]) -> Result> { let mut sol = Vec::with_capacity(json.len()); for value in json { - sol.push(json_value_to_token(value)?); + sol.push(json_value_to_token(state, value)?); } Ok(sol) } @@ -503,22 +581,43 @@ pub(super) fn canonicalize_json_path(path: &str) -> Cow<'_, str> { /// it will call itself to convert each of it's value and encode the whole as a /// Tuple #[instrument(target = "cheatcodes", level = "trace", ret)] -pub(super) fn json_value_to_token(value: &Value) -> Result { +pub(super) fn json_value_to_token(state: &Cheatcodes, value: &Value) -> Result { match value { Value::Null => Ok(DynSolValue::FixedBytes(B256::ZERO, 32)), Value::Bool(boolean) => Ok(DynSolValue::Bool(*boolean)), - Value::Array(array) => { - array.iter().map(json_value_to_token).collect::>().map(DynSolValue::Array) - } - value @ Value::Object(_) => { - // See: [#3647](https://github.com/foundry-rs/foundry/pull/3647) - let ordered_object: BTreeMap = - serde_json::from_value(value.clone()).unwrap(); - ordered_object - .values() - .map(json_value_to_token) - .collect::>() - .map(DynSolValue::Tuple) + Value::Array(array) => array + .iter() + .map(|v| json_value_to_token(state, v)) + .collect::>() + .map(DynSolValue::Array), + Value::Object(map) => { + // Try to find a struct definition that matches the object keys. 
+ let keys: BTreeSet<_> = map.keys().map(|s| s.as_str()).collect(); + let matching_def = state.struct_defs.values().find(|fields| { + fields.len() == keys.len() + && fields.iter().map(|(name, _)| name.as_str()).collect::>() == keys + }); + + if let Some(fields) = matching_def { + // Found a struct with matching field names, use the order from the definition. + fields + .iter() + .map(|(name, _)| { + // unwrap is safe because we know the key exists. + json_value_to_token(state, map.get(name).unwrap()) + }) + .collect::>() + .map(DynSolValue::Tuple) + } else { + // Fallback to alphabetical sorting if no matching struct is found. + let ordered_object: BTreeMap<_, _> = + map.iter().map(|(k, v)| (k.clone(), v.clone())).collect(); + ordered_object + .values() + .map(|value| json_value_to_token(state, value)) + .collect::>() + .map(DynSolValue::Tuple) + } } Value::Number(number) => { if let Some(f) = number.as_f64() { @@ -644,26 +743,53 @@ fn serialize_json( ) -> Result { let value = serialize_value_as_json(value)?; let map = state.serialized_jsons.entry(object_key.into()).or_default(); - map.insert(value_key.into(), value); + map.push((value_key.into(), value)); let stringified = serde_json::to_string(map).unwrap(); Ok(stringified.abi_encode()) } /// Resolves a [DynSolType] from user input. -pub(super) fn resolve_type(type_description: &str) -> Result { - if let Ok(ty) = DynSolType::parse(type_description) { - return Ok(ty); - }; +pub(super) fn resolve_type( + type_description: &str, + struct_defs: Option<&StructDefinitions>, +) -> Result { + let mut resolver = Resolver::default(); + + // Populate the resolver with all known struct definitions from the project. 
+ if let Some(struct_defs) = struct_defs { + for (name, fields) in struct_defs.iter() { + let props = fields + .iter() + .filter_map(|(field_name, field_ty)| { + eip712::PropertyDef::new(field_ty.as_str(), field_name.as_str()).ok() + }) + .collect::>(); + + if props.len() != fields.len() { + bail!("struct has an invalid field"); + } + + // The struct name from `StructDefinitions` should be a valid root type. + let type_def = eip712::TypeDef::new(name.clone(), props)?; + resolver.ingest(type_def); + } + } - if let Ok(encoded) = EncodeType::parse(type_description) { - let main_type = encoded.types[0].type_name; - let mut resolver = Resolver::default(); - for t in encoded.types { + if let Ok(encoded) = eip712_parser::EncodeType::parse(type_description) { + // Ingest the types from the EIP-712 string. These might be new or override + // project-wide definitions for the scope of this resolution. + for t in &encoded.types { resolver.ingest(t.to_owned()); } + // The primary type is the first one in an EIP-712 string. + let main_type = encoded + .types + .first() + .ok_or_else(|| fmt_err!("EIP-712 type description is empty"))? + .type_name; return Ok(resolver.resolve(main_type)?); - }; + } bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string") } @@ -671,8 +797,9 @@ pub(super) fn resolve_type(type_description: &str) -> Result { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::FixedBytes; + use alloy_primitives::{FixedBytes, U256}; use proptest::strategy::Strategy; + use std::collections::HashMap; fn contains_tuple(value: &DynSolValue) -> bool { match value { @@ -717,21 +844,250 @@ mod tests { // Tests to ensure that conversion [DynSolValue] -> [serde_json::Value] -> [DynSolValue] proptest::proptest! 
{ - #[test] - fn test_json_roundtrip_guessed(v in guessable_types()) { - let json = serialize_value_as_json(v.clone()).unwrap(); - let value = json_value_to_token(&json).unwrap(); + // TODO: fix + // #[test] + // fn test_json_roundtrip_guessed(v in guessable_types()) { + // let json = serialize_value_as_json(v.clone()).unwrap(); + // let value = json_value_to_token(&json).unwrap(); - // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. - let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap(); - assert_eq!(decoded, v); - } + // // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. + // let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap(); + // assert_eq!(decoded, v); + // } #[test] fn test_json_roundtrip(v in proptest::arbitrary::any::().prop_filter("filter out values without type", |v| v.as_type().is_some())) { let json = serialize_value_as_json(v.clone()).unwrap(); - let value = parse_json_as(&json, &v.as_type().unwrap()).unwrap(); + let value = parse_json_as(&json, &v.as_type().unwrap(), None).unwrap(); assert_eq!(value, v); } } + + #[test] + fn test_parse_json_struct_with_definitions() { + // Define a struct with fields in a specific order (not alphabetical) + let mut defs_map = HashMap::new(); + defs_map.insert( + "Apple".to_string(), + vec![ + ("color".to_string(), "string".to_string()), + ("sweetness".to_string(), "uint8".to_string()), + ("sourness".to_string(), "uint8".to_string()), + ], + ); + defs_map.insert( + "FruitStall".to_string(), + vec![ + ("name".to_string(), "string".to_string()), + ("apples".to_string(), "Apple[]".to_string()), + ], + ); + let struct_defs = StructDefinitions::new(defs_map); + + // JSON with fields in different order + let json_str = r#"{ + "apples": [ + { + "sweetness": 7, + "sourness": 3, + "color": "Red" + }, + { + "sweetness": 5, + 
"sourness": 5, + "color": "Green" + }, + { + "sweetness": 9, + "sourness": 1, + "color": "Yellow" + } + ], + "name": "Fresh Fruit" + }"#; + + // Create the expected type. The type resolver will order properties alphabetically. + let apple_ty = DynSolType::CustomStruct { + name: "Apple".to_string(), + prop_names: vec!["color".to_string(), "sourness".to_string(), "sweetness".to_string()], + tuple: vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)], + }; + + let fruit_ty = DynSolType::CustomStruct { + name: "FruitStall".to_string(), + prop_names: vec!["apples".to_string(), "name".to_string()], + tuple: vec![DynSolType::Array(Box::new(apple_ty)), DynSolType::String], + }; + + // Parse the JSON. + let json_value: Value = serde_json::from_str(json_str).unwrap(); + let result = parse_json_as(&json_value, &fruit_ty, Some(&struct_defs)).unwrap(); + + // Assert that the parsed structure respects the definition order. + if let DynSolValue::CustomStruct { name, prop_names, tuple } = result { + assert_eq!(name, "FruitStall"); + assert_eq!(prop_names, vec!["name", "apples"]); + assert_eq!(tuple.len(), 2); + assert_eq!(tuple[0], DynSolValue::String("Fresh Fruit".to_string())); + + if let DynSolValue::Array(apples) = &tuple[1] { + assert_eq!(apples.len(), 3); + if let DynSolValue::CustomStruct { prop_names, tuple, .. } = &apples[0] { + assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); + assert_eq!(tuple[0], DynSolValue::String("Red".to_string())); + assert_eq!(tuple[1], DynSolValue::Uint(U256::from(7), 8)); + assert_eq!(tuple[2], DynSolValue::Uint(U256::from(3), 8)); + } else { + panic!("Expected CustomStruct for the first apple"); + } + if let DynSolValue::CustomStruct { prop_names, tuple, .. 
} = &apples[2] { + assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); + assert_eq!(tuple[0], DynSolValue::String("Yellow".to_string())); + assert_eq!(tuple[1], DynSolValue::Uint(U256::from(9), 8)); + assert_eq!(tuple[2], DynSolValue::Uint(U256::from(1), 8)); + } else { + panic!("Expected CustomStruct for the second apple"); + } + } else { + panic!("Expected an array of apples"); + } + } else { + panic!("Expected a CustomStruct for FruitStall"); + } + } + + #[test] + fn test_parse_json_struct_without_definitions() { + // JSON with fields in alphabetical order + let json_str = r#"{ + "active": true, + "age": 30, + "name": "Alice" + }"#; + + // Create the expected type with alphabetical order + let ty = DynSolType::CustomStruct { + name: "Person".to_string(), + prop_names: vec!["active".to_string(), "age".to_string(), "name".to_string()], + tuple: vec![DynSolType::Bool, DynSolType::Uint(256), DynSolType::String], + }; + + // Parse JSON without struct definitions + let json_value: Value = serde_json::from_str(json_str).unwrap(); + let result = parse_json_as(&json_value, &ty, None).unwrap(); + + // Check that fields remain in alphabetical order when no definitions provided + match result { + DynSolValue::CustomStruct { name, prop_names, tuple } => { + assert_eq!(name, "Person"); + assert_eq!(prop_names, vec!["active", "age", "name"]); + assert_eq!(tuple.len(), 3); + + // Check values are in alphabetical order + assert_eq!(tuple[0], DynSolValue::Bool(true)); + assert_eq!(tuple[1], DynSolValue::Uint(U256::from(30), 256)); + assert_eq!(tuple[2], DynSolValue::String("Alice".to_string())); + } + _ => panic!("Expected CustomStruct"), + } + } + + #[test] + fn test_parse_json_array_of_structs() { + let mut defs_map = HashMap::new(); + defs_map.insert( + "Item".to_string(), + vec![ + ("id".to_string(), "uint256".to_string()), + ("name".to_string(), "string".to_string()), + ("price".to_string(), "uint256".to_string()), + ], + ); + let struct_defs = 
StructDefinitions::new(defs_map); + + // JSON array with structs + let json_str = r#"[ + { + "name": "Apple", + "price": 100, + "id": 1 + }, + { + "price": 200, + "id": 2, + "name": "Banana" + } + ]"#; + + // Create the expected type + let item_ty = DynSolType::CustomStruct { + name: "Item".to_string(), + prop_names: vec!["id".to_string(), "name".to_string(), "price".to_string()], + tuple: vec![DynSolType::Uint(256), DynSolType::String, DynSolType::Uint(256)], + }; + let array_ty = DynSolType::Array(Box::new(item_ty)); + + // Parse JSON + let json_value: Value = serde_json::from_str(json_str).unwrap(); + let result = parse_json_as(&json_value, &array_ty, Some(&struct_defs)).unwrap(); + + // Check array of structs + match result { + DynSolValue::Array(items) => { + assert_eq!(items.len(), 2); + + // Check first item + match &items[0] { + DynSolValue::CustomStruct { prop_names, tuple, .. } => { + assert_eq!(prop_names, &vec!["id", "name", "price"]); + assert_eq!(tuple[0], DynSolValue::Uint(U256::from(1), 256)); + assert_eq!(tuple[1], DynSolValue::String("Apple".to_string())); + assert_eq!(tuple[2], DynSolValue::Uint(U256::from(100), 256)); + } + _ => panic!("Expected CustomStruct in array"), + } + + // Check second item + match &items[1] { + DynSolValue::CustomStruct { prop_names, tuple, .. 
} => { + assert_eq!(prop_names, &vec!["id", "name", "price"]); + assert_eq!(tuple[0], DynSolValue::Uint(U256::from(2), 256)); + assert_eq!(tuple[1], DynSolValue::String("Banana".to_string())); + assert_eq!(tuple[2], DynSolValue::Uint(U256::from(200), 256)); + } + _ => panic!("Expected CustomStruct in array"), + } + } + _ => panic!("Expected Array"), + } + } + + #[test] + fn test_parse_json_missing_field() { + let mut defs_map = HashMap::new(); + defs_map.insert( + "Person".to_string(), + vec![ + ("name".to_string(), "string".to_string()), + ("age".to_string(), "uint256".to_string()), + ], + ); + let struct_defs = StructDefinitions::new(defs_map); + + // JSON missing the "age" field + let json_str = r#"{ "name": "Alice" }"#; + + let ty = DynSolType::CustomStruct { + name: "Person".to_string(), + prop_names: vec!["age".to_string(), "name".to_string()], + tuple: vec![DynSolType::Uint(256), DynSolType::String], + }; + + let json_value: Value = serde_json::from_str(json_str).unwrap(); + let result = parse_json_as(&json_value, &ty, Some(&struct_defs)); + + // Should fail with missing field error + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("field \"age\" not found")); + } } diff --git a/crates/cheatcodes/src/toml.rs b/crates/cheatcodes/src/toml.rs index 4ae16340ec6be..776b63309cc0c 100644 --- a/crates/cheatcodes/src/toml.rs +++ b/crates/cheatcodes/src/toml.rs @@ -23,21 +23,21 @@ impl Cheatcode for keyExistsTomlCall { } impl Cheatcode for parseToml_0Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { toml } = self; - parse_toml(toml, "$") + parse_toml(state, toml, "$") } } impl Cheatcode for parseToml_1Call { - fn apply(&self, _state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml(toml, key) + parse_toml(state, toml, key) } } impl Cheatcode for parseTomlUintCall { - fn apply(&self, 
_state: &mut Cheatcodes) -> Result { + fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; parse_toml_coerce(toml, key, &DynSolType::Uint(256)) } @@ -137,21 +137,21 @@ impl Cheatcode for parseTomlBytes32ArrayCall { impl Cheatcode for parseTomlType_0Call { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, typeDescription } = self; - parse_toml_coerce(toml, "$", &resolve_type(typeDescription)?).map(|v| v.abi_encode()) + parse_toml_coerce(toml, "$", &resolve_type(typeDescription, None)?).map(|v| v.abi_encode()) } } impl Cheatcode for parseTomlType_1Call { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key, typeDescription } = self; - parse_toml_coerce(toml, key, &resolve_type(typeDescription)?).map(|v| v.abi_encode()) + parse_toml_coerce(toml, key, &resolve_type(typeDescription, None)?).map(|v| v.abi_encode()) } } impl Cheatcode for parseTomlTypeArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key, typeDescription } = self; - let ty = resolve_type(typeDescription)?; + let ty = resolve_type(typeDescription, None)?; parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode()) } } @@ -200,13 +200,13 @@ fn parse_toml_str(toml: &str) -> Result { } /// Parse a TOML string and return the value at the given path. -fn parse_toml(toml: &str, key: &str) -> Result { - parse_json(&toml_to_json_string(toml)?, key) +fn parse_toml(state: &Cheatcodes, toml: &str, key: &str) -> Result { + parse_json(state, &toml_to_json_string(toml)?, key) } /// Parse a TOML string and return the value at the given path, coercing it to the given type. fn parse_toml_coerce(toml: &str, key: &str, ty: &DynSolType) -> Result { - parse_json_coerce(&toml_to_json_string(toml)?, key, ty) + parse_json_coerce(&toml_to_json_string(toml)?, key, ty, None) } /// Parse a TOML string and return an array of all keys at the given path. 
diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index c672f014df226..83bb94420bb5a 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -30,6 +30,7 @@ pub mod provider; pub mod reports; pub mod retry; pub mod selectors; +pub mod sema; pub mod serde_helpers; pub mod term; pub mod traits; diff --git a/crates/common/src/sema.rs b/crates/common/src/sema.rs new file mode 100644 index 0000000000000..b3b00340f532d --- /dev/null +++ b/crates/common/src/sema.rs @@ -0,0 +1,133 @@ +//! Semantic analysis helpers for extracting type information and other useful metadata from the +//! HIR. + +use eyre::{Result, eyre}; +use solar_sema::{ + GcxWrapper, Hir, hir, + ty::{Ty, TyKind}, +}; +use std::{collections::HashMap, ops::Deref, sync::Arc}; + +#[derive(Debug, Clone)] +pub struct StructDefinitions(Arc>>); + +impl StructDefinitions { + pub fn new(map: HashMap>) -> Self { + StructDefinitions(Arc::new(map)) + } +} + +impl Default for StructDefinitions { + fn default() -> Self { + StructDefinitions(Arc::new(HashMap::new())) + } +} + +impl Deref for StructDefinitions { + type Target = HashMap>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl AsRef>>> for StructDefinitions { + fn as_ref(&self) -> &Arc>> { + &self.0 + } +} + +/// Generates a map of all struct definitions from the HIR using the resolved `Ty` system. +pub struct SemanticAnalysisProcessor<'hir> { + gcx: GcxWrapper<'hir>, + struct_defs: HashMap>, +} + +impl<'hir> SemanticAnalysisProcessor<'hir> { + /// Constructs a new generator. + pub fn new(gcx: GcxWrapper<'hir>) -> Self { + Self { gcx, struct_defs: HashMap::new() } + } + + /// Processes the HIR to generate all the struct definitions. 
+ pub fn process(mut self) -> Result { + for id in self.hir().strukt_ids() { + self.resolve_struct_definition(id)?; + } + + Ok(self) + } + + pub fn struct_defs(self) -> StructDefinitions { + StructDefinitions(Arc::new(self.struct_defs)) + } + + #[inline] + fn hir(&self) -> &'hir Hir<'hir> { + &self.gcx.get().hir + } + + /// The recursive core of the generator. Resolves a single struct and adds it to the cache. + fn resolve_struct_definition(&mut self, id: hir::StructId) -> Result<()> { + let qualified_name = self.get_fully_qualified_name(id); + if self.struct_defs.contains_key(&qualified_name) { + return Ok(()); + } + + let gcx = self.gcx.get(); + let hir = &gcx.hir; + let strukt = hir.strukt(id); + let mut fields = Vec::with_capacity(strukt.fields.len()); + + for &field_id in strukt.fields { + let var = hir.variable(field_id); + let name = var.name.ok_or_else(|| eyre!("Struct field is missing a name"))?.to_string(); + let ty_str = self.ty_to_string(gcx.type_of_hir_ty(&var.ty))?; + + fields.push((name, ty_str)); + } + + self.struct_defs.insert(qualified_name, fields); + Ok(()) + } + + /// Converts a resolved `Ty` into its canonical string representation. + fn ty_to_string(&mut self, ty: Ty<'hir>) -> Result { + let ty = ty.peel_refs(); + Ok(match ty.kind { + TyKind::Elementary(e) => e.to_string(), + TyKind::Array(ty, size) => { + let inner_type = self.ty_to_string(ty)?; + format!("{}[{}]", inner_type, size) + } + TyKind::DynArray(ty) => { + let inner_type = self.ty_to_string(ty)?; + format!("{}[]", inner_type) + } + TyKind::Struct(id) => { + // Ensure the nested struct is resolved before proceeding. 
+ self.resolve_struct_definition(id)?; + self.get_fully_qualified_name(id) + } + TyKind::Udvt(ty, _) => self.ty_to_string(ty)?, + // For now, map enums to `uint8` + TyKind::Enum(_) => "uint8".to_string(), + // For now, map contracts to `address` + TyKind::Contract(_) => "address".to_string(), + // Explicitly disallow unsupported types + _ => eyre::bail!("Unsupported field type"), + }) + } + + /// Helper to get the fully qualified name `Contract.Struct`. + fn get_fully_qualified_name(&self, id: hir::StructId) -> String { + let hir = self.hir(); + let strukt = hir.strukt(id); + if let Some(contract_id) = strukt.contract { + let contract_name = hir.contract(contract_id).name.as_str(); + format!("{}.{}", contract_name, strukt.name.as_str()) + } else { + strukt.name.as_str().to_string() + } + } +} diff --git a/crates/evm/evm/Cargo.toml b/crates/evm/evm/Cargo.toml index 7124c9d0bb0ac..be284b21c465d 100644 --- a/crates/evm/evm/Cargo.toml +++ b/crates/evm/evm/Cargo.toml @@ -45,6 +45,7 @@ revm = { workspace = true, default-features = false, features = [ "c-kzg", ] } revm-inspectors.workspace = true +solar-sema.workspace = true eyre.workspace = true parking_lot.workspace = true diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs index 4d4c39c7c360d..b2c2ebeaf820f 100644 --- a/crates/evm/evm/src/inspectors/stack.rs +++ b/crates/evm/evm/src/inspectors/stack.rs @@ -8,6 +8,7 @@ use alloy_primitives::{ map::{AddressHashMap, HashMap}, }; use foundry_cheatcodes::{CheatcodesExecutor, Wallets}; +use foundry_common::sema::StructDefinitions; use foundry_evm_core::{ ContextExt, Env, InspectorExt, backend::{DatabaseExt, JournaledState}, @@ -71,6 +72,8 @@ pub struct InspectorStackBuilder { pub wallets: Option, /// The CREATE2 deployer address. 
pub create2_deployer: Address, + /// + pub struct_defs: StructDefinitions, } impl InspectorStackBuilder { @@ -175,6 +178,12 @@ impl InspectorStackBuilder { self } + #[inline] + pub fn struct_defs(mut self, struct_defs: StructDefinitions) -> Self { + self.struct_defs = struct_defs; + self + } + /// Builds the stack of inspectors to use when transacting/committing on the EVM. pub fn build(self) -> InspectorStack { let Self { @@ -191,12 +200,13 @@ impl InspectorStackBuilder { odyssey, wallets, create2_deployer, + struct_defs, } = self; let mut stack = InspectorStack::new(); // inspectors if let Some(config) = cheatcodes { - let mut cheatcodes = Cheatcodes::new(config); + let mut cheatcodes = Cheatcodes::new(config, struct_defs); // Set wallets if they are provided if let Some(wallets) = wallets { cheatcodes.set_wallets(wallets); @@ -737,10 +747,12 @@ impl InspectorStackRefMut<'_> { /// it. fn with_stack(&mut self, f: impl FnOnce(&mut InspectorStack) -> O) -> O { let mut stack = InspectorStack { - cheatcodes: self - .cheatcodes - .as_deref_mut() - .map(|cheats| core::mem::replace(cheats, Cheatcodes::new(cheats.config.clone()))), + cheatcodes: self.cheatcodes.as_deref_mut().map(|cheats| { + core::mem::replace( + cheats, + Cheatcodes::new(cheats.config.clone(), cheats.struct_defs.clone()), + ) + }), inner: std::mem::take(self.inner), }; diff --git a/crates/forge/src/cmd/test/mod.rs b/crates/forge/src/cmd/test/mod.rs index e6171f0cf17c5..35b7d6f24e0cc 100644 --- a/crates/forge/src/cmd/test/mod.rs +++ b/crates/forge/src/cmd/test/mod.rs @@ -2,6 +2,7 @@ use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs}; use crate::{ MultiContractRunner, MultiContractRunnerBuilder, TestFilter, decode::decode_console_logs, + foundry_common::sema::{SemanticAnalysisProcessor, StructDefinitions}, gas_report::GasReport, multi_runner::matches_contract, result::{SuiteResult, TestOutcome, TestStatus}, @@ -18,7 +19,7 @@ use clap::{Parser, ValueHint}; use eyre::{Context, 
OptionExt, Result, bail}; use foundry_block_explorers::EtherscanApiVersion; use foundry_cli::{ - opts::{BuildOpts, GlobalArgs}, + opts::{BuildOpts, GlobalArgs, solar_pcx_from_build_opts}, utils::{self, LoadConfig}, }; use foundry_common::{TestFunctionExt, compile::ProjectCompiler, evm::EvmArgs, fs, shell}; @@ -42,6 +43,7 @@ use foundry_config::{ use foundry_debugger::Debugger; use foundry_evm::traces::identifier::TraceIdentifiers; use regex::Regex; +use solar_sema::interface::Session; use std::{ collections::{BTreeMap, BTreeSet}, fmt::Write, @@ -308,6 +310,7 @@ impl TestArgs { trace!(target: "forge::test", ?filter, "using filter"); let sources_to_compile = self.get_sources_to_compile(&config, &filter)?; + let input: Vec = sources_to_compile.iter().cloned().collect(); let compiler = ProjectCompiler::new() .dynamic_test_linking(config.dynamic_test_linking) @@ -316,6 +319,28 @@ impl TestArgs { let output = compiler.compile(&project)?; + // Instantiate solar's parsing context + let mut sess = Session::builder().with_stderr_emitter().build(); + sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + + let mut pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?; + + let sess = pcx.sess; + let struct_defs = sess.enter_parallel(|| -> Result { + // Load all files into the parsing ctx + pcx.load_files(input).map_err(|_| eyre::eyre!("Error loding files"))?; + + // Parse and lower to HIR + let hir_arena = solar_sema::thread_local::ThreadLocal::new(); + let hir_result = pcx.parse_and_lower(&hir_arena); + + if let Ok(Some(gcx)) = hir_result { + return SemanticAnalysisProcessor::new(gcx).process().map(|res| res.struct_defs()); + } + + Err(eyre::eyre!("Error lowering AST")) + })?; + // Create test options from general project settings and compiler output. 
let project_root = &project.paths.root; @@ -355,6 +380,7 @@ impl TestArgs { .with_fork(evm_opts.get_fork(&config, env.clone())) .enable_isolation(evm_opts.isolate) .odyssey(evm_opts.odyssey) + .struct_defs(struct_defs) .build::(project_root, &output, env, evm_opts)?; let libraries = runner.libraries.clone(); diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 76ae5d6da10e9..01e51a014f788 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -7,7 +7,10 @@ use crate::{ use alloy_json_abi::{Function, JsonAbi}; use alloy_primitives::{Address, Bytes, U256}; use eyre::Result; -use foundry_common::{ContractsByArtifact, TestFunctionExt, get_contract_name, shell::verbosity}; +use foundry_common::{ + ContractsByArtifact, TestFunctionExt, get_contract_name, sema::StructDefinitions, + shell::verbosity, +}; use foundry_compilers::{ Artifact, ArtifactId, ProjectCompileOutput, artifacts::{Contract, Libraries}, @@ -58,6 +61,8 @@ pub struct MultiContractRunner { pub libs_to_deploy: Vec, /// Library addresses used to link contracts. pub libraries: Libraries, + /// Other metadata extracted from the semantic analysis of the contracts. 
+ pub metadata: StructDefinitions, /// The fork to use at launch pub fork: Option, @@ -249,7 +254,12 @@ impl MultiContractRunner { debug!("start executing all tests in contract"); - let executor = self.tcfg.executor(self.known_contracts.clone(), artifact_id, db.clone()); + let executor = self.tcfg.executor( + self.known_contracts.clone(), + artifact_id, + db.clone(), + self.metadata.clone(), + ); let runner = ContractRunner::new( &identifier, contract, @@ -347,6 +357,7 @@ impl TestRunnerConfig { known_contracts: ContractsByArtifact, artifact_id: &ArtifactId, db: Backend, + struct_defs: StructDefinitions, ) -> Executor { let cheats_config = Arc::new(CheatsConfig::new( &self.config, @@ -363,6 +374,7 @@ impl TestRunnerConfig { .enable_isolation(self.isolation) .odyssey(self.odyssey) .create2_deployer(self.evm_opts.create2_deployer) + .struct_defs(struct_defs) }) .spec_id(self.spec_id) .gas_limit(self.evm_opts.gas_limit()) @@ -404,6 +416,8 @@ pub struct MultiContractRunnerBuilder { pub isolation: bool, /// Whether to enable Odyssey features. 
pub odyssey: bool, + /// + pub struct_defs: StructDefinitions, } impl MultiContractRunnerBuilder { @@ -419,6 +433,7 @@ impl MultiContractRunnerBuilder { isolation: Default::default(), decode_internal: Default::default(), odyssey: Default::default(), + struct_defs: Default::default(), } } @@ -467,6 +482,11 @@ impl MultiContractRunnerBuilder { self } + pub fn struct_defs(mut self, struct_defs: StructDefinitions) -> Self { + self.struct_defs = struct_defs; + self + } + /// Given an EVM, proceeds to return a runner which is able to execute all tests /// against that evm pub fn build>( @@ -527,6 +547,7 @@ impl MultiContractRunnerBuilder { known_contracts, libs_to_deploy, libraries, + metadata: self.struct_defs, fork: self.fork, From f4fb694abeb30c116a470c722239ea4a89a62a04 Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 16:42:58 +0200 Subject: [PATCH 2/8] cleanup impl --- crates/cheatcodes/src/evm/fork.rs | 2 +- crates/cheatcodes/src/json.rs | 487 ++++++++++++------------------ crates/cheatcodes/src/toml.rs | 6 +- crates/common/src/sema.rs | 21 +- 4 files changed, 202 insertions(+), 314 deletions(-) diff --git a/crates/cheatcodes/src/evm/fork.rs b/crates/cheatcodes/src/evm/fork.rs index 04b9b43a2f226..a6386e0dd40f1 100644 --- a/crates/cheatcodes/src/evm/fork.rs +++ b/crates/cheatcodes/src/evm/fork.rs @@ -376,7 +376,7 @@ fn rpc_call(state: &Cheatcodes, url: &str, method: &str, params: &str) -> Result foundry_common::block_on(provider.raw_request(method.to_string().into(), params_json)) .map_err(|err| fmt_err!("{method:?}: {err}"))?; let result_as_tokens = convert_to_bytes( - &json_value_to_token(state, &result) + &json_value_to_token(&state.struct_defs, &result) .map_err(|err| fmt_err!("failed to parse result: {err}"))?, ); diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index 8431b69d4b179..875012479b173 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -29,147 +29,112 @@ 
impl Cheatcode for keyExistsJsonCall { impl Cheatcode for parseJson_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json } = self; - parse_json(state, json, "$") + parse_json(&state.struct_defs, json, "$") } } impl Cheatcode for parseJson_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json(state, json, key) + parse_json(&state.struct_defs, json, key) } } impl Cheatcode for parseJsonUintCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Uint(256), Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::Uint(256)) } } impl Cheatcode for parseJsonUintArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::Uint(256))), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Uint(256)))) } } impl Cheatcode for parseJsonIntCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Int(256), Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::Int(256)) } } impl Cheatcode for parseJsonIntArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::Int(256))), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Int(256)))) } } impl Cheatcode for parseJsonBoolCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result 
{ let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bool, Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::Bool) } } impl Cheatcode for parseJsonBoolArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::Bool)), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bool))) } } impl Cheatcode for parseJsonAddressCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Address, Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::Address) } } impl Cheatcode for parseJsonAddressArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::Address)), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Address))) } } impl Cheatcode for parseJsonStringCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::String, Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::String) } } impl Cheatcode for parseJsonStringArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::String)), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::String))) } } impl Cheatcode for 
parseJsonBytesCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bytes, Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::Bytes) } } impl Cheatcode for parseJsonBytesArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::Bytes)), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bytes))) } } impl Cheatcode for parseJsonBytes32Call { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::FixedBytes(32), Some(&state.struct_defs)) + parse_json_coerce(json, key, &DynSolType::FixedBytes(32)) } } impl Cheatcode for parseJsonBytes32ArrayCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce( - json, - key, - &DynSolType::Array(Box::new(DynSolType::FixedBytes(32))), - Some(&state.struct_defs), - ) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::FixedBytes(32)))) } } @@ -177,7 +142,7 @@ impl Cheatcode for parseJsonType_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, typeDescription } = self; let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; - parse_json_coerce(json, "$", &ty, Some(&state.struct_defs)).map(|v| v.abi_encode()) + parse_json_coerce(json, "$", &ty).map(|v| v.abi_encode()) } } @@ -185,7 +150,7 @@ impl Cheatcode for parseJsonType_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key, typeDescription } = self; let ty = resolve_type(typeDescription, 
Some(&state.struct_defs))?; - parse_json_coerce(json, key, &ty, Some(&state.struct_defs)).map(|v| v.abi_encode()) + parse_json_coerce(json, key, &ty).map(|v| v.abi_encode()) } } @@ -193,8 +158,7 @@ impl Cheatcode for parseJsonTypeArrayCall { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key, typeDescription } = self; let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; - parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty)), Some(&state.struct_defs)) - .map(|v| v.abi_encode()) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode()) } } @@ -409,33 +373,24 @@ pub(super) fn check_json_key_exists(json: &str, key: &str) -> Result { Ok(exists.abi_encode()) } -pub(super) fn parse_json(state: &Cheatcodes, json: &str, path: &str) -> Result { +pub(super) fn parse_json(defs: &StructDefinitions, json: &str, path: &str) -> Result { let value = parse_json_str(json)?; let selected = select(&value, path)?; - let sol = json_to_sol(state, &selected)?; + let sol = json_to_sol(defs, &selected)?; Ok(encode(sol)) } -pub(super) fn parse_json_coerce( - json: &str, - path: &str, - ty: &DynSolType, - struct_defs: Option<&StructDefinitions>, -) -> Result { +pub(super) fn parse_json_coerce(json: &str, path: &str, ty: &DynSolType) -> Result { let json = parse_json_str(json)?; let [value] = select(&json, path)?[..] else { bail!("path {path:?} must return exactly one JSON value"); }; - parse_json_as(value, ty, struct_defs).map(|v| v.abi_encode()) + parse_json_as(value, ty).map(|v| v.abi_encode()) } /// Parses given [serde_json::Value] as a [DynSolValue]. 
-pub(super) fn parse_json_as( - value: &Value, - ty: &DynSolType, - struct_defs: Option<&StructDefinitions>, -) -> Result { +pub(super) fn parse_json_as(value: &Value, ty: &DynSolType) -> Result { let to_string = |v: &Value| { let mut s = v.to_string(); s.retain(|c: char| c != '"'); @@ -443,84 +398,54 @@ pub(super) fn parse_json_as( }; match (value, ty) { - (Value::Array(array), ty) => parse_json_array(array, ty, struct_defs), - (Value::Object(object), ty) => parse_json_map(object, ty, struct_defs), + (Value::Array(array), ty) => parse_json_array(array, ty), + (Value::Object(object), ty) => parse_json_map(object, ty), (Value::String(s), DynSolType::String) => Ok(DynSolValue::String(s.clone())), _ => string::parse_value(&to_string(value), ty), } } -pub(super) fn parse_json_array( - array: &[Value], - ty: &DynSolType, - struct_defs: Option<&StructDefinitions>, -) -> Result { +pub(super) fn parse_json_array(array: &[Value], ty: &DynSolType) -> Result { match ty { DynSolType::Tuple(types) => { ensure!(array.len() == types.len(), "array length mismatch"); let values = array .iter() .zip(types) - .map(|(e, ty)| parse_json_as(e, ty, struct_defs)) + .map(|(e, ty)| parse_json_as(e, ty)) .collect::>>()?; Ok(DynSolValue::Tuple(values)) } DynSolType::Array(inner) => { - let values = array - .iter() - .map(|e| parse_json_as(e, inner, struct_defs)) - .collect::>>()?; + let values = + array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; Ok(DynSolValue::Array(values)) } DynSolType::FixedArray(inner, len) => { ensure!(array.len() == *len, "array length mismatch"); - let values = array - .iter() - .map(|e| parse_json_as(e, inner, struct_defs)) - .collect::>>()?; + let values = + array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; Ok(DynSolValue::FixedArray(values)) } _ => bail!("expected {ty}, found array"), } } -pub(super) fn parse_json_map( - map: &Map, - ty: &DynSolType, - struct_defs: Option<&StructDefinitions>, -) -> Result { +pub(super) fn 
parse_json_map(map: &Map, ty: &DynSolType) -> Result { let Some((name, fields, types)) = ty.as_custom_struct() else { bail!("expected {ty}, found JSON object"); }; - let type_map: std::collections::HashMap<&str, &DynSolType> = - fields.iter().map(|s| s.as_str()).zip(types.iter()).collect(); - - let ordered_prop_names = if let Some(defs) = struct_defs - && let Some(struct_fields) = defs.get(name) - { - // Use the field order from the struct definition. - struct_fields.iter().map(|(field_name, _)| field_name.clone()).collect::>() - } else { - // Fall back to the fields from the type (which are alphabetically ordered). - fields.to_vec() - }; - - let mut tuple = Vec::with_capacity(ordered_prop_names.len()); - for field_name in &ordered_prop_names { + let mut tuple = Vec::with_capacity(fields.len()); + for (i, field_name) in fields.iter().enumerate() { let Some(value) = map.get(field_name) else { bail!("field {field_name:?} not found in JSON object") }; - let Some(field_ty) = type_map.get(field_name.as_str()) else { - // This case should ideally not be hit if the struct definition is consistent - // with the provided type. 
- bail!("type for field {field_name:?} not found in provided type description"); - }; - tuple.push(parse_json_as(value, *field_ty, struct_defs)?); + tuple.push(parse_json_as(value, &types[i])?); } - Ok(DynSolValue::CustomStruct { name: name.to_string(), prop_names: ordered_prop_names, tuple }) + Ok(DynSolValue::CustomStruct { name: name.to_string(), prop_names: fields.to_owned(), tuple }) } pub(super) fn parse_json_keys(json: &str, key: &str) -> Result { @@ -540,10 +465,10 @@ fn parse_json_str(json: &str) -> Result { serde_json::from_str(json).map_err(|e| fmt_err!("failed parsing JSON: {e}")) } -fn json_to_sol(state: &Cheatcodes, json: &[&Value]) -> Result> { +fn json_to_sol(defs: &StructDefinitions, json: &[&Value]) -> Result> { let mut sol = Vec::with_capacity(json.len()); for value in json { - sol.push(json_value_to_token(state, value)?); + sol.push(json_value_to_token(&defs, value)?); } Ok(sol) } @@ -581,19 +506,19 @@ pub(super) fn canonicalize_json_path(path: &str) -> Cow<'_, str> { /// it will call itself to convert each of it's value and encode the whole as a /// Tuple #[instrument(target = "cheatcodes", level = "trace", ret)] -pub(super) fn json_value_to_token(state: &Cheatcodes, value: &Value) -> Result { +pub(super) fn json_value_to_token(defs: &StructDefinitions, value: &Value) -> Result { match value { Value::Null => Ok(DynSolValue::FixedBytes(B256::ZERO, 32)), Value::Bool(boolean) => Ok(DynSolValue::Bool(*boolean)), Value::Array(array) => array .iter() - .map(|v| json_value_to_token(state, v)) + .map(|v| json_value_to_token(&defs, v)) .collect::>() .map(DynSolValue::Array), Value::Object(map) => { // Try to find a struct definition that matches the object keys. 
let keys: BTreeSet<_> = map.keys().map(|s| s.as_str()).collect(); - let matching_def = state.struct_defs.values().find(|fields| { + let matching_def = defs.values().find(|fields| { fields.len() == keys.len() && fields.iter().map(|(name, _)| name.as_str()).collect::>() == keys }); @@ -604,7 +529,7 @@ pub(super) fn json_value_to_token(state: &Cheatcodes, value: &Value) -> Result>() .map(DynSolValue::Tuple) @@ -614,7 +539,7 @@ pub(super) fn json_value_to_token(state: &Cheatcodes, value: &Value) -> Result>() .map(DynSolValue::Tuple) } @@ -788,12 +713,60 @@ pub(super) fn resolve_type( .first() .ok_or_else(|| fmt_err!("EIP-712 type description is empty"))? .type_name; - return Ok(resolver.resolve(main_type)?); + + // Get the alphabetically-sorted type from the resolver, and reorder if necessary. + let resolved_ty = resolver.resolve(main_type)?; + return Ok(if let Some(defs) = struct_defs { + reorder_type(resolved_ty, defs) + } else { + resolved_ty + }); } bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string") } +/// Recursively traverses a `DynSolType` and reorders the fields of any +/// `CustomStruct` variants according to the provided `StructDefinitions`. +/// +/// This is necessary because the EIP-712 resolver sorts struct fields alphabetically, +/// but we want to respect the order defined in the Solidity source code. +fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { + match ty { + DynSolType::CustomStruct { name, prop_names, tuple } => { + if let Some(def) = struct_defs.get(&name) { + // The incoming `prop_names` and `tuple` are alphabetically sorted. 
+ let type_map: std::collections::HashMap = + prop_names.into_iter().zip(tuple.into_iter()).collect(); + + let mut sorted_props = Vec::with_capacity(def.len()); + let mut sorted_tuple = Vec::with_capacity(def.len()); + for (field_name, _) in def { + sorted_props.push(field_name.clone()); + if let Some(field_ty) = type_map.get(field_name) { + sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)); + } + // NOTE(rusowsky): Should we bail if there is a missing file? + } + DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple } + } else { + // No definition found, so we can't reorder. However, we still reorder its children + // in case they have known structs. + let new_tuple = tuple.into_iter().map(|t| reorder_type(t, struct_defs)).collect(); + DynSolType::CustomStruct { name, prop_names, tuple: new_tuple } + } + } + DynSolType::Array(inner) => DynSolType::Array(Box::new(reorder_type(*inner, struct_defs))), + DynSolType::FixedArray(inner, len) => { + DynSolType::FixedArray(Box::new(reorder_type(*inner, struct_defs)), len) + } + DynSolType::Tuple(inner) => { + DynSolType::Tuple(inner.into_iter().map(|t| reorder_type(t, struct_defs)).collect()) + } + _ => ty, + } +} + #[cfg(test)] mod tests { use super::*; @@ -811,6 +784,15 @@ mod tests { } } + fn is_not_json_object_string(value: &DynSolValue) -> bool { + if let DynSolValue::String(s) = value { + if let Ok(parsed_json) = serde_json::from_str::(s) { + return !parsed_json.is_object(); + } + } + true + } + /// [DynSolValue::Bytes] of length 32 and 20 are converted to [DynSolValue::FixedBytes] and /// [DynSolValue::Address] respectively. Thus, we can't distinguish between address and bytes of /// length 20 during decoding. Because of that, there are issues with handling of arrays of @@ -844,27 +826,26 @@ mod tests { // Tests to ensure that conversion [DynSolValue] -> [serde_json::Value] -> [DynSolValue] proptest::proptest! 
{ - // TODO: fix - // #[test] - // fn test_json_roundtrip_guessed(v in guessable_types()) { - // let json = serialize_value_as_json(v.clone()).unwrap(); - // let value = json_value_to_token(&json).unwrap(); + #[test] + fn test_json_roundtrip_guessed(v in guessable_types()) { + let json = serialize_value_as_json(v.clone()).unwrap(); + let value = json_value_to_token(&StructDefinitions::default(), &json).unwrap(); - // // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. - // let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap(); - // assert_eq!(decoded, v); - // } + // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. + let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap(); + assert_eq!(decoded, v); + } #[test] fn test_json_roundtrip(v in proptest::arbitrary::any::().prop_filter("filter out values without type", |v| v.as_type().is_some())) { let json = serialize_value_as_json(v.clone()).unwrap(); - let value = parse_json_as(&json, &v.as_type().unwrap(), None).unwrap(); + let value = parse_json_as(&json, &v.as_type().unwrap()).unwrap(); assert_eq!(value, v); } } #[test] - fn test_parse_json_struct_with_definitions() { + fn test_resolve_type_with_definitions() -> Result<()> { // Define a struct with fields in a specific order (not alphabetical) let mut defs_map = HashMap::new(); defs_map.insert( @@ -884,186 +865,89 @@ mod tests { ); let struct_defs = StructDefinitions::new(defs_map); - // JSON with fields in different order - let json_str = r#"{ - "apples": [ - { - "sweetness": 7, - "sourness": 3, - "color": "Red" - }, - { - "sweetness": 5, - "sourness": 5, - "color": "Green" - }, - { - "sweetness": 9, - "sourness": 1, - "color": "Yellow" - } - ], - "name": "Fresh Fruit" - }"#; - - // Create the expected type. The type resolver will order properties alphabetically. 
- let apple_ty = DynSolType::CustomStruct { - name: "Apple".to_string(), - prop_names: vec!["color".to_string(), "sourness".to_string(), "sweetness".to_string()], - tuple: vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)], - }; - - let fruit_ty = DynSolType::CustomStruct { - name: "FruitStall".to_string(), - prop_names: vec!["apples".to_string(), "name".to_string()], - tuple: vec![DynSolType::Array(Box::new(apple_ty)), DynSolType::String], - }; + // Simulate resolver output: type string, using alphabetical order for fields. + let ty_desc = "FruitStall(Apple[] apples,string name)Apple(string color,uint8 sourness,uint8 sweetness)"; - // Parse the JSON. - let json_value: Value = serde_json::from_str(json_str).unwrap(); - let result = parse_json_as(&json_value, &fruit_ty, Some(&struct_defs)).unwrap(); - - // Assert that the parsed structure respects the definition order. - if let DynSolValue::CustomStruct { name, prop_names, tuple } = result { + // Resolve type and ensure struct definition order is preserved. + let ty = resolve_type(ty_desc, Some(&struct_defs)).unwrap(); + if let DynSolType::CustomStruct { name, prop_names, tuple } = ty { assert_eq!(name, "FruitStall"); assert_eq!(prop_names, vec!["name", "apples"]); assert_eq!(tuple.len(), 2); - assert_eq!(tuple[0], DynSolValue::String("Fresh Fruit".to_string())); + assert_eq!(tuple[0], DynSolType::String); - if let DynSolValue::Array(apples) = &tuple[1] { - assert_eq!(apples.len(), 3); - if let DynSolValue::CustomStruct { prop_names, tuple, .. } = &apples[0] { - assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); - assert_eq!(tuple[0], DynSolValue::String("Red".to_string())); - assert_eq!(tuple[1], DynSolValue::Uint(U256::from(7), 8)); - assert_eq!(tuple[2], DynSolValue::Uint(U256::from(3), 8)); - } else { - panic!("Expected CustomStruct for the first apple"); - } - if let DynSolValue::CustomStruct { prop_names, tuple, .. 
} = &apples[2] { + if let DynSolType::Array(apple_ty_boxed) = &tuple[1] { + if let DynSolType::CustomStruct { name, prop_names, tuple } = &**apple_ty_boxed { + assert_eq!(*name, "Apple"); + // Check that the inner struct's fields are also in definition order. assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); - assert_eq!(tuple[0], DynSolValue::String("Yellow".to_string())); - assert_eq!(tuple[1], DynSolValue::Uint(U256::from(9), 8)); - assert_eq!(tuple[2], DynSolValue::Uint(U256::from(1), 8)); - } else { - panic!("Expected CustomStruct for the second apple"); + assert_eq!( + *tuple, + vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)] + ); + + return Ok(()); } - } else { - panic!("Expected an array of apples"); } - } else { - panic!("Expected a CustomStruct for FruitStall"); } + panic!("Expected FruitStall and Apple to be CustomStruct"); } #[test] - fn test_parse_json_struct_without_definitions() { - // JSON with fields in alphabetical order - let json_str = r#"{ - "active": true, - "age": 30, - "name": "Alice" - }"#; - - // Create the expected type with alphabetical order - let ty = DynSolType::CustomStruct { - name: "Person".to_string(), - prop_names: vec!["active".to_string(), "age".to_string(), "name".to_string()], - tuple: vec![DynSolType::Bool, DynSolType::Uint(256), DynSolType::String], - }; - - // Parse JSON without struct definitions - let json_value: Value = serde_json::from_str(json_str).unwrap(); - let result = parse_json_as(&json_value, &ty, None).unwrap(); - - // Check that fields remain in alphabetical order when no definitions provided - match result { - DynSolValue::CustomStruct { name, prop_names, tuple } => { - assert_eq!(name, "Person"); - assert_eq!(prop_names, vec!["active", "age", "name"]); - assert_eq!(tuple.len(), 3); - - // Check values are in alphabetical order - assert_eq!(tuple[0], DynSolValue::Bool(true)); - assert_eq!(tuple[1], DynSolValue::Uint(U256::from(30), 256)); - assert_eq!(tuple[2], 
DynSolValue::String("Alice".to_string())); - } - _ => panic!("Expected CustomStruct"), + fn test_resolve_type_without_definitions() -> Result<()> { + // Simulate resolver output: type string, using alphabetical order for fields. + let ty_desc = "Person(bool active,uint256 age,string name)"; + + // Resolve the type without providing any struct definitions and ensure that original + // (alphabetical) order is unchanged. + let ty = resolve_type(ty_desc, None).unwrap(); + if let DynSolType::CustomStruct { name, prop_names, tuple } = ty { + assert_eq!(name, "Person"); + assert_eq!(prop_names, vec!["active", "age", "name"]); + assert_eq!(tuple.len(), 3); + assert_eq!(tuple, vec![DynSolType::Bool, DynSolType::Uint(256), DynSolType::String]); + return Ok(()); } + panic!("Expected Person to be CustomStruct"); } #[test] - fn test_parse_json_array_of_structs() { + fn test_resolve_type_for_array_of_structs() -> Result<()> { + // Define a struct with fields in a specific, non-alphabetical order. let mut defs_map = HashMap::new(); defs_map.insert( "Item".to_string(), vec![ - ("id".to_string(), "uint256".to_string()), ("name".to_string(), "string".to_string()), ("price".to_string(), "uint256".to_string()), + ("id".to_string(), "uint256".to_string()), ], ); let struct_defs = StructDefinitions::new(defs_map); - // JSON array with structs - let json_str = r#"[ - { - "name": "Apple", - "price": 100, - "id": 1 - }, - { - "price": 200, - "id": 2, - "name": "Banana" - } - ]"#; - - // Create the expected type - let item_ty = DynSolType::CustomStruct { - name: "Item".to_string(), - prop_names: vec!["id".to_string(), "name".to_string(), "price".to_string()], - tuple: vec![DynSolType::Uint(256), DynSolType::String, DynSolType::Uint(256)], - }; - let array_ty = DynSolType::Array(Box::new(item_ty)); - - // Parse JSON - let json_value: Value = serde_json::from_str(json_str).unwrap(); - let result = parse_json_as(&json_value, &array_ty, Some(&struct_defs)).unwrap(); - - // Check array of 
structs - match result { - DynSolValue::Array(items) => { - assert_eq!(items.len(), 2); - - // Check first item - match &items[0] { - DynSolValue::CustomStruct { prop_names, tuple, .. } => { - assert_eq!(prop_names, &vec!["id", "name", "price"]); - assert_eq!(tuple[0], DynSolValue::Uint(U256::from(1), 256)); - assert_eq!(tuple[1], DynSolValue::String("Apple".to_string())); - assert_eq!(tuple[2], DynSolValue::Uint(U256::from(100), 256)); - } - _ => panic!("Expected CustomStruct in array"), - } - - // Check second item - match &items[1] { - DynSolValue::CustomStruct { prop_names, tuple, .. } => { - assert_eq!(prop_names, &vec!["id", "name", "price"]); - assert_eq!(tuple[0], DynSolValue::Uint(U256::from(2), 256)); - assert_eq!(tuple[1], DynSolValue::String("Banana".to_string())); - assert_eq!(tuple[2], DynSolValue::Uint(U256::from(200), 256)); - } - _ => panic!("Expected CustomStruct in array"), - } - } - _ => panic!("Expected Array"), + // Simulate resolver output: type string, using alphabetical order for fields. + let ty_desc = "Item(uint256 id,string name,uint256 price)"; + + // Resolve type and ensure struct definition order is preserved. + let ty = resolve_type(ty_desc, Some(&struct_defs)).unwrap(); + let array_ty = DynSolType::Array(Box::new(ty)); + if let DynSolType::Array(item_ty) = array_ty + && let DynSolType::CustomStruct { name, prop_names, tuple } = *item_ty + { + assert_eq!(name, "Item"); + assert_eq!(prop_names, vec!["name", "price", "id"]); + assert_eq!( + tuple, + vec![DynSolType::String, DynSolType::Uint(256), DynSolType::Uint(256)] + ); + return Ok(()); } + panic!("Expected CustomStruct in array"); } #[test] fn test_parse_json_missing_field() { + // Define a struct with a specific field order. 
let mut defs_map = HashMap::new(); defs_map.insert( "Person".to_string(), @@ -1077,17 +961,16 @@ mod tests { // JSON missing the "age" field let json_str = r#"{ "name": "Alice" }"#; - let ty = DynSolType::CustomStruct { - name: "Person".to_string(), - prop_names: vec!["age".to_string(), "name".to_string()], - tuple: vec![DynSolType::Uint(256), DynSolType::String], - }; + // Simulate resolver output: type string, using alphabetical order for fields. + let type_description = "Person(uint256 age,string name)"; + let ty = resolve_type(type_description, Some(&struct_defs)).unwrap(); + // Now, attempt to parse the incomplete JSON using the ordered type. let json_value: Value = serde_json::from_str(json_str).unwrap(); - let result = parse_json_as(&json_value, &ty, Some(&struct_defs)); + let result = parse_json_as(&json_value, &ty); - // Should fail with missing field error + // Should fail with a missing field error because `parse_json_map` requires all fields. assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("field \"age\" not found")); + assert!(result.unwrap_err().to_string().contains("field \"age\" not found in JSON object")); } } diff --git a/crates/cheatcodes/src/toml.rs b/crates/cheatcodes/src/toml.rs index 776b63309cc0c..0d7864796affc 100644 --- a/crates/cheatcodes/src/toml.rs +++ b/crates/cheatcodes/src/toml.rs @@ -37,7 +37,7 @@ impl Cheatcode for parseToml_1Call { } impl Cheatcode for parseTomlUintCall { - fn apply(&self, state: &mut Cheatcodes) -> Result { + fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; parse_toml_coerce(toml, key, &DynSolType::Uint(256)) } @@ -201,12 +201,12 @@ fn parse_toml_str(toml: &str) -> Result { /// Parse a TOML string and return the value at the given path. 
fn parse_toml(state: &Cheatcodes, toml: &str, key: &str) -> Result { - parse_json(state, &toml_to_json_string(toml)?, key) + parse_json(&state.struct_defs, &toml_to_json_string(toml)?, key) } /// Parse a TOML string and return the value at the given path, coercing it to the given type. fn parse_toml_coerce(toml: &str, key: &str, ty: &DynSolType) -> Result { - parse_json_coerce(&toml_to_json_string(toml)?, key, ty, None) + parse_json_coerce(&toml_to_json_string(toml)?, key, ty) } /// Parse a TOML string and return an array of all keys at the given path. diff --git a/crates/common/src/sema.rs b/crates/common/src/sema.rs index b3b00340f532d..0233982226021 100644 --- a/crates/common/src/sema.rs +++ b/crates/common/src/sema.rs @@ -82,19 +82,22 @@ impl<'hir> SemanticAnalysisProcessor<'hir> { for &field_id in strukt.fields { let var = hir.variable(field_id); let name = var.name.ok_or_else(|| eyre!("Struct field is missing a name"))?.to_string(); - let ty_str = self.ty_to_string(gcx.type_of_hir_ty(&var.ty))?; + if let Some(ty_str) = self.ty_to_string(gcx.type_of_hir_ty(&var.ty)) { + fields.push((name, ty_str)); + } + } - fields.push((name, ty_str)); + if !fields.is_empty() { + self.struct_defs.insert(qualified_name, fields); } - self.struct_defs.insert(qualified_name, fields); Ok(()) } /// Converts a resolved `Ty` into its canonical string representation. - fn ty_to_string(&mut self, ty: Ty<'hir>) -> Result { + fn ty_to_string(&mut self, ty: Ty<'hir>) -> Option { let ty = ty.peel_refs(); - Ok(match ty.kind { + let res = match ty.kind { TyKind::Elementary(e) => e.to_string(), TyKind::Array(ty, size) => { let inner_type = self.ty_to_string(ty)?; @@ -106,7 +109,7 @@ impl<'hir> SemanticAnalysisProcessor<'hir> { } TyKind::Struct(id) => { // Ensure the nested struct is resolved before proceeding. 
- self.resolve_struct_definition(id)?; + self.resolve_struct_definition(id).ok()?; self.get_fully_qualified_name(id) } TyKind::Udvt(ty, _) => self.ty_to_string(ty)?, @@ -115,8 +118,10 @@ impl<'hir> SemanticAnalysisProcessor<'hir> { // For now, map contracts to `address` TyKind::Contract(_) => "address".to_string(), // Explicitly disallow unsupported types - _ => eyre::bail!("Unsupported field type"), - }) + _ => return None, + }; + + Some(res) } /// Helper to get the fully qualified name `Contract.Struct`. From 4c5fa54f23c64832a53dd49502eb2d4e0bb0e8d8 Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 18:01:41 +0200 Subject: [PATCH 3/8] unit test --- crates/forge/tests/cli/bind_json.rs | 51 ++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/crates/forge/tests/cli/bind_json.rs b/crates/forge/tests/cli/bind_json.rs index fcc081f6b6f06..bb4d644fc8485 100644 --- a/crates/forge/tests/cli/bind_json.rs +++ b/crates/forge/tests/cli/bind_json.rs @@ -70,7 +70,7 @@ interface Vm { function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json); function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json); } - + library JsonBindings { Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); @@ -123,3 +123,52 @@ library JsonBindings { cmd.forge_fuse().args(["test"]).assert_success(); }); + +// tests enhanced `vm.parseJson` cheatcode, which isn't constraint to alphabetical ordering of the +// struct types, partially closing the gap with `forge bind-json`. 
+forgetest_init!(test_parse_json, |prj, cmd| { + prj.add_test( + "JsonCheats", + r#" +import {Test} from "forge-std/Test.sol"; + +struct Apple { + string color; + uint8 sweetness; + uint8 sourness; +} + +struct FruitStall { + string name; + Apple[] apples; +} + +contract JsonParseCheatsTest is Test { + function testJsonParseOrder() public { + string memory json = + '{"name":"Fresh Fruit","apples":[{"sweetness":7,"sourness":3,"color":"Red"},{"sweetness":5,"sourness":5,"color":"Green"}]}'; + + bytes memory decoded = vm.parseJson(json); + FruitStall memory stall = abi.decode(decoded, (FruitStall)); + + assertEq(stall.apples.length, 2); + assertEq(stall.name, "Fresh Fruit"); + + Apple memory appple = stall.apples[0]; + assertEq(appple.color, "Red"); + assertEq(appple.sweetness, 7); + assertEq(appple.sourness, 3); + + appple = stall.apples[1]; + assertEq(appple.color, "Green"); + assertEq(appple.sweetness, 5); + assertEq(appple.sourness, 5); + } +} +"#, + ) + .unwrap(); + + // Directly run the test. No `bind-json` or type schemas are needed. 
+ cmd.forge_fuse().args(["test"]).assert_success(); +}); From 3c664f562021488175fd4a9677b374ecf4d19dae Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 18:16:14 +0200 Subject: [PATCH 4/8] style: clippy --- Cargo.lock | 1 - crates/cheatcodes/src/json.rs | 41 ++++++++++---------------- crates/common/src/sema.rs | 16 +++++----- crates/evm/evm/Cargo.toml | 1 - crates/evm/evm/src/inspectors/stack.rs | 2 +- crates/forge/src/multi_runner.rs | 2 +- 6 files changed, 25 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2b5deafcf6292..3fe504399f294 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4468,7 +4468,6 @@ dependencies = [ "revm-inspectors", "serde", "serde_json", - "solar-sema", "thiserror 2.0.12", "tracing", "uuid 1.17.0", diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index 875012479b173..b9d66c3afd3ce 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -468,7 +468,7 @@ fn parse_json_str(json: &str) -> Result { fn json_to_sol(defs: &StructDefinitions, json: &[&Value]) -> Result> { let mut sol = Vec::with_capacity(json.len()); for value in json { - sol.push(json_value_to_token(&defs, value)?); + sol.push(json_value_to_token(defs, value)?); } Ok(sol) } @@ -512,7 +512,7 @@ pub(super) fn json_value_to_token(defs: &StructDefinitions, value: &Value) -> Re Value::Bool(boolean) => Ok(DynSolValue::Bool(*boolean)), Value::Array(array) => array .iter() - .map(|v| json_value_to_token(&defs, v)) + .map(|v| json_value_to_token(defs, v)) .collect::>() .map(DynSolValue::Array), Value::Object(map) => { @@ -737,7 +737,7 @@ fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { if let Some(def) = struct_defs.get(&name) { // The incoming `prop_names` and `tuple` are alphabetically sorted. 
let type_map: std::collections::HashMap = - prop_names.into_iter().zip(tuple.into_iter()).collect(); + prop_names.into_iter().zip(tuple).collect(); let mut sorted_props = Vec::with_capacity(def.len()); let mut sorted_tuple = Vec::with_capacity(def.len()); @@ -770,7 +770,7 @@ fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::{FixedBytes, U256}; + use alloy_primitives::FixedBytes; use proptest::strategy::Strategy; use std::collections::HashMap; @@ -784,15 +784,6 @@ mod tests { } } - fn is_not_json_object_string(value: &DynSolValue) -> bool { - if let DynSolValue::String(s) = value { - if let Ok(parsed_json) = serde_json::from_str::(s) { - return !parsed_json.is_object(); - } - } - true - } - /// [DynSolValue::Bytes] of length 32 and 20 are converted to [DynSolValue::FixedBytes] and /// [DynSolValue::Address] respectively. Thus, we can't distinguish between address and bytes of /// length 20 during decoding. Because of that, there are issues with handling of arrays of @@ -876,18 +867,18 @@ mod tests { assert_eq!(tuple.len(), 2); assert_eq!(tuple[0], DynSolType::String); - if let DynSolType::Array(apple_ty_boxed) = &tuple[1] { - if let DynSolType::CustomStruct { name, prop_names, tuple } = &**apple_ty_boxed { - assert_eq!(*name, "Apple"); - // Check that the inner struct's fields are also in definition order. - assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); - assert_eq!( - *tuple, - vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)] - ); - - return Ok(()); - } + if let DynSolType::Array(apple_ty_boxed) = &tuple[1] + && let DynSolType::CustomStruct { name, prop_names, tuple } = &**apple_ty_boxed + { + assert_eq!(*name, "Apple"); + // Check that the inner struct's fields are also in definition order. 
+ assert_eq!(*prop_names, vec!["color", "sweetness", "sourness"]); + assert_eq!( + *tuple, + vec![DynSolType::String, DynSolType::Uint(8), DynSolType::Uint(8)] + ); + + return Ok(()); } } panic!("Expected FruitStall and Apple to be CustomStruct"); diff --git a/crates/common/src/sema.rs b/crates/common/src/sema.rs index 0233982226021..866e3fccbdd65 100644 --- a/crates/common/src/sema.rs +++ b/crates/common/src/sema.rs @@ -1,5 +1,4 @@ -//! Semantic analysis helpers for extracting type information and other useful metadata from the -//! HIR. +//! Semantic analysis helpers for extracting type information and other metadata from the HIR. use eyre::{Result, eyre}; use solar_sema::{ @@ -13,13 +12,13 @@ pub struct StructDefinitions(Arc>>); impl StructDefinitions { pub fn new(map: HashMap>) -> Self { - StructDefinitions(Arc::new(map)) + Self(Arc::new(map)) } } impl Default for StructDefinitions { fn default() -> Self { - StructDefinitions(Arc::new(HashMap::new())) + Self(Arc::new(HashMap::new())) } } @@ -101,11 +100,11 @@ impl<'hir> SemanticAnalysisProcessor<'hir> { TyKind::Elementary(e) => e.to_string(), TyKind::Array(ty, size) => { let inner_type = self.ty_to_string(ty)?; - format!("{}[{}]", inner_type, size) + format!("{inner_type}[{size}]") } TyKind::DynArray(ty) => { let inner_type = self.ty_to_string(ty)?; - format!("{}[]", inner_type) + format!("{inner_type}[]") } TyKind::Struct(id) => { // Ensure the nested struct is resolved before proceeding. 
@@ -129,10 +128,9 @@ impl<'hir> SemanticAnalysisProcessor<'hir> { let hir = self.hir(); let strukt = hir.strukt(id); if let Some(contract_id) = strukt.contract { - let contract_name = hir.contract(contract_id).name.as_str(); - format!("{}.{}", contract_name, strukt.name.as_str()) + format!("{}.{}", hir.contract(contract_id).name.as_str(), strukt.name.as_str()) } else { - strukt.name.as_str().to_string() + strukt.name.as_str().into() } } } diff --git a/crates/evm/evm/Cargo.toml b/crates/evm/evm/Cargo.toml index be284b21c465d..7124c9d0bb0ac 100644 --- a/crates/evm/evm/Cargo.toml +++ b/crates/evm/evm/Cargo.toml @@ -45,7 +45,6 @@ revm = { workspace = true, default-features = false, features = [ "c-kzg", ] } revm-inspectors.workspace = true -solar-sema.workspace = true eyre.workspace = true parking_lot.workspace = true diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs index b2c2ebeaf820f..bad029074ff2f 100644 --- a/crates/evm/evm/src/inspectors/stack.rs +++ b/crates/evm/evm/src/inspectors/stack.rs @@ -72,7 +72,7 @@ pub struct InspectorStackBuilder { pub wallets: Option, /// The CREATE2 deployer address. pub create2_deployer: Address, - /// + /// The user-defined structs of the contracts. pub struct_defs: StructDefinitions, } diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 01e51a014f788..3cd89db017b17 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -416,7 +416,7 @@ pub struct MultiContractRunnerBuilder { pub isolation: bool, /// Whether to enable Odyssey features. pub odyssey: bool, - /// + /// The user-defined structs of the contracts. 
pub struct_defs: StructDefinitions, } From cd260df918d50c63350739862fa145de5683aaf8 Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 18:45:03 +0200 Subject: [PATCH 5/8] style: housekeeping --- crates/cheatcodes/src/evm/fork.rs | 10 +++---- crates/cheatcodes/src/json.rs | 49 ++++++++----------------------- 2 files changed, 17 insertions(+), 42 deletions(-) diff --git a/crates/cheatcodes/src/evm/fork.rs b/crates/cheatcodes/src/evm/fork.rs index a6386e0dd40f1..54e68e1feffc1 100644 --- a/crates/cheatcodes/src/evm/fork.rs +++ b/crates/cheatcodes/src/evm/fork.rs @@ -7,7 +7,7 @@ use alloy_primitives::{B256, U256}; use alloy_provider::Provider; use alloy_rpc_types::Filter; use alloy_sol_types::SolValue; -use foundry_common::provider::ProviderBuilder; +use foundry_common::{provider::ProviderBuilder, sema::StructDefinitions}; use foundry_evm_core::{AsEnvMut, ContextExt, fork::CreateFork}; impl Cheatcode for activeForkCall { @@ -208,7 +208,7 @@ impl Cheatcode for rpc_0Call { .database .active_fork_url() .ok_or_else(|| fmt_err!("no active fork URL found"))?; - rpc_call(ccx.state, &url, method, params) + rpc_call(&ccx.state.struct_defs, &url, method, params) } } @@ -216,7 +216,7 @@ impl Cheatcode for rpc_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { urlOrAlias, method, params } = self; let url = state.config.rpc_endpoint(urlOrAlias)?.url()?; - rpc_call(state, &url, method, params) + rpc_call(&state.struct_defs, &url, method, params) } } @@ -369,14 +369,14 @@ fn persist_caller(ccx: &mut CheatsCtxt) { } /// Performs an Ethereum JSON-RPC request to the given endpoint. 
-fn rpc_call(state: &Cheatcodes, url: &str, method: &str, params: &str) -> Result { +fn rpc_call(struct_defs: &StructDefinitions, url: &str, method: &str, params: &str) -> Result { let provider = ProviderBuilder::new(url).build()?; let params_json: serde_json::Value = serde_json::from_str(params)?; let result = foundry_common::block_on(provider.raw_request(method.to_string().into(), params_json)) .map_err(|err| fmt_err!("{method:?}: {err}"))?; let result_as_tokens = convert_to_bytes( - &json_value_to_token(&state.struct_defs, &result) + &json_value_to_token(struct_defs, &result) .map_err(|err| fmt_err!("failed to parse result: {err}"))?, ); diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index b9d66c3afd3ce..4c6b37e8dbebf 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -1,7 +1,7 @@ //! Implementations of [`Json`](spec::Group::Json) cheatcodes. use crate::{Cheatcode, Cheatcodes, Result, Vm::*, string}; -use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712, eip712_parser}; +use alloy_dyn_abi::{DynSolType, DynSolValue, Resolver, eip712_parser}; use alloy_primitives::{Address, B256, I256, hex}; use alloy_sol_types::SolValue; use foundry_common::{fs, sema::StructDefinitions}; @@ -535,6 +535,7 @@ pub(super) fn json_value_to_token(defs: &StructDefinitions, value: &Value) -> Re .map(DynSolValue::Tuple) } else { // Fallback to alphabetical sorting if no matching struct is found. + // See: [#3647](https://github.com/foundry-rs/foundry/pull/3647) let ordered_object: BTreeMap<_, _> = map.iter().map(|(k, v)| (k.clone(), v.clone())).collect(); ordered_object @@ -678,49 +679,23 @@ pub(super) fn resolve_type( type_description: &str, struct_defs: Option<&StructDefinitions>, ) -> Result { - let mut resolver = Resolver::default(); - - // Populate the resolver with all known struct definitions from the project. 
- if let Some(struct_defs) = struct_defs { - for (name, fields) in struct_defs.iter() { - let props = fields - .iter() - .filter_map(|(field_name, field_ty)| { - eip712::PropertyDef::new(field_ty.as_str(), field_name.as_str()).ok() - }) - .collect::>(); - - if props.len() != fields.len() { - bail!("struct has an invalid field"); - } + let ordered_ty = |ty| { + if let Some(defs) = struct_defs { reorder_type(ty, defs) } else { ty } + }; - // The struct name from `StructDefinitions` should be a valid root type. - let type_def = eip712::TypeDef::new(name.clone(), props)?; - resolver.ingest(type_def); - } - } + if let Ok(ty) = DynSolType::parse(type_description) { + return Ok(ordered_ty(ty)); + }; if let Ok(encoded) = eip712_parser::EncodeType::parse(type_description) { - // Ingest the types from the EIP-712 string. These might be new or override - // project-wide definitions for the scope of this resolution. + let main_type = encoded.types[0].type_name; + let mut resolver = Resolver::default(); for t in &encoded.types { resolver.ingest(t.to_owned()); } - // The primary type is the first one in an EIP-712 string. - let main_type = encoded - .types - .first() - .ok_or_else(|| fmt_err!("EIP-712 type description is empty"))? - .type_name; - // Get the alphabetically-sorted type from the resolver, and reorder if necessary. - let resolved_ty = resolver.resolve(main_type)?; - return Ok(if let Some(defs) = struct_defs { - reorder_type(resolved_ty, defs) - } else { - resolved_ty - }); + return Ok(ordered_ty(resolver.resolve(main_type)?)); } bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string") @@ -746,7 +721,7 @@ fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { if let Some(field_ty) = type_map.get(field_name) { sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)); } - // NOTE(rusowsky): Should we bail if there is a missing file? + // NOTE(rusowsky): Should we bail if there is a missing field? 
} DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple } } else { From f163cc41112a1eacaa08bac739ba47138e25aedf Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Tue, 22 Jul 2025 23:39:33 +0200 Subject: [PATCH 6/8] enhance serializeJsonType + add unit tests --- crates/cheatcodes/src/inspector.rs | 7 +- crates/cheatcodes/src/json.rs | 238 +++++++++++++++++++++++----- crates/cheatcodes/src/toml.rs | 2 +- crates/common/src/sema.rs | 18 +-- crates/forge/tests/cli/bind_json.rs | 57 ++++--- 5 files changed, 246 insertions(+), 76 deletions(-) diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs index fa4c39bd802cf..e4d7140d3fcf7 100644 --- a/crates/cheatcodes/src/inspector.rs +++ b/crates/cheatcodes/src/inspector.rs @@ -455,10 +455,13 @@ pub struct Cheatcodes { /// Used to prevent duplicate changes file executing non-committing calls. pub fs_commit: bool, - /// Serialized JSON values. - pub serialized_jsons: HashMap>, + /// Struct definitions in the contracts. Used to keep field order when parsing JSON values. pub struct_defs: StructDefinitions, + /// Serialized JSON values. + // **Note**: both must a BTreeMap to ensure the order of the keys is deterministic. + pub serialized_jsons: BTreeMap>, + /// All recorded ETH `deal`s. 
pub eth_deals: Vec, diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index 4c6b37e8dbebf..dc170511754a7 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -9,7 +9,7 @@ use foundry_config::fs_permissions::FsAccessKind; use serde_json::{Map, Value}; use std::{ borrow::Cow, - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, HashMap}, }; impl Cheatcode for keyExistsCall { @@ -29,14 +29,14 @@ impl Cheatcode for keyExistsJsonCall { impl Cheatcode for parseJson_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json } = self; - parse_json(&state.struct_defs, json, "$") + parse_json(json, "$", &state.struct_defs) } } impl Cheatcode for parseJson_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json(&state.struct_defs, json, key) + parse_json(json, key, &state.struct_defs) } } @@ -317,7 +317,7 @@ impl Cheatcode for serializeJsonType_0Call { let Self { typeDescription, value } = self; let ty = resolve_type(typeDescription, Some(&state.struct_defs))?; let value = ty.abi_decode(value)?; - let value = serialize_value_as_json(value)?; + let value = serialize_value_as_json(value, &state.struct_defs)?; Ok(value.to_string().abi_encode()) } } @@ -373,7 +373,7 @@ pub(super) fn check_json_key_exists(json: &str, key: &str) -> Result { Ok(exists.abi_encode()) } -pub(super) fn parse_json(defs: &StructDefinitions, json: &str, path: &str) -> Result { +pub(super) fn parse_json(json: &str, path: &str, defs: &StructDefinitions) -> Result { let value = parse_json_str(json)?; let selected = select(&value, path)?; let sol = json_to_sol(defs, &selected)?; @@ -611,7 +611,7 @@ pub(super) fn json_value_to_token(defs: &StructDefinitions, value: &Value) -> Re } /// Serializes given [DynSolValue] into a [serde_json::Value]. 
-fn serialize_value_as_json(value: DynSolValue) -> Result { +fn serialize_value_as_json(value: DynSolValue, defs: &StructDefinitions) -> Result { match value { DynSolValue::Bool(b) => Ok(Value::Bool(b)), DynSolValue::String(s) => { @@ -636,18 +636,33 @@ fn serialize_value_as_json(value: DynSolValue) -> Result { Ok(Value::Number(n)) } DynSolValue::Address(a) => Ok(Value::String(a.to_string())), - DynSolValue::Array(e) | DynSolValue::FixedArray(e) => { - Ok(Value::Array(e.into_iter().map(serialize_value_as_json).collect::>()?)) - } - DynSolValue::CustomStruct { name: _, prop_names, tuple } => { - let values = - tuple.into_iter().map(serialize_value_as_json).collect::>>()?; - let map = prop_names.into_iter().zip(values).collect(); + DynSolValue::Array(e) | DynSolValue::FixedArray(e) => Ok(Value::Array( + e.into_iter().map(|v| serialize_value_as_json(v, defs)).collect::>()?, + )), + DynSolValue::CustomStruct { name, prop_names, tuple } => { + let values = tuple + .into_iter() + .map(|v| serialize_value_as_json(v, defs)) + .collect::>>()?; + let mut map: HashMap = prop_names.into_iter().zip(values).collect(); + + // If the struct def is known, manually build a `Map` to preserve the order. + if let Some(fields) = defs.get(&name) { + let mut ordered_map = Map::with_capacity(fields.len()); + for (field_name, _) in fields { + if let Some(serialized_value) = map.remove(field_name) { + ordered_map.insert(field_name.clone(), serialized_value); + } + } + // Explicitly return a `Value::Object` to avoid ambiguity. + return Ok(Value::Object(ordered_map)); + } - Ok(Value::Object(map)) + // Otherwise, fall back to alphabetical sorting for deterministic output. 
+ Ok(Value::Object(map.into_iter().collect::>())) } DynSolValue::Tuple(values) => Ok(Value::Array( - values.into_iter().map(serialize_value_as_json).collect::>()?, + values.into_iter().map(|v| serialize_value_as_json(v, defs)).collect::>()?, )), DynSolValue::Function(_) => bail!("cannot serialize function pointer"), } @@ -667,9 +682,9 @@ fn serialize_json( value_key: &str, value: DynSolValue, ) -> Result { - let value = serialize_value_as_json(value)?; + let value = serialize_value_as_json(value, &state.struct_defs)?; let map = state.serialized_jsons.entry(object_key.into()).or_default(); - map.push((value_key.into(), value)); + map.insert(value_key.into(), value); let stringified = serde_json::to_string(map).unwrap(); Ok(stringified.abi_encode()) } @@ -679,12 +694,12 @@ pub(super) fn resolve_type( type_description: &str, struct_defs: Option<&StructDefinitions>, ) -> Result { - let ordered_ty = |ty| { - if let Some(defs) = struct_defs { reorder_type(ty, defs) } else { ty } + let ordered_ty = |ty| -> Result { + if let Some(defs) = struct_defs { reorder_type(ty, defs) } else { Ok(ty) } }; if let Ok(ty) = DynSolType::parse(type_description) { - return Ok(ordered_ty(ty)); + return ordered_ty(ty); }; if let Ok(encoded) = eip712_parser::EncodeType::parse(type_description) { @@ -695,7 +710,7 @@ pub(super) fn resolve_type( } // Get the alphabetically-sorted type from the resolver, and reorder if necessary. - return Ok(ordered_ty(resolver.resolve(main_type)?)); + return ordered_ty(resolver.resolve(main_type)?); } bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string") @@ -706,7 +721,7 @@ pub(super) fn resolve_type( /// /// This is necessary because the EIP-712 resolver sorts struct fields alphabetically, /// but we want to respect the order defined in the Solidity source code. 
-fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { +fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> Result { match ty { DynSolType::CustomStruct { name, prop_names, tuple } => { if let Some(def) = struct_defs.get(&name) { @@ -719,35 +734,43 @@ fn reorder_type(ty: DynSolType, struct_defs: &StructDefinitions) -> DynSolType { for (field_name, _) in def { sorted_props.push(field_name.clone()); if let Some(field_ty) = type_map.get(field_name) { - sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)); + sorted_tuple.push(reorder_type(field_ty.clone(), struct_defs)?); + } else { + bail!( + "mismatch between struct definition and type description: field '{field_name}' not found in provided type for struct '{name}'" + ); } - // NOTE(rusowsky): Should we bail if there is a missing field? } - DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple } + Ok(DynSolType::CustomStruct { name, prop_names: sorted_props, tuple: sorted_tuple }) } else { // No definition found, so we can't reorder. However, we still reorder its children // in case they have known structs. 
- let new_tuple = tuple.into_iter().map(|t| reorder_type(t, struct_defs)).collect(); - DynSolType::CustomStruct { name, prop_names, tuple: new_tuple } + let new_tuple = tuple + .into_iter() + .map(|t| reorder_type(t, struct_defs)) + .collect::>>()?; + Ok(DynSolType::CustomStruct { name, prop_names, tuple: new_tuple }) } } - DynSolType::Array(inner) => DynSolType::Array(Box::new(reorder_type(*inner, struct_defs))), - DynSolType::FixedArray(inner, len) => { - DynSolType::FixedArray(Box::new(reorder_type(*inner, struct_defs)), len) + DynSolType::Array(inner) => { + Ok(DynSolType::Array(Box::new(reorder_type(*inner, struct_defs)?))) } - DynSolType::Tuple(inner) => { - DynSolType::Tuple(inner.into_iter().map(|t| reorder_type(t, struct_defs)).collect()) + DynSolType::FixedArray(inner, len) => { + Ok(DynSolType::FixedArray(Box::new(reorder_type(*inner, struct_defs)?), len)) } - _ => ty, + DynSolType::Tuple(inner) => Ok(DynSolType::Tuple( + inner.into_iter().map(|t| reorder_type(t, struct_defs)).collect::>>()?, + )), + _ => Ok(ty), } } #[cfg(test)] mod tests { use super::*; - use alloy_primitives::FixedBytes; - use proptest::strategy::Strategy; - use std::collections::HashMap; + use alloy_primitives::{FixedBytes, U256}; + use proptest::{arbitrary::any, prop_oneof, strategy::Strategy}; + use std::collections::HashSet; fn contains_tuple(value: &DynSolValue) -> bool { match value { @@ -784,17 +807,60 @@ mod tests { } fn guessable_types() -> impl proptest::strategy::Strategy { - proptest::arbitrary::any::() + any::() .prop_map(fixup_guessable) .prop_filter("tuples are not supported", |v| !contains_tuple(v)) .prop_filter("filter out values without type", |v| v.as_type().is_some()) } + /// A proptest strategy for generating a (simple) `DynSolValue::CustomStruct` + /// and its corresponding `StructDefinitions` object. + fn custom_struct_strategy() -> impl Strategy { + // Define a strategy for basic field names and values. 
+ let field_name_strat = "[a-z]{4,12}"; + let field_value_strat = prop_oneof![ + any::().prop_map(DynSolValue::Bool), + any::().prop_map(|v| DynSolValue::Uint(U256::from(v), 256)), + any::<[u8; 20]>().prop_map(Address::from).prop_map(DynSolValue::Address), + any::<[u8; 32]>().prop_map(B256::from).prop_map(|b| DynSolValue::FixedBytes(b, 32)), + ".*".prop_map(DynSolValue::String), + ]; + + // Combine them to create a list of unique fields that preserve the random order. + let fields_strat = proptest::collection::vec((field_name_strat, field_value_strat), 1..8) + .prop_map(|fields| { + let mut unique_fields = Vec::with_capacity(fields.len()); + let mut seen_names = HashSet::new(); + for (name, value) in fields { + if seen_names.insert(name.clone()) { + unique_fields.push((name, value)); + } + } + unique_fields + }); + + // Generate the `CustomStruct` and its definition. + ("[A-Z][a-z]{4,8}", fields_strat).prop_map(|(struct_name, fields)| { + let (prop_names, tuple): (Vec, Vec) = + fields.clone().into_iter().unzip(); + let def_fields: Vec<(String, String)> = fields + .iter() + .map(|(name, value)| (name.clone(), value.as_type().unwrap().to_string())) + .collect(); + let mut defs_map = BTreeMap::new(); + defs_map.insert(struct_name.clone(), def_fields); + ( + StructDefinitions::new(defs_map), + DynSolValue::CustomStruct { name: struct_name, prop_names, tuple }, + ) + }) + } + // Tests to ensure that conversion [DynSolValue] -> [serde_json::Value] -> [DynSolValue] proptest::proptest! { #[test] fn test_json_roundtrip_guessed(v in guessable_types()) { - let json = serialize_value_as_json(v.clone()).unwrap(); + let json = serialize_value_as_json(v.clone(), &StructDefinitions::default()).unwrap(); let value = json_value_to_token(&StructDefinitions::default(), &json).unwrap(); // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. 
@@ -803,17 +869,25 @@ mod tests { } #[test] - fn test_json_roundtrip(v in proptest::arbitrary::any::().prop_filter("filter out values without type", |v| v.as_type().is_some())) { - let json = serialize_value_as_json(v.clone()).unwrap(); + fn test_json_roundtrip(v in any::().prop_filter("filter out values without type", |v| v.as_type().is_some())) { + let json = serialize_value_as_json(v.clone(), &StructDefinitions::default()).unwrap(); let value = parse_json_as(&json, &v.as_type().unwrap()).unwrap(); - assert_eq!(value, v); + assert_eq!(value, v); + } + + #[test] + fn test_json_roundtrip_with_struct_defs((struct_defs, v) in custom_struct_strategy()) { + let json = serialize_value_as_json(v.clone(), &struct_defs).unwrap(); + let sol_type = v.as_type().unwrap(); + let parsed_value = parse_json_as(&json, &sol_type).unwrap(); + assert_eq!(parsed_value, v); } } #[test] fn test_resolve_type_with_definitions() -> Result<()> { // Define a struct with fields in a specific order (not alphabetical) - let mut defs_map = HashMap::new(); + let mut defs_map = BTreeMap::new(); defs_map.insert( "Apple".to_string(), vec![ @@ -880,7 +954,7 @@ mod tests { #[test] fn test_resolve_type_for_array_of_structs() -> Result<()> { // Define a struct with fields in a specific, non-alphabetical order. - let mut defs_map = HashMap::new(); + let mut defs_map = BTreeMap::new(); defs_map.insert( "Item".to_string(), vec![ @@ -914,7 +988,7 @@ mod tests { #[test] fn test_parse_json_missing_field() { // Define a struct with a specific field order. - let mut defs_map = HashMap::new(); + let mut defs_map = BTreeMap::new(); defs_map.insert( "Person".to_string(), vec![ @@ -939,4 +1013,80 @@ mod tests { assert!(result.is_err()); assert!(result.unwrap_err().to_string().contains("field \"age\" not found in JSON object")); } + + #[test] + fn test_serialize_json_with_struct_def_order() { + // Define a struct with a specific, non-alphabetical field order. 
+ let mut defs_map = BTreeMap::new(); + defs_map.insert( + "Item".to_string(), + vec![ + ("name".to_string(), "string".to_string()), + ("id".to_string(), "uint256".to_string()), + ("active".to_string(), "bool".to_string()), + ], + ); + let struct_defs = StructDefinitions::new(defs_map); + + // Create a DynSolValue instance for the struct. + let item_struct = DynSolValue::CustomStruct { + name: "Item".to_string(), + prop_names: vec!["name".to_string(), "id".to_string(), "active".to_string()], + tuple: vec![ + DynSolValue::String("Test Item".to_string()), + DynSolValue::Uint(U256::from(123), 256), + DynSolValue::Bool(true), + ], + }; + + // Serialize the value to JSON and verify that the order is preserved. + let json_value = serialize_value_as_json(item_struct, &struct_defs).unwrap(); + let json_string = serde_json::to_string(&json_value).unwrap(); + assert_eq!(json_string, r#"{"name":"Test Item","id":123,"active":true}"#); + } + + #[test] + fn test_json_full_cycle_typed_with_struct_defs() { + // Define a struct with a specific, non-alphabetical field order. + let mut defs_map = BTreeMap::new(); + defs_map.insert( + "Wallet".to_string(), + vec![ + ("owner".to_string(), "address".to_string()), // Order: owner, balance, id + ("balance".to_string(), "uint256".to_string()), // Alphabetical: balance, id, owner + ("id".to_string(), "bytes32".to_string()), + ], + ); + let struct_defs = StructDefinitions::new(defs_map); + + // Create the "original" DynSolValue instance. + let owner_address = Address::from([1; 20]); + let wallet_id = B256::from([2; 32]); + let original_wallet = DynSolValue::CustomStruct { + name: "Wallet".to_string(), + prop_names: vec!["owner".to_string(), "balance".to_string(), "id".to_string()], + tuple: vec![ + DynSolValue::Address(owner_address), + DynSolValue::Uint(U256::from(5000), 256), + DynSolValue::FixedBytes(wallet_id, 32), + ], + }; + + // Serialize it. The resulting JSON should respect the struct definition order. 
+ let json_value = serialize_value_as_json(original_wallet.clone(), &struct_defs).unwrap(); + let json_string = serde_json::to_string(&json_value).unwrap(); + assert_eq!( + json_string, + format!(r#"{{"owner":"{}","balance":5000,"id":"{}"}}"#, owner_address, wallet_id) + ); + + // Resolve the type, which should also respect the struct definition order. + let type_description = "Wallet(uint256 balance,bytes32 id,address owner)"; + let resolved_type = resolve_type(type_description, Some(&struct_defs)).unwrap(); + + // Parse the JSON using the correctly ordered resolved type. Ensure that it is identical to + // the original one. + let parsed_value = parse_json_as(&json_value, &resolved_type).unwrap(); + assert_eq!(parsed_value, original_wallet); + } } diff --git a/crates/cheatcodes/src/toml.rs b/crates/cheatcodes/src/toml.rs index 0d7864796affc..e36b26f244966 100644 --- a/crates/cheatcodes/src/toml.rs +++ b/crates/cheatcodes/src/toml.rs @@ -201,7 +201,7 @@ fn parse_toml_str(toml: &str) -> Result { /// Parse a TOML string and return the value at the given path. fn parse_toml(state: &Cheatcodes, toml: &str, key: &str) -> Result { - parse_json(&state.struct_defs, &toml_to_json_string(toml)?, key) + parse_json(&toml_to_json_string(toml)?, key, &state.struct_defs) } /// Parse a TOML string and return the value at the given path, coercing it to the given type. 
diff --git a/crates/common/src/sema.rs b/crates/common/src/sema.rs index 866e3fccbdd65..9347d591c1320 100644 --- a/crates/common/src/sema.rs +++ b/crates/common/src/sema.rs @@ -5,33 +5,33 @@ use solar_sema::{ GcxWrapper, Hir, hir, ty::{Ty, TyKind}, }; -use std::{collections::HashMap, ops::Deref, sync::Arc}; +use std::{collections::BTreeMap, ops::Deref, sync::Arc}; #[derive(Debug, Clone)] -pub struct StructDefinitions(Arc>>); +pub struct StructDefinitions(Arc>>); impl StructDefinitions { - pub fn new(map: HashMap>) -> Self { + pub fn new(map: BTreeMap>) -> Self { Self(Arc::new(map)) } } impl Default for StructDefinitions { fn default() -> Self { - Self(Arc::new(HashMap::new())) + Self(Arc::new(BTreeMap::new())) } } impl Deref for StructDefinitions { - type Target = HashMap>; + type Target = BTreeMap>; fn deref(&self) -> &Self::Target { &self.0 } } -impl AsRef>>> for StructDefinitions { - fn as_ref(&self) -> &Arc>> { +impl AsRef>>> for StructDefinitions { + fn as_ref(&self) -> &Arc>> { &self.0 } } @@ -39,13 +39,13 @@ impl AsRef>>> for StructDefinitions { /// Generates a map of all struct definitions from the HIR using the resolved `Ty` system. pub struct SemanticAnalysisProcessor<'hir> { gcx: GcxWrapper<'hir>, - struct_defs: HashMap>, + struct_defs: BTreeMap>, } impl<'hir> SemanticAnalysisProcessor<'hir> { /// Constructs a new generator. pub fn new(gcx: GcxWrapper<'hir>) -> Self { - Self { gcx, struct_defs: HashMap::new() } + Self { gcx, struct_defs: BTreeMap::new() } } /// Processes the HIR to generate all the struct definitions. 
diff --git a/crates/forge/tests/cli/bind_json.rs b/crates/forge/tests/cli/bind_json.rs index bb4d644fc8485..a9b052d662ef8 100644 --- a/crates/forge/tests/cli/bind_json.rs +++ b/crates/forge/tests/cli/bind_json.rs @@ -124,45 +124,62 @@ library JsonBindings { cmd.forge_fuse().args(["test"]).assert_success(); }); -// tests enhanced `vm.parseJson` cheatcode, which isn't constraint to alphabetical ordering of the -// struct types, partially closing the gap with `forge bind-json`. +// tests enhanced `vm.parseJson` and `vm.serializeJson` cheatcodes, which are not constrained to +// alphabetical ordering of struct keys, but rather respect the Solidity struct definition. forgetest_init!(test_parse_json, |prj, cmd| { prj.add_test( "JsonCheats", r#" import {Test} from "forge-std/Test.sol"; +// Definition order: color, sweetness, sourness +// Alphabetical order: color, sourness, sweetness struct Apple { string color; uint8 sweetness; uint8 sourness; } +// Definition order: name, apples +// Alphabetical order: apples, name struct FruitStall { string name; Apple[] apples; } -contract JsonParseCheatsTest is Test { - function testJsonParseOrder() public { - string memory json = +contract SimpleJsonCheatsTest is Test { + function testJsonParseAndSerialize() public { + // Initial JSON has keys in a custom order, different from definition and alphabetical. 
+ string memory originalJson = '{"name":"Fresh Fruit","apples":[{"sweetness":7,"sourness":3,"color":"Red"},{"sweetness":5,"sourness":5,"color":"Green"}]}'; - bytes memory decoded = vm.parseJson(json); - FruitStall memory stall = abi.decode(decoded, (FruitStall)); - - assertEq(stall.apples.length, 2); - assertEq(stall.name, "Fresh Fruit"); - - Apple memory appple = stall.apples[0]; - assertEq(appple.color, "Red"); - assertEq(appple.sweetness, 7); - assertEq(appple.sourness, 3); - - appple = stall.apples[1]; - assertEq(appple.color, "Green"); - assertEq(appple.sweetness, 5); - assertEq(appple.sourness, 5); + // Parse the original JSON. The parser should correctly handle the unordered keys. + bytes memory decoded = vm.parseJson(originalJson); + FruitStall memory originalType = abi.decode(decoded, (FruitStall)); + + // Assert initial parsing is correct + assertEq(originalType.name, "Fresh Fruit"); + assertEq(originalType.apples[0].color, "Red"); + assertEq(originalType.apples[0].sweetness, 7); + assertEq(originalType.apples[1].sourness, 5); + + // Serialize the struct back to JSON. `vm.serializeJsonType` should respect the order for all keys. + string memory serializedJson = vm.serializeJsonType( + "FruitStall(Apple[] apples,string name)Apple(string color,uint8 sourness,uint8 sweetness)", + abi.encode(originalType) + ); + + // The expected JSON should have keys ordered according to the struct definitions. + string memory expectedJson = + '{"name":"Fresh Fruit","apples":[{"color":"Red","sweetness":7,"sourness":3},{"color":"Green","sweetness":5,"sourness":5}]}'; + assertEq(serializedJson, expectedJson); + + // Parse the newly serialized JSON to complete the cycle. + bytes memory redecoded = vm.parseJson(serializedJson); + FruitStall memory finalType = abi.decode(redecoded, (FruitStall)); + + // Assert that the struct from the full cycle is identical to the original parsed struct.
+ assertEq(keccak256(abi.encode(finalType)), keccak256(abi.encode(originalType))); } } "#, From 0b204b8e43ed57d51d63b84d201634b8a7f5f4d3 Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Wed, 23 Jul 2025 08:25:44 +0200 Subject: [PATCH 7/8] style: clippy --- crates/cheatcodes/src/json.rs | 6 +++--- crates/forge/tests/cli/bind_json.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index dc170511754a7..9e49c61b1db17 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -1052,8 +1052,8 @@ mod tests { defs_map.insert( "Wallet".to_string(), vec![ - ("owner".to_string(), "address".to_string()), // Order: owner, balance, id - ("balance".to_string(), "uint256".to_string()), // Alphabetical: balance, id, owner + ("owner".to_string(), "address".to_string()), + ("balance".to_string(), "uint256".to_string()), ("id".to_string(), "bytes32".to_string()), ], ); @@ -1077,7 +1077,7 @@ mod tests { let json_string = serde_json::to_string(&json_value).unwrap(); assert_eq!( json_string, - format!(r#"{{"owner":"{}","balance":5000,"id":"{}"}}"#, owner_address, wallet_id) + format!(r#"{{"owner":"{owner_address}","balance":5000,"id":"{wallet_id}"}}"#) ); // Resolve the type, which should also respect the struct definition order. 
diff --git a/crates/forge/tests/cli/bind_json.rs b/crates/forge/tests/cli/bind_json.rs index a9b052d662ef8..fc6aa1c2b2db4 100644 --- a/crates/forge/tests/cli/bind_json.rs +++ b/crates/forge/tests/cli/bind_json.rs @@ -70,7 +70,7 @@ interface Vm { function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json); function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json); } - + library JsonBindings { Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); From 06d3a7a5930390be461a01624135c4ab68e8c5fb Mon Sep 17 00:00:00 2001 From: 0xrusowsky <0xrusowsky@proton.me> Date: Wed, 23 Jul 2025 16:44:34 +0200 Subject: [PATCH 8/8] fix: do not reload file into pcx --- crates/forge/src/cmd/test/mod.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/forge/src/cmd/test/mod.rs b/crates/forge/src/cmd/test/mod.rs index 35b7d6f24e0cc..8d7c8a4972a77 100644 --- a/crates/forge/src/cmd/test/mod.rs +++ b/crates/forge/src/cmd/test/mod.rs @@ -323,13 +323,8 @@ impl TestArgs { let mut sess = Session::builder().with_stderr_emitter().build(); sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); - let mut pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?; - - let sess = pcx.sess; + let pcx = solar_pcx_from_build_opts(&sess, &self.build, Some(&project), Some(&input))?; let struct_defs = sess.enter_parallel(|| -> Result { - // Load all files into the parsing ctx - pcx.load_files(input).map_err(|_| eyre::eyre!("Error loding files"))?; - // Parse and lower to HIR let hir_arena = solar_sema::thread_local::ThreadLocal::new(); let hir_result = pcx.parse_and_lower(&hir_arena);