diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 054cfdd429..0330cebb3b 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -181,41 +181,29 @@ jobs: - name: Cargo check all targets. run: cargo check --all-targets - # Next, check subxt features. + # Next, check each subxt feature in isolation. # - `native` feature must always be enabled # - `web` feature is always ignored. - # - This means, don't check --no-default-features and don't try enabling --all-features; both will fail - name: Cargo hack; check each subxt feature - run: cargo hack -p subxt --each-feature check --exclude-no-default-features --exclude-all-features --exclude-features web --features native + run: cargo hack -p subxt --each-feature check --exclude-features web --features native + # Same with subxt-historic - name: Cargo hack; check each subxt feature - run: cargo hack -p subxt-historic --each-feature check --exclude-no-default-features --exclude-all-features --exclude-features web --features native + run: cargo hack -p subxt-historic --each-feature check --exclude-features web --features native - # Subxt-signer has the "subxt" features enabled in the "check all targets" test. Run it on its own to - # check it without. We can't enable subxt or web features here, so no cargo hack. - - name: Cargo check subxt-signer - run: | - cargo check -p subxt-signer - cargo check -p subxt-signer --no-default-features --features sr25519 - cargo check -p subxt-signer --no-default-features --features ecdsa - cargo check -p subxt-signer --no-default-features --features unstable-eth + # And with subxt-rpcs + - name: Cargo hack; check each subxt-rpcs feature + run: cargo hack -p subxt-rpcs --each-feature check --exclude-features web --features native - # Subxt-rpcs has a bunch of clients that can be exposed. Check that they all stand on their own. - - name: Cargo check subxt-rpcs - run: | - cargo check -p subxt-rpcs - cargo check -p subxt-rpcs --no-default-features --features native - cargo check -p subxt-rpcs --no-default-features --features native,subxt - cargo check -p subxt-rpcs --no-default-features --features native,jsonrpsee - cargo check -p subxt-rpcs --no-default-features --features native,reconnecting-rpc-client - cargo check -p subxt-rpcs --no-default-features --features native,mock-rpc-client - cargo check -p subxt-rpcs --no-default-features --features native,unstable-light-client - - # We can't enable web features here, so no cargo hack. + # And with subxt-signer (seems to work with a more basic check here; disable web if it becomes an issue). + - name: Cargo hack; check each subxt-signer feature + run: cargo hack -p subxt-signer --each-feature check + + # And for subxt-lightclient. - name: Cargo check subxt-lightclient - run: cargo check -p subxt-lightclient + run: cargo hack -p subxt-lightclient --each-feature check --exclude-features web --features native - # Next, check each other package in isolation. + # Next, check all other crates. 
- name: Cargo hack; check each feature/crate on its own run: cargo hack --exclude subxt --exclude subxt-historic --exclude subxt-signer --exclude subxt-lightclient --exclude subxt-rpcs --exclude-all-features --each-feature check --workspace diff --git a/Cargo.lock b/Cargo.lock index 4aba7f6e1e..f3d191e3b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1953,9 +1953,9 @@ dependencies = [ [[package]] name = "frame-decode" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e5c3badfabd704dda4ddc7fafcd09127e8661d1cca2f16556c6826166932c87" +checksum = "f4d325134db7604b9649bc5428cbbc35e01f667ef684ccd3e5f4fc5d59938bdf" dependencies = [ "frame-metadata 23.0.0", "parity-scale-codec", @@ -5800,6 +5800,8 @@ dependencies = [ "hashbrown 0.14.5", "parity-scale-codec", "scale-info", + "scale-info-legacy", + "scale-type-resolver", "sp-crypto-hashing", "subxt-utils-stripmetadata", "thiserror 2.0.12", diff --git a/Cargo.toml b/Cargo.toml index d31a168b40..fd836cd1af 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -81,7 +81,7 @@ darling = "0.20.10" derive-where = "1.2.7" either = { version = "1.13.0", default-features = false } finito = { version = "0.1.0", default-features = false } -frame-decode = { version = "0.12.0", default-features = false } +frame-decode = { version = "0.12.1", default-features = false } frame-metadata = { version = "23.0.0", default-features = false } futures = { version = "0.3.31", default-features = false, features = ["std"] } getrandom = { version = "0.2", default-features = false } diff --git a/core/src/blocks/extrinsics.rs b/core/src/blocks/extrinsics.rs index 77f6f7c26d..94de10ade3 100644 --- a/core/src/blocks/extrinsics.rs +++ b/core/src/blocks/extrinsics.rs @@ -494,7 +494,7 @@ mod tests { let metadata = metadata(); // Except our metadata to contain the registered types. 
- let pallet = metadata.pallet_by_index(0).expect("pallet exists"); + let pallet = metadata.pallet_by_call_index(0).expect("pallet exists"); let extrinsic = pallet .call_variant_by_index(2) .expect("metadata contains the RuntimeCall enum with this pallet"); diff --git a/core/src/events.rs b/core/src/events.rs index e6246cbce3..9d38517910 100644 --- a/core/src/events.rs +++ b/core/src/events.rs @@ -262,7 +262,7 @@ impl EventDetails { // Get metadata for the event: let event_pallet = metadata - .pallet_by_index(pallet_index) + .pallet_by_event_index(pallet_index) .ok_or_else(|| EventsError::CannotFindPalletWithIndex(pallet_index))?; let event_variant = event_pallet .event_variant_by_index(variant_index) @@ -359,7 +359,7 @@ impl EventDetails { pub fn event_metadata(&self) -> EventMetadataDetails<'_> { let pallet = self .metadata - .pallet_by_index(self.pallet_index()) + .pallet_by_event_index(self.pallet_index()) .expect("event pallet to be found; we did this already during decoding"); let variant = pallet .event_variant_by_index(self.variant_index()) diff --git a/core/src/tx/payload.rs b/core/src/tx/payload.rs index 765ef64967..e87c38a98f 100644 --- a/core/src/tx/payload.rs +++ b/core/src/tx/payload.rs @@ -182,7 +182,7 @@ impl Payload for DefaultPayload { call_name: self.call_name.to_string(), })?; - let pallet_index = pallet.index(); + let pallet_index = pallet.call_index(); let call_index = call.index; pallet_index.encode_to(out); diff --git a/metadata/Cargo.toml b/metadata/Cargo.toml index 425dc6698d..31a213bfdd 100644 --- a/metadata/Cargo.toml +++ b/metadata/Cargo.toml @@ -14,11 +14,23 @@ homepage.workspace = true description = "Command line utilities for checking metadata compatibility between nodes." [features] -default = ["std"] +default = ["std", "legacy"] std = ["scale-info/std", "frame-metadata/std"] +# Enable decoding of legacy metadata, too. +# std required by frame-metadata to decode { + $vis fn $fn_name(metadata: &$metadata, mut types: TypeRegistrySet<'_>) -> Result { + // Extend the types with important information from the metadata: + { + let builtin_types = frame_decode::helpers::type_registry_from_metadata(metadata) + .map_err(Error::CannotEnhanceTypesFromMetadata)?; + types.prepend(builtin_types); + } + + // This will be used to construct our `PortableRegistry` from old-style types. + let mut portable_registry_builder = PortableRegistryBuilder::new(&types); + + // We use this type in a few places to denote that we don't know how to decode it. + let unknown_type_id = portable_registry_builder.add_type_str("special::Unknown", None) + .map_err(|e| Error::add_type("constructing 'Unknown' type", e))?; + + // Pallet metadata + let mut call_index = 0u8; + let mut error_index = 0u8; + let mut event_index = 0u8; + + let new_pallets = as_decoded(&metadata.modules).iter().map(|pallet| { + // In older metadatas, calls and event enums can have different indexes + // in a given pallet. Pallets without calls or events don't increment + // the respective index for them. + // + // We assume since errors are non optional, that the pallet index _always_ + // increments for errors (no `None`s to skip). + let (call_index, event_index, error_index) = { + let out = (call_index, event_index, error_index); + if pallet.calls.is_some() { + call_index += 1; + } + if pallet.event.is_some() { + event_index += 1; + } + error_index += 1; + + out + }; + + // For v12 and v13 metadata, there is a builtin index for everything in a pallet. 
+ // We enable this logic for those metadatas to get the correct index. + $( + let $builtin_index = true; + let (call_index, event_index, error_index) = if $builtin_index { + (pallet.index, pallet.index, pallet.index) + } else { + (call_index, event_index, error_index) + }; + )? + + let pallet_name = as_decoded(&pallet.name).to_string(); + + // Storage entries: + let storage = pallet.storage.as_ref().map(|s| { + let storage = as_decoded(s); + let prefix = as_decoded(&storage.prefix); + let entries = metadata.storage_entries_in_pallet(&pallet_name).map(|entry_name| { + let info = metadata + .storage_info(&pallet_name, &entry_name) + .map_err(|e| Error::StorageInfoError(e.into_owned()))?; + let entry_name = entry_name.into_owned(); + + let info = info.map_ids(|old_id| { + portable_registry_builder.add_type(old_id) + }).map_err(|e| { + let ctx = format!("adding type used in storage entry {pallet_name}.{entry_name}"); + Error::add_type(ctx, e) + })?; + + let entry = crate::StorageEntryMetadata { + name: entry_name.clone(), + info: info.into_owned(), + // We don't expose docs via our storage info yet. + docs: Vec::new(), + }; + + Ok((entry_name, entry)) + }).collect::, _>>()?; + + Ok(crate::StorageMetadata { + prefix: prefix.clone(), + entries, + }) + }).transpose()?; + + // Pallet error type is just a builtin type: + let error_ty = portable_registry_builder.add_type_str(&format!("builtin::module::error::{pallet_name}"), None) + .map_err(|e| { + let ctx = format!("converting the error enum for pallet {pallet_name}"); + Error::add_type(ctx, e) + })?; + + // Pallet calls also just a builtin type: + let call_ty = pallet.calls.as_ref().map(|_| { + portable_registry_builder.add_type_str(&format!("builtin::module::call::{pallet_name}"), None) + .map_err(|e| { + let ctx = format!("converting the call enum for pallet {pallet_name}"); + Error::add_type(ctx, e) + }) + }).transpose()?; + + // Pallet events also just a builtin type: + let event_ty = pallet.event.as_ref().map(|_| { + portable_registry_builder.add_type_str(&format!("builtin::module::event::{pallet_name}"), None) + .map_err(|e| { + let ctx = format!("converting the event enum for pallet {pallet_name}"); + Error::add_type(ctx, e) + }) + }).transpose()?; + + let call_variant_index = + VariantIndex::build(call_ty, portable_registry_builder.types()); + let error_variant_index = + VariantIndex::build(Some(error_ty), portable_registry_builder.types()); + let event_variant_index = + VariantIndex::build(event_ty, portable_registry_builder.types()); + + let constants = metadata.constants_in_pallet(&pallet_name).map(|name| { + let name = name.into_owned(); + let info = metadata.constant_info(&pallet_name, &name) + .map_err(|e| Error::ConstantInfoError(e.into_owned()))?; + let new_type_id = portable_registry_builder.add_type(info.type_id) + .map_err(|e| { + let ctx = format!("converting the constant {name} for pallet {pallet_name}"); + Error::add_type(ctx, e) + })?; + + let constant = crate::ConstantMetadata { + name: name.clone(), + ty: new_type_id, + value: info.bytes.to_vec(), + // We don't expose docs via our constant info yet. 
+ docs: Vec::new(), + }; + + Ok((name, constant)) + }).collect::>()?; + + let pallet_metadata = crate::PalletMetadataInner { + name: pallet_name.clone(), + call_index, + event_index, + error_index, + storage, + error_ty: Some(error_ty), + call_ty, + event_ty, + call_variant_index, + error_variant_index, + event_variant_index, + constants, + view_functions: Default::default(), + associated_types: Default::default(), + // Pallets did not have docs prior to V15. + docs: Default::default(), + }; + + Ok((pallet_name, pallet_metadata)) + }).collect::,Error>>()?; + + // Extrinsic metadata + let new_extrinsic = { + let signature_info = metadata + .extrinsic_signature_info() + .map_err(|e| Error::ExtrinsicInfoError(e.into_owned()))?; + + let address_ty_id = portable_registry_builder.add_type(signature_info.address_id) + .map_err(|_| Error::CannotFindAddressType)?; + + let signature_ty_id = portable_registry_builder.add_type(signature_info.signature_id) + .map_err(|_| Error::CannotFindSignatureType)?; + + let transaction_extensions = metadata + .extrinsic_extension_info(None) + .map_err(|e| Error::ExtrinsicInfoError(e.into_owned()))? + .extension_ids + .into_iter() + .map(|ext| { + let ext_name = ext.name.into_owned(); + let ext_type = portable_registry_builder.add_type(ext.id) + .map_err(|e| { + let ctx = format!("converting the signed extension {ext_name}"); + Error::add_type(ctx, e) + })?; + + Ok(crate::TransactionExtensionMetadataInner { + identifier: ext_name, + extra_ty: ext_type, + // This only started existing in V14+ metadata, but in any case, + // we don't need to know how to decode the signed payload for + // historic blocks (hopefully), so set to unknown. + additional_ty: unknown_type_id.into() + }) + }) + .collect::,Error>>()?; + + let transaction_extensions_by_version = BTreeMap::from_iter([( + 0, + (0..transaction_extensions.len() as u32).collect() + )]); + + crate::ExtrinsicMetadata { + address_ty: address_ty_id.into(), + signature_ty: signature_ty_id.into(), + supported_versions: Vec::from_iter([4]), + transaction_extensions, + transaction_extensions_by_version, + } + }; + + // Outer enum types + let outer_enums = crate::OuterEnumsMetadata { + call_enum_ty: portable_registry_builder.add_type_str("builtin::Call", None) + .map_err(|e| { + let ctx = format!("constructing the 'builtin::Call' type to put in the OuterEnums metadata"); + Error::add_type(ctx, e) + })?, + event_enum_ty: portable_registry_builder.add_type_str("builtin::Event", None) + .map_err(|e| { + let ctx = format!("constructing the 'builtin::Event' type to put in the OuterEnums metadata"); + Error::add_type(ctx, e) + })?, + error_enum_ty: portable_registry_builder.add_type_str("builtin::Error", None) + .map_err(|e| { + let ctx = format!("constructing the 'builtin::Error' type to put in the OuterEnums metadata"); + Error::add_type(ctx, e) + })?, + }; + + // These are all the same in V13, but be explicit anyway for clarity. + let pallets_by_call_index = new_pallets + .values() + .iter() + .enumerate() + .map(|(idx,p)| (p.call_index, idx)) + .collect(); + let pallets_by_error_index = new_pallets + .values() + .iter() + .enumerate() + .map(|(idx,p)| (p.error_index, idx)) + .collect(); + let pallets_by_event_index = new_pallets + .values() + .iter() + .enumerate() + .map(|(idx,p)| (p.event_index, idx)) + .collect(); + + // This is optional in the sense that Subxt will return an error if it needs to decode this type, + // and I think for historic metadata we wouldn't end up down that path anyway.
Historic metadata + // tends to call it just "DispatchError" but search more specific paths first. + let dispatch_error_ty = portable_registry_builder + .try_add_type_str("hardcoded::DispatchError", None) + .or_else(|| portable_registry_builder.try_add_type_str("sp_runtime::DispatchError", None)) + .or_else(|| portable_registry_builder.try_add_type_str("DispatchError", None)) + .transpose() + .map_err(|e| Error::add_type("constructing DispatchError", e))?; + + // Runtime API definitions live with type definitions. + let apis = type_registry_to_runtime_apis(&types, &mut portable_registry_builder)?; + + Ok(crate::Metadata { + types: portable_registry_builder.finish(), + pallets: new_pallets, + pallets_by_call_index, + pallets_by_error_index, + pallets_by_event_index, + extrinsic: new_extrinsic, + outer_enums, + dispatch_error_ty, + apis, + // Nothing custom existed in V13 + custom: v15::CustomMetadata { map: Default::default() }, + }) + }} +} + +from_historic!(pub fn from_v13(frame_metadata::v13::RuntimeMetadataV13, builtin_index: yes)); +from_historic!(pub fn from_v12(frame_metadata::v12::RuntimeMetadataV12, builtin_index: yes)); +from_historic!(pub fn from_v11(frame_metadata::v11::RuntimeMetadataV11)); +from_historic!(pub fn from_v10(frame_metadata::v10::RuntimeMetadataV10)); +from_historic!(pub fn from_v9(frame_metadata::v9::RuntimeMetadataV9)); +from_historic!(pub fn from_v8(frame_metadata::v8::RuntimeMetadataV8)); + +fn as_decoded(item: &frame_metadata::decode_different::DecodeDifferent) -> &B { + match item { + frame_metadata::decode_different::DecodeDifferent::Encode(_a) => { + panic!("Expecting decoded data") + } + frame_metadata::decode_different::DecodeDifferent::Decoded(b) => b, + } +} + +// Obtain Runtime API information from some type registry. +pub fn type_registry_to_runtime_apis( + types: &TypeRegistrySet<'_>, + portable_registry_builder: &mut PortableRegistryBuilder, +) -> Result, Error> { + let mut apis = OrderedMap::new(); + let mut trait_name = ""; + let mut trait_methods = OrderedMap::new(); + + for api in types.runtime_apis() { + match api { + RuntimeApiName::Trait(name) => { + if !trait_methods.is_empty() { + apis.push_insert( + trait_name.into(), + crate::RuntimeApiMetadataInner { + name: trait_name.into(), + methods: trait_methods, + docs: Vec::new(), + }, + ); + } + trait_methods = OrderedMap::new(); + trait_name = name; + } + RuntimeApiName::Method(name) => { + let info = types + .runtime_api_info(trait_name, name) + .map_err(|e| Error::RuntimeApiInfoError(e.into_owned()))?; + + let info = info.map_ids(|id| { + portable_registry_builder.add_type(id).map_err(|e| { + let c = format!("converting type for runtime API {trait_name}.{name}"); + Error::add_type(c, e) + }) + })?; + + trait_methods.push_insert( + name.to_owned(), + crate::RuntimeApiMethodMetadataInner { + name: name.into(), + info, + docs: Vec::new(), + }, + ); + } + } + } + + Ok(apis) +} + +/// An error encountered converting some legacy metadata to our internal format. +#[allow(missing_docs)] +#[derive(Debug, thiserror::Error)] +pub enum Error { + /// Cannot add a type. 
+ #[error("Cannot add type ({context}): {error}")] + AddTypeError { + context: String, + error: portable_registry_builder::PortableRegistryAddTypeError, + }, + #[error("Cannot enhance the types with information from metadata: {0}")] + CannotEnhanceTypesFromMetadata(scale_info_legacy::lookup_name::ParseError), + #[error("Cannot find 'hardcoded::ExtrinsicAddress' type in legacy types")] + CannotFindAddressType, + #[error("Cannot find 'hardcoded::ExtrinsicSignature' type in legacy types")] + CannotFindSignatureType, + #[error( + "Cannot find 'builtin::Call' type in legacy types (this should have been automatically added)" + )] + CannotFindCallType, + #[error("Cannot obtain the storage information we need to convert storage entries")] + StorageInfoError(frame_decode::storage::StorageInfoError<'static>), + #[error("Cannot obtain the extrinsic information we need to convert transaction extensions")] + ExtrinsicInfoError(frame_decode::extrinsics::ExtrinsicInfoError<'static>), + #[error("Cannot obtain the Runtime API information we need")] + RuntimeApiInfoError(frame_decode::runtime_apis::RuntimeApiInfoError<'static>), + #[error("Cannot obtain the Constant information we need")] + ConstantInfoError(frame_decode::constants::ConstantInfoError<'static>), +} + +impl Error { + /// A shorthand for the [`Error::AddTypeError`] variant. + fn add_type( + context: impl Into, + error: impl Into, + ) -> Self { + Error::AddTypeError { + context: context.into(), + error: error.into(), + } + } +} diff --git a/metadata/src/from/legacy/portable_registry_builder.rs b/metadata/src/from/legacy/portable_registry_builder.rs new file mode 100644 index 0000000000..a44217413c --- /dev/null +++ b/metadata/src/from/legacy/portable_registry_builder.rs @@ -0,0 +1,401 @@ +use alloc::borrow::ToOwned; +use alloc::collections::BTreeMap; +use alloc::string::ToString; +use alloc::vec::Vec; +use scale_info::PortableRegistry; +use scale_info::{PortableType, form::PortableForm}; +use scale_info_legacy::type_registry::TypeRegistryResolveError; +use scale_info_legacy::{LookupName, TypeRegistrySet}; +use scale_type_resolver::{ + BitsOrderFormat, BitsStoreFormat, FieldIter, PathIter, Primitive, ResolvedTypeVisitor, + UnhandledKind, VariantIter, +}; + +#[derive(thiserror::Error, Debug)] +pub enum PortableRegistryAddTypeError { + #[error("Error resolving type: {0}")] + ResolveError(#[from] TypeRegistryResolveError), + #[error("Cannot find type '{0}'")] + TypeNotFound(LookupName), +} + +/// the purpose of this is to convert a (subset of) [`scale_info_legacy::TypeRegistrySet`] +/// into a [`scale_info::PortableRegistry`]. Type IDs from the former are passed in, and +/// type IDs from the latter are handed back. Calling [`PortableRegistryBuilder::finish()`] +/// then hands back a [`scale_info::PortableRegistry`] which these Ids can be used with. +pub struct PortableRegistryBuilder<'info> { + legacy_types: &'info TypeRegistrySet<'info>, + scale_info_types: PortableRegistry, + old_to_new: BTreeMap, +} + +impl<'info> PortableRegistryBuilder<'info> { + /// Instantiate a new [`PortableRegistryBuilder`], providing the set of + /// legacy types you wish to use to construct modern types from. + pub fn new(legacy_types: &'info TypeRegistrySet<'info>) -> Self { + PortableRegistryBuilder { + legacy_types, + scale_info_types: PortableRegistry { + types: Default::default(), + }, + old_to_new: Default::default(), + } + } + + /// Try adding a type, given its string name and optionally the pallet it's scoped to. 
+ pub fn try_add_type_str( + &mut self, + id: &str, + pallet: Option<&str>, + ) -> Option<Result<u32, TypeRegistryResolveError>> { + let mut id = match LookupName::parse(id) { + Ok(id) => id, + Err(e) => { + return Some(Err(TypeRegistryResolveError::LookupNameInvalid( + id.to_owned(), + e, + ))); + } + }; + + if let Some(pallet) = pallet { + id = id.in_pallet(pallet); + } + + self.try_add_type(id) + } + + /// Try adding a type, returning `None` if the type doesn't exist. + pub fn try_add_type( + &mut self, + id: LookupName, + ) -> Option<Result<u32, TypeRegistryResolveError>> { + match self.add_type(id) { + Ok(id) => Some(Ok(id)), + Err(PortableRegistryAddTypeError::TypeNotFound(_)) => None, + Err(PortableRegistryAddTypeError::ResolveError(e)) => Some(Err(e)), + } + } + + /// Add a new legacy type, giving its string ID/name and, if applicable, the pallet that it's seen in, + /// returning the corresponding "modern" type ID to use in its place, or an error if something goes wrong. + pub fn add_type_str( + &mut self, + id: &str, + pallet: Option<&str>, + ) -> Result<u32, PortableRegistryAddTypeError> { + let mut id = LookupName::parse(id) + .map_err(|e| TypeRegistryResolveError::LookupNameInvalid(id.to_owned(), e))?; + + if let Some(pallet) = pallet { + id = id.in_pallet(pallet); + } + + self.add_type(id) + } + + /// Add a new legacy type, returning the corresponding "modern" type ID to use in + /// its place, or an error if something goes wrong. + pub fn add_type(&mut self, id: LookupName) -> Result<u32, PortableRegistryAddTypeError> { + if let Some(new_id) = self.old_to_new.get(&id) { + return Ok(*new_id); + } + + let visitor = PortableRegistryVisitor { + builder: &mut *self, + current_type: &id, + }; + + match visitor + .builder + .legacy_types + .resolve_type(id.clone(), visitor) + { + Ok(Ok(new_id)) => { + self.old_to_new.insert(id, new_id); + Ok(new_id) + } + Ok(Err(e)) => Err(e), + Err(e) => Err(e.into()), + } + } + + /// Return the current [`scale_info::PortableRegistry`]. + pub fn types(&self) -> &PortableRegistry { + &self.scale_info_types + } + + /// Finish adding types and return the modern type registry.
+ pub fn finish(self) -> PortableRegistry { + self.scale_info_types + } + + fn push_type(&mut self, ty: scale_info::Type) -> u32 { + let id = self.scale_info_types.types.len() as u32; + self.scale_info_types.types.push(PortableType { id, ty }); + id + } +} + +struct PortableRegistryVisitor<'a, 'info> { + builder: &'a mut PortableRegistryBuilder<'info>, + current_type: &'a LookupName, +} + +impl<'a, 'info> ResolvedTypeVisitor<'info> for PortableRegistryVisitor<'a, 'info> { + type TypeId = LookupName; + type Value = Result; + + fn visit_unhandled(self, kind: UnhandledKind) -> Self::Value { + panic!("A handler exists for every type, but visit_unhandled({kind:?}) was called"); + } + + fn visit_not_found(self) -> Self::Value { + Err(PortableRegistryAddTypeError::TypeNotFound( + self.current_type.clone(), + )) + } + + fn visit_primitive(self, primitive: Primitive) -> Self::Value { + let p = match primitive { + Primitive::Bool => scale_info::TypeDefPrimitive::Bool, + Primitive::Char => scale_info::TypeDefPrimitive::Char, + Primitive::Str => scale_info::TypeDefPrimitive::Str, + Primitive::U8 => scale_info::TypeDefPrimitive::U8, + Primitive::U16 => scale_info::TypeDefPrimitive::U16, + Primitive::U32 => scale_info::TypeDefPrimitive::U32, + Primitive::U64 => scale_info::TypeDefPrimitive::U64, + Primitive::U128 => scale_info::TypeDefPrimitive::U128, + Primitive::U256 => scale_info::TypeDefPrimitive::U256, + Primitive::I8 => scale_info::TypeDefPrimitive::I8, + Primitive::I16 => scale_info::TypeDefPrimitive::I16, + Primitive::I32 => scale_info::TypeDefPrimitive::I32, + Primitive::I64 => scale_info::TypeDefPrimitive::I64, + Primitive::I128 => scale_info::TypeDefPrimitive::I128, + Primitive::I256 => scale_info::TypeDefPrimitive::I256, + }; + + let primitive_type = scale_info::Type::new( + Default::default(), + core::iter::empty(), + scale_info::TypeDef::Primitive(p), + Default::default(), + ); + + Ok(self.builder.push_type(primitive_type)) + } + + fn visit_sequence>( + self, + path: Path, + inner_type_id: Self::TypeId, + ) -> Self::Value { + let inner_id = self.builder.add_type(inner_type_id)?; + let path = scale_info::Path { + segments: path.map(Into::into).collect(), + }; + let sequence_type = scale_info::Type::new( + path, + core::iter::empty(), + scale_info::TypeDef::Sequence(scale_info::TypeDefSequence { + type_param: inner_id.into(), + }), + Default::default(), + ); + + Ok(self.builder.push_type(sequence_type)) + } + + fn visit_composite(self, path: Path, fields: Fields) -> Self::Value + where + Path: PathIter<'info>, + Fields: FieldIter<'info, Self::TypeId>, + { + let path = scale_info::Path { + segments: path.map(Into::into).collect(), + }; + + let mut scale_info_fields = Vec::>::new(); + for field in fields { + let type_name = field.id.to_string(); + let id = self.builder.add_type(field.id)?; + scale_info_fields.push(scale_info::Field { + name: field.name.map(Into::into), + ty: id.into(), + type_name: Some(type_name), + docs: Default::default(), + }); + } + + let composite_type = scale_info::Type::new( + path, + core::iter::empty(), + scale_info::TypeDef::Composite(scale_info::TypeDefComposite { + fields: scale_info_fields, + }), + Default::default(), + ); + + Ok(self.builder.push_type(composite_type)) + } + + fn visit_array(self, inner_type_id: LookupName, len: usize) -> Self::Value { + let inner_id = self.builder.add_type(inner_type_id)?; + let array_type = scale_info::Type::new( + Default::default(), + core::iter::empty(), + scale_info::TypeDef::Array(scale_info::TypeDefArray { + len: len as 
u32, + type_param: inner_id.into(), + }), + Default::default(), + ); + + Ok(self.builder.push_type(array_type)) + } + + fn visit_tuple(self, type_ids: TypeIds) -> Self::Value + where + TypeIds: ExactSizeIterator, + { + let mut scale_info_fields = Vec::new(); + for old_id in type_ids { + let new_id = self.builder.add_type(old_id)?; + scale_info_fields.push(new_id.into()); + } + + let tuple_type = scale_info::Type::new( + Default::default(), + core::iter::empty(), + scale_info::TypeDef::Tuple(scale_info::TypeDefTuple { + fields: scale_info_fields, + }), + Default::default(), + ); + + Ok(self.builder.push_type(tuple_type)) + } + + fn visit_variant(self, path: Path, variants: Var) -> Self::Value + where + Path: PathIter<'info>, + Fields: FieldIter<'info, Self::TypeId>, + Var: VariantIter<'info, Fields>, + { + let path = scale_info::Path { + segments: path.map(Into::into).collect(), + }; + + let mut scale_info_variants = Vec::new(); + for variant in variants { + let mut scale_info_variant_fields = Vec::>::new(); + for field in variant.fields { + let type_name = field.id.to_string(); + let id = self.builder.add_type(field.id)?; + scale_info_variant_fields.push(scale_info::Field { + name: field.name.map(Into::into), + ty: id.into(), + type_name: Some(type_name), + docs: Default::default(), + }); + } + + scale_info_variants.push(scale_info::Variant { + name: variant.name.to_owned(), + index: variant.index, + fields: scale_info_variant_fields, + docs: Default::default(), + }) + } + + let variant_type = scale_info::Type::new( + path, + core::iter::empty(), + scale_info::TypeDef::Variant(scale_info::TypeDefVariant { + variants: scale_info_variants, + }), + Default::default(), + ); + + Ok(self.builder.push_type(variant_type)) + } + + fn visit_compact(self, inner_type_id: Self::TypeId) -> Self::Value { + let inner_id = self.builder.add_type(inner_type_id)?; + + // Configure the path and type params to maximise compat. + let path = ["parity_scale_codec", "Compact"] + .into_iter() + .map(ToOwned::to_owned) + .collect(); + let type_params = [scale_info::TypeParameter { + name: "T".to_owned(), + ty: Some(inner_id.into()), + }]; + + let compact_type = scale_info::Type::new( + scale_info::Path { segments: path }, + type_params, + scale_info::TypeDef::Compact(scale_info::TypeDefCompact { + type_param: inner_id.into(), + }), + Default::default(), + ); + + Ok(self.builder.push_type(compact_type)) + } + + fn visit_bit_sequence( + self, + store_format: BitsStoreFormat, + order_format: BitsOrderFormat, + ) -> Self::Value { + // These order types are added by default into a `TypeRegistry`, so we + // expect them to exist. Parsing should always succeed. + let order_ty_str = match order_format { + BitsOrderFormat::Lsb0 => "bitvec::order::Lsb0", + BitsOrderFormat::Msb0 => "bitvec::order::Msb0", + }; + let order_ty = LookupName::parse(order_ty_str).unwrap(); + let new_order_ty = self.builder.add_type(order_ty)?; + + // The store types also exist by default. Parsing should always succeed. + let store_ty_str = match store_format { + BitsStoreFormat::U8 => "u8", + BitsStoreFormat::U16 => "u16", + BitsStoreFormat::U32 => "u32", + BitsStoreFormat::U64 => "u64", + }; + let store_ty = LookupName::parse(store_ty_str).unwrap(); + let new_store_ty = self.builder.add_type(store_ty)?; + + // Configure the path and type params to look like BitVec's to try + // and maximise compatibility. 
+ let path = ["bitvec", "vec", "BitVec"] + .into_iter() + .map(ToOwned::to_owned) + .collect(); + let type_params = [ + scale_info::TypeParameter { + name: "Store".to_owned(), + ty: Some(new_store_ty.into()), + }, + scale_info::TypeParameter { + name: "Order".to_owned(), + ty: Some(new_order_ty.into()), + }, + ]; + + let bitseq_type = scale_info::Type::new( + scale_info::Path { segments: path }, + type_params, + scale_info::TypeDef::BitSequence(scale_info::TypeDefBitSequence { + bit_order_type: new_order_ty.into(), + bit_store_type: new_store_ty.into(), + }), + Default::default(), + ); + + Ok(self.builder.push_type(bitseq_type)) + } +} diff --git a/metadata/src/from/mod.rs b/metadata/src/from/mod.rs index 16e0f816d1..5a5ed0c55b 100644 --- a/metadata/src/from/mod.rs +++ b/metadata/src/from/mod.rs @@ -8,6 +8,10 @@ mod v14; mod v15; mod v16; +/// Legacy translation hidden behind the corresponding feature flag. +#[cfg(feature = "legacy")] +pub mod legacy; + /// The metadata versions that we support converting into [`crate::Metadata`]. /// These are ordest from highest to lowest, so that the metadata we'd want to /// pick first is first in the array. diff --git a/metadata/src/from/v14.rs b/metadata/src/from/v14.rs index e8e3fd90a8..ee31cd8537 100644 --- a/metadata/src/from/v14.rs +++ b/metadata/src/from/v14.rs @@ -73,7 +73,9 @@ impl TryFrom for Metadata { name.clone(), PalletMetadataInner { name: name.clone(), - index: p.index, + call_index: p.index, + event_index: p.index, + error_index: p.index, storage, call_ty: p.calls.as_ref().map(|c| c.ty.id), call_variant_index, @@ -99,7 +101,9 @@ impl TryFrom for Metadata { Ok(Metadata { types: m.types, pallets, - pallets_by_index, + pallets_by_call_index: pallets_by_index.clone(), + pallets_by_error_index: pallets_by_index.clone(), + pallets_by_event_index: pallets_by_index, extrinsic: from_extrinsic_metadata(m.extrinsic, missing_extrinsic_type_ids), dispatch_error_ty, outer_enums: OuterEnumsMetadata { diff --git a/metadata/src/from/v15.rs b/metadata/src/from/v15.rs index 157c01a7a0..0a170c1cde 100644 --- a/metadata/src/from/v15.rs +++ b/metadata/src/from/v15.rs @@ -69,7 +69,9 @@ impl TryFrom for Metadata { name.clone(), PalletMetadataInner { name, - index: p.index, + call_index: p.index, + event_index: p.index, + error_index: p.index, storage, call_ty: p.calls.as_ref().map(|c| c.ty.id), call_variant_index, @@ -126,7 +128,9 @@ impl TryFrom for Metadata { Ok(Metadata { types: m.types, pallets, - pallets_by_index, + pallets_by_call_index: pallets_by_index.clone(), + pallets_by_error_index: pallets_by_index.clone(), + pallets_by_event_index: pallets_by_index, extrinsic: from_extrinsic_metadata(m.extrinsic), dispatch_error_ty, apis, diff --git a/metadata/src/from/v16.rs b/metadata/src/from/v16.rs index 83301f70b3..8369620264 100644 --- a/metadata/src/from/v16.rs +++ b/metadata/src/from/v16.rs @@ -88,7 +88,9 @@ impl TryFrom for Metadata { name.clone(), PalletMetadataInner { name, - index: p.index, + call_index: p.index, + event_index: p.index, + error_index: p.index, storage, call_ty: p.calls.as_ref().map(|c| c.ty.id), call_variant_index, @@ -157,7 +159,9 @@ impl TryFrom for Metadata { Ok(Metadata { types: m.types, pallets, - pallets_by_index, + pallets_by_call_index: pallets_by_index.clone(), + pallets_by_error_index: pallets_by_index.clone(), + pallets_by_event_index: pallets_by_index, extrinsic: from_extrinsic_metadata(m.extrinsic), dispatch_error_ty, apis, diff --git a/metadata/src/lib.rs b/metadata/src/lib.rs index b317050afc..d609dc5fa0 100644 --- 
a/metadata/src/lib.rs +++ b/metadata/src/lib.rs @@ -49,6 +49,9 @@ pub use from::SUPPORTED_METADATA_VERSIONS; pub use from::TryFromError; pub use utils::validation::MetadataHasher; +#[cfg(feature = "legacy")] +pub use from::legacy::Error as LegacyFromError; + type CustomMetadataInner = frame_metadata::v15::CustomMetadata; /// Node metadata. This can be constructed by providing some compatible [`frame_metadata`] @@ -60,8 +63,18 @@ pub struct Metadata { types: PortableRegistry, /// Metadata of all the pallets. pallets: OrderedMap, - /// Find the location in the pallet Vec by pallet index. - pallets_by_index: HashMap<u8, usize>, + /// Find the pallet for a given call index. + pallets_by_call_index: HashMap<u8, usize>, + /// Find the pallet for a given event index. + /// + /// For modern metadatas, this is the same as pallets_by_call_index, + /// but for old metadatas this can vary. + pallets_by_event_index: HashMap<u8, usize>, + /// Find the pallet for a given error index. + /// + /// For modern metadatas, this is the same as pallets_by_call_index, + /// but for old metadatas this can vary. + pallets_by_error_index: HashMap<u8, usize>, /// Metadata of the extrinsic. extrinsic: ExtrinsicMetadata, /// The types of the outer enums. @@ -84,7 +97,7 @@ impl frame_decode::extrinsics::ExtrinsicTypeInfo for Metadata { pallet_index: u8, call_index: u8, ) -> Result, ExtrinsicInfoError<'_>> { - let pallet = self.pallet_by_index(pallet_index).ok_or({ + let pallet = self.pallet_by_call_index(pallet_index).ok_or({ ExtrinsicInfoError::PalletNotFound { index: pallet_index, } @@ -347,11 +360,65 @@ impl frame_decode::custom_values::CustomValueTypeInfo for Metadata { } impl Metadata { - /// This is essentiall an alias for `::decode(&mut bytes)` + /// This is essentially an alias for `::decode(&mut bytes)` pub fn decode_from(mut bytes: &[u8]) -> Result { ::decode(&mut bytes) } + /// Convert V13 metadata into [`Metadata`], given the necessary extra type information. + #[cfg(feature = "legacy")] + pub fn from_v13( + metadata: &frame_metadata::v13::RuntimeMetadataV13, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v13(metadata, types) + } + + /// Convert V12 metadata into [`Metadata`], given the necessary extra type information. + #[cfg(feature = "legacy")] + pub fn from_v12( + metadata: &frame_metadata::v12::RuntimeMetadataV12, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v12(metadata, types) + } + + /// Convert V11 metadata into [`Metadata`], given the necessary extra type information. + #[cfg(feature = "legacy")] + pub fn from_v11( + metadata: &frame_metadata::v11::RuntimeMetadataV11, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v11(metadata, types) + } + + /// Convert V10 metadata into [`Metadata`], given the necessary extra type information. + #[cfg(feature = "legacy")] + pub fn from_v10( + metadata: &frame_metadata::v10::RuntimeMetadataV10, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v10(metadata, types) + } + + /// Convert V9 metadata into [`Metadata`], given the necessary extra type information. + #[cfg(feature = "legacy")] + pub fn from_v9( + metadata: &frame_metadata::v9::RuntimeMetadataV9, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v9(metadata, types) + } + + /// Convert V8 metadata into [`Metadata`], given the necessary extra type information.
+ #[cfg(feature = "legacy")] + pub fn from_v8( + metadata: &frame_metadata::v8::RuntimeMetadataV8, + types: scale_info_legacy::TypeRegistrySet<'_>, + ) -> Result { + from::legacy::from_v8(metadata, types) + } + /// Access the underlying type registry. pub fn types(&self) -> &PortableRegistry { &self.types @@ -385,10 +452,36 @@ impl Metadata { }) } - /// Access a pallet given its encoded variant index. - pub fn pallet_by_index(&self, variant_index: u8) -> Option> { + /// Access a pallet given some call/extrinsic pallet index byte + pub fn pallet_by_call_index(&self, variant_index: u8) -> Option> { let inner = self - .pallets_by_index + .pallets_by_call_index + .get(&variant_index) + .and_then(|i| self.pallets.get_by_index(*i))?; + + Some(PalletMetadata { + inner, + types: self.types(), + }) + } + + /// Access a pallet given some event pallet index byte + pub fn pallet_by_event_index(&self, variant_index: u8) -> Option> { + let inner = self + .pallets_by_event_index + .get(&variant_index) + .and_then(|i| self.pallets.get_by_index(*i))?; + + Some(PalletMetadata { + inner, + types: self.types(), + }) + } + + /// Access a pallet given some error pallet index byte + pub fn pallet_by_error_index(&self, variant_index: u8) -> Option> { + let inner = self + .pallets_by_error_index .get(&variant_index) .and_then(|i| self.pallets.get_by_index(*i))?; @@ -458,9 +551,19 @@ impl<'a> PalletMetadata<'a> { &self.inner.name } - /// The pallet index. - pub fn index(&self) -> u8 { - self.inner.index + /// The index to use for calls in this pallet. + pub fn call_index(&self) -> u8 { + self.inner.call_index + } + + /// The index to use for events in this pallet. + pub fn event_index(&self) -> u8 { + self.inner.event_index + } + + /// The index to use for errors in this pallet. + pub fn error_index(&self) -> u8 { + self.inner.error_index } /// The pallet docs. @@ -613,8 +716,18 @@ impl<'a> PalletMetadata<'a> { struct PalletMetadataInner { /// Pallet name. name: String, - /// Pallet index. - index: u8, + /// The index for calls in the pallet. + call_index: u8, + /// The index for events in the pallet. + /// + /// This is the same as `call_index` for modern metadatas, + /// but can be different for older metadatas (pre-V12). + event_index: u8, + /// The index for errors in the pallet. + /// + /// This is the same as `call_index` for modern metadatas, + /// but can be different for older metadatas (pre-V12). + error_index: u8, /// Pallet storage metadata. storage: Option, /// Type ID for the pallet Call enum. diff --git a/subxt/src/error/dispatch_error.rs b/subxt/src/error/dispatch_error.rs index a98da908ff..4b27e63dfd 100644 --- a/subxt/src/error/dispatch_error.rs +++ b/subxt/src/error/dispatch_error.rs @@ -169,11 +169,12 @@ impl std::fmt::Display for ModuleError { impl ModuleError { /// Return more details about this error. pub fn details(&self) -> Result, ModuleErrorDetailsError> { - let pallet = self.metadata.pallet_by_index(self.pallet_index()).ok_or( - ModuleErrorDetailsError::PalletNotFound { + let pallet = self + .metadata + .pallet_by_error_index(self.pallet_index()) + .ok_or(ModuleErrorDetailsError::PalletNotFound { pallet_index: self.pallet_index(), - }, - )?; + })?; let variant = pallet .error_variant_by_index(self.error_index())