diff --git a/.github/workflows/cargo-hack-check.yml b/.github/workflows/cargo-hack-check.yml index c1f208562d..7fff9a2873 100644 --- a/.github/workflows/cargo-hack-check.yml +++ b/.github/workflows/cargo-hack-check.yml @@ -78,11 +78,12 @@ jobs: - name: "Clarity & Stacks-Common WASM Web" command: | cargo hack check \ + -p clarity \ -p clarity-serialization \ -p stacks-common \ --each-feature \ --no-dev-deps \ - --exclude-features=default,rusqlite,ctrlc-handler,wasm-deterministic \ + --exclude-features=default,rusqlite,ctrlc-handler,wasm-deterministic,testing \ --features=wasm-web - name: "Clarity & Stacks-Common WASM Deterministic" diff --git a/Cargo.lock b/Cargo.lock index f7c0905f54..0ab32a2dec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -605,7 +605,7 @@ name = "clarity" version = "0.0.1" dependencies = [ "assert-json-diff 1.1.0", - "hashbrown 0.15.2", + "clarity-serialization", "integer-sqrt", "lazy_static", "mutants", @@ -652,11 +652,12 @@ dependencies = [ "mutants", "regex", "rstest", + "rusqlite", "serde", "serde_derive", + "serde_json", "slog", "stacks-common 0.0.1", - "thiserror", ] [[package]] @@ -3356,7 +3357,6 @@ dependencies = [ "chrono", "clarity 0.0.1", "ed25519-dalek", - "hashbrown 0.15.2", "lazy_static", "libstackerdb 0.0.1", "mio 0.6.23", diff --git a/clarity-serialization/Cargo.toml b/clarity-serialization/Cargo.toml index 341d8c0c29..f7518e3a18 100644 --- a/clarity-serialization/Cargo.toml +++ b/clarity-serialization/Cargo.toml @@ -12,11 +12,12 @@ readme = "README.md" [dependencies] lazy_static = { workspace = true } regex = { version = "1", default-features = false } +rusqlite = { workspace = true, optional = true } serde = { workspace = true } +serde_json = { version = "1.0", default-features = false } serde_derive = { workspace = true } slog = { workspace = true } stacks_common = { package = "stacks-common", path = "../stacks-common", default-features = false } -thiserror = { workspace = true } [dev-dependencies] mutants = "0.0.3" @@ -25,7 +26,9 @@ rstest = "0.17.0" [features] default = [] testing = [] +developer-mode = ["stacks_common/developer-mode"] slog_json = ["stacks_common/slog_json"] +rusqlite = ["stacks_common/rusqlite", "dep:rusqlite"] # Wasm-specific features for easier configuration wasm-web = ["stacks_common/wasm-web"] diff --git a/clarity-serialization/src/diagnostic.rs b/clarity-serialization/src/diagnostic.rs new file mode 100644 index 0000000000..c8aeda8e6c --- /dev/null +++ b/clarity-serialization/src/diagnostic.rs @@ -0,0 +1,90 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::fmt; + +use crate::representations::Span; + +/// In a near future, we can go further in our static analysis and provide different levels +/// of diagnostics, such as warnings, hints, best practices, etc. 
+#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
+pub enum Level {
+    Note,
+    Warning,
+    Error,
+}
+
+pub trait DiagnosableError {
+    fn message(&self) -> String;
+    fn suggestion(&self) -> Option<String>;
+    fn level(&self) -> Level {
+        Level::Error
+    }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
+pub struct Diagnostic {
+    pub level: Level,
+    pub message: String,
+    pub spans: Vec<Span>,
+    pub suggestion: Option<String>,
+}
+
+impl Diagnostic {
+    pub fn err(error: &dyn DiagnosableError) -> Diagnostic {
+        Diagnostic {
+            spans: vec![],
+            level: Level::Error,
+            message: error.message(),
+            suggestion: error.suggestion(),
+        }
+    }
+
+    pub fn add_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) {
+        self.spans.push(Span {
+            start_line,
+            start_column,
+            end_line,
+            end_column,
+        });
+    }
+}
+
+impl fmt::Display for Diagnostic {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{:?}", self.level)?;
+        match self.spans.len().cmp(&1) {
+            std::cmp::Ordering::Equal => write!(
+                f,
+                " (line {}, column {})",
+                self.spans[0].start_line, self.spans[0].start_column
+            )?,
+            std::cmp::Ordering::Greater => {
+                let lines: Vec<String> = self
+                    .spans
+                    .iter()
+                    .map(|s| format!("line: {}", s.start_line))
+                    .collect();
+                write!(f, " ({})", lines.join(", "))?;
+            }
+            _ => {}
+        }
+        write!(f, ": {}.", &self.message)?;
+        if let Some(suggestion) = &self.suggestion {
+            write!(f, "\n{suggestion}")?;
+        }
+        writeln!(f)
+    }
+}
diff --git a/clarity-serialization/src/errors.rs b/clarity-serialization/src/errors.rs
deleted file mode 100644
index c0b6428a82..0000000000
--- a/clarity-serialization/src/errors.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright (C) 2025 Stacks Open Internet Foundation
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-use std::io;
-
-use thiserror::Error;
-
-use crate::types::{TupleTypeSignature, TypeSignature, Value};
-
-/// The primary error type for the `clarity-codec` crate.
-///
-/// It represents all possible failures that can occur when encoding, decoding,
-/// or validating the structure and types of a Clarity value.
-#[derive(Error, Debug)] -pub enum CodecError { - #[error("I/O error during (de)serialization: {0}")] - Io(#[from] io::Error), - - #[error("Serialization error caused by IO: {0}")] - Serialization(String), - - #[error("Deserialization failed: {0}")] - Deserialization(String), - - #[error("Deserialization expected the type of the input to be: {0}")] - DeserializeExpected(Box), - - #[error("The serializer handled an input in an unexpected way")] - UnexpectedSerialization, - - #[error("Deserialization finished but there were leftover bytes in the buffer")] - LeftoverBytesInDeserialization, - - #[error("Parse error: {0}")] - ParseError(String), - - #[error("Bad type construction.")] - BadTypeConstruction, - - // --- Structural and Size Errors --- - #[error("A value being constructed is larger than the 1MB Clarity limit")] - ValueTooLarge, - - #[error("A value is out of its prescribed bounds")] - ValueOutOfBounds, - - #[error("A type signature is deeper than the 32-level Clarity limit")] - TypeSignatureTooDeep, - - #[error("The supertype of two types is too large to be represented")] - SupertypeTooLarge, - - #[error("Empty tuples are not allowed")] - EmptyTuplesNotAllowed, - - #[error("Failed to construct a tuple with the given type")] - FailureConstructingTupleWithType, - - #[error("Failed to construct a list with the given type")] - FailureConstructingListWithType, - - #[error("All elements in a list must have a compatible supertype")] - ListTypesMustMatch, - - // --- Type Mismatch and Semantic Errors --- - #[error("Expected a value of type '{expected}', but found a value of type '{found}'")] - TypeError { - expected: Box, - found: Box, - }, - - #[error("Expected a value of type '{expected}', but found the value '{found}'")] - TypeValueError { - expected: Box, - found: Box, - }, - - #[error("could not determine the input type for the serialization function")] - CouldNotDetermineSerializationType, - - #[error("type of expression cannot be determined")] - CouldNotDetermineType, - - // --- Naming and Identifier Errors --- - #[error("Name '{0}' is already used in this tuple")] - NameAlreadyUsedInTuple(String), - - #[error("Could not find field '{0}' in tuple '{1}'")] - NoSuchTupleField(String, TupleTypeSignature), - - #[error("Failed to parse {0}: {1}")] - InvalidClarityName(&'static str, String), - - #[error("Failed to parse {0}: {1}")] - InvalidContractName(&'static str, String), - - // --- String/Buffer Content Errors --- - #[error("Invalid characters detected in string")] - InvalidStringCharacters, - - #[error("Invalid UTF-8 encoding in string")] - InvalidUtf8Encoding, - - // --- Catch-all for internal logic errors --- - #[error("An unexpected internal error occurred: {0}")] - Expect(String), -} - -// Implement PartialEq for testing and simple equality checks by comparing the -// string representations of each error. This avoids requiring all wrapped -// fields (like `std::io::Error`) to implement PartialEq. 
-#[cfg(any(test, feature = "testing"))] -impl PartialEq for CodecError { - fn eq(&self, other: &Self) -> bool { - self.to_string() == other.to_string() - } -} diff --git a/clarity-serialization/src/errors/analysis.rs b/clarity-serialization/src/errors/analysis.rs new file mode 100644 index 0000000000..8831198579 --- /dev/null +++ b/clarity-serialization/src/errors/analysis.rs @@ -0,0 +1,505 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::{error, fmt}; + +use crate::diagnostic::{DiagnosableError, Diagnostic}; +use crate::errors::CostErrors; +use crate::execution_cost::ExecutionCost; +use crate::representations::SymbolicExpression; +use crate::types::{TraitIdentifier, TupleTypeSignature, TypeSignature, Value}; + +pub type CheckResult = Result; + +#[derive(Debug, PartialEq)] +pub enum CheckErrors { + // cost checker errors + CostOverflow, + CostBalanceExceeded(ExecutionCost, ExecutionCost), + MemoryBalanceExceeded(u64, u64), + CostComputationFailed(String), + + ValueTooLarge, + ValueOutOfBounds, + TypeSignatureTooDeep, + ExpectedName, + SupertypeTooLarge, + + // unexpected interpreter behavior + Expects(String), + + // match errors + BadMatchOptionSyntax(Box), + BadMatchResponseSyntax(Box), + BadMatchInput(TypeSignature), + + // list typing errors + UnknownListConstructionFailure, + ListTypesMustMatch, + ConstructedListTooLarge, + + // simple type expectation mismatch + TypeError(TypeSignature, TypeSignature), + TypeLiteralError(TypeSignature, TypeSignature), + TypeValueError(TypeSignature, Value), + + NoSuperType(TypeSignature, TypeSignature), + InvalidTypeDescription, + UnknownTypeName(String), + + // union type mismatch + UnionTypeError(Vec, TypeSignature), + UnionTypeValueError(Vec, Value), + + ExpectedLiteral, + ExpectedOptionalType(TypeSignature), + ExpectedResponseType(TypeSignature), + ExpectedOptionalOrResponseType(TypeSignature), + ExpectedOptionalValue(Value), + ExpectedResponseValue(Value), + ExpectedOptionalOrResponseValue(Value), + CouldNotDetermineResponseOkType, + CouldNotDetermineResponseErrType, + CouldNotDetermineSerializationType, + UncheckedIntermediaryResponses, + + CouldNotDetermineMatchTypes, + CouldNotDetermineType, + + // Checker runtime failures + TypeAlreadyAnnotatedFailure, + TypeAnnotationExpectedFailure, + CheckerImplementationFailure, + + // Assets + BadTokenName, + DefineFTBadSignature, + DefineNFTBadSignature, + NoSuchNFT(String), + NoSuchFT(String), + + BadTransferSTXArguments, + BadTransferFTArguments, + BadTransferNFTArguments, + BadMintFTArguments, + BadBurnFTArguments, + + // tuples + BadTupleFieldName, + ExpectedTuple(TypeSignature), + NoSuchTupleField(String, TupleTypeSignature), + EmptyTuplesNotAllowed, + BadTupleConstruction, + TupleExpectsPairs, + + // variables + NoSuchDataVariable(String), + + // data map + BadMapName, + NoSuchMap(String), + + // defines + DefineFunctionBadSignature, + BadFunctionName, + 
BadMapTypeDefinition, + PublicFunctionMustReturnResponse(TypeSignature), + DefineVariableBadSignature, + ReturnTypesMustMatch(TypeSignature, TypeSignature), + + CircularReference(Vec), + + // contract-call errors + NoSuchContract(String), + NoSuchPublicFunction(String, String), + PublicFunctionNotReadOnly(String, String), + ContractAlreadyExists(String), + ContractCallExpectName, + ExpectedCallableType(TypeSignature), + + // get-block-info? errors + NoSuchBlockInfoProperty(String), + NoSuchBurnBlockInfoProperty(String), + NoSuchStacksBlockInfoProperty(String), + NoSuchTenureInfoProperty(String), + GetBlockInfoExpectPropertyName, + GetBurnBlockInfoExpectPropertyName, + GetStacksBlockInfoExpectPropertyName, + GetTenureInfoExpectPropertyName, + + NameAlreadyUsed(String), + ReservedWord(String), + + // expect a function, or applying a function to a list + NonFunctionApplication, + ExpectedListApplication, + ExpectedSequence(TypeSignature), + MaxLengthOverflow, + + // let syntax + BadLetSyntax, + + // generic binding syntax + BadSyntaxBinding, + BadSyntaxExpectedListOfPairs, + + MaxContextDepthReached, + UndefinedFunction(String), + UndefinedVariable(String), + + // argument counts + RequiresAtLeastArguments(usize, usize), + RequiresAtMostArguments(usize, usize), + IncorrectArgumentCount(usize, usize), + IfArmsMustMatch(TypeSignature, TypeSignature), + MatchArmsMustMatch(TypeSignature, TypeSignature), + DefaultTypesMustMatch(TypeSignature, TypeSignature), + TooManyExpressions, + IllegalOrUnknownFunctionApplication(String), + UnknownFunction(String), + + // traits + NoSuchTrait(String, String), + TraitReferenceUnknown(String), + TraitMethodUnknown(String, String), + ExpectedTraitIdentifier, + ImportTraitBadSignature, + TraitReferenceNotAllowed, + BadTraitImplementation(String, String), + DefineTraitBadSignature, + DefineTraitDuplicateMethod(String), + UnexpectedTraitOrFieldReference, + TraitBasedContractCallInReadOnly, + ContractOfExpectsTrait, + IncompatibleTrait(TraitIdentifier, TraitIdentifier), + + // strings + InvalidCharactersDetected, + InvalidUTF8Encoding, + + // secp256k1 signature + InvalidSecp65k1Signature, + + WriteAttemptedInReadOnly, + AtBlockClosureMustBeReadOnly, + + // time checker errors + ExecutionTimeExpired, +} + +#[derive(Debug, PartialEq)] +pub struct CheckError { + pub err: CheckErrors, + pub expressions: Option>, + pub diagnostic: Diagnostic, +} + +impl CheckErrors { + /// Does this check error indicate that the transaction should be + /// rejected? 
+ pub fn rejectable(&self) -> bool { + matches!( + self, + CheckErrors::SupertypeTooLarge | CheckErrors::Expects(_) + ) + } +} + +impl CheckError { + pub fn new(err: CheckErrors) -> CheckError { + let diagnostic = Diagnostic::err(&err); + CheckError { + err, + expressions: None, + diagnostic, + } + } + + pub fn has_expression(&self) -> bool { + self.expressions.is_some() + } + + pub fn set_expression(&mut self, expr: &SymbolicExpression) { + self.diagnostic.spans = vec![expr.span().clone()]; + self.expressions.replace(vec![expr.clone()]); + } + + pub fn set_expressions(&mut self, exprs: &[SymbolicExpression]) { + self.diagnostic.spans = exprs.iter().map(|e| e.span().clone()).collect(); + self.expressions.replace(exprs.to_vec()); + } +} + +impl fmt::Display for CheckErrors { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{self:?}") + } +} + +impl fmt::Display for CheckError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.err)?; + + if let Some(ref e) = self.expressions { + write!(f, "\nNear:\n{e:?}")?; + } + + Ok(()) + } +} + +impl From for CheckError { + fn from(err: CostErrors) -> Self { + CheckError::from(CheckErrors::from(err)) + } +} + +impl From for CheckErrors { + fn from(err: CostErrors) -> Self { + match err { + CostErrors::CostOverflow => CheckErrors::CostOverflow, + CostErrors::CostBalanceExceeded(a, b) => CheckErrors::CostBalanceExceeded(a, b), + CostErrors::MemoryBalanceExceeded(a, b) => CheckErrors::MemoryBalanceExceeded(a, b), + CostErrors::CostComputationFailed(s) => CheckErrors::CostComputationFailed(s), + CostErrors::CostContractLoadFailure => { + CheckErrors::CostComputationFailed("Failed to load cost contract".into()) + } + CostErrors::InterpreterFailure => { + CheckErrors::Expects("Unexpected interpreter failure in cost computation".into()) + } + CostErrors::Expect(s) => CheckErrors::Expects(s), + CostErrors::ExecutionTimeExpired => CheckErrors::ExecutionTimeExpired, + } + } +} + +impl error::Error for CheckError { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + None + } +} + +impl error::Error for CheckErrors { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + None + } +} + +impl From for CheckError { + fn from(err: CheckErrors) -> Self { + CheckError::new(err) + } +} + +#[cfg(any(test, feature = "testing"))] +impl From for String { + fn from(o: CheckErrors) -> Self { + o.to_string() + } +} + +#[allow(clippy::result_large_err)] +pub fn check_argument_count(expected: usize, args: &[T]) -> Result<(), CheckErrors> { + if args.len() != expected { + Err(CheckErrors::IncorrectArgumentCount(expected, args.len())) + } else { + Ok(()) + } +} + +#[allow(clippy::result_large_err)] +pub fn check_arguments_at_least(expected: usize, args: &[T]) -> Result<(), CheckErrors> { + if args.len() < expected { + Err(CheckErrors::RequiresAtLeastArguments(expected, args.len())) + } else { + Ok(()) + } +} + +#[allow(clippy::result_large_err)] +pub fn check_arguments_at_most(expected: usize, args: &[T]) -> Result<(), CheckErrors> { + if args.len() > expected { + Err(CheckErrors::RequiresAtMostArguments(expected, args.len())) + } else { + Ok(()) + } +} + +fn formatted_expected_types(expected_types: &[TypeSignature]) -> String { + let mut expected_types_joined = format!("'{}'", expected_types[0]); + + if expected_types.len() > 2 { + for expected_type in expected_types[1..expected_types.len() - 1].iter() { + expected_types_joined.push_str(&format!(", '{expected_type}'")); + } + } + 
expected_types_joined.push_str(&format!( + " or '{}'", + expected_types[expected_types.len() - 1] + )); + expected_types_joined +} + +impl DiagnosableError for CheckErrors { + fn message(&self) -> String { + match &self { + CheckErrors::ExpectedLiteral => "expected a literal argument".into(), + CheckErrors::SupertypeTooLarge => "supertype of two types is too large".into(), + CheckErrors::Expects(s) => format!("unexpected interpreter behavior: {s}"), + CheckErrors::BadMatchOptionSyntax(source) => + format!("match on a optional type uses the following syntax: (match input some-name if-some-expression if-none-expression). Caused by: {}", + source.message()), + CheckErrors::BadMatchResponseSyntax(source) => + format!("match on a result type uses the following syntax: (match input ok-name if-ok-expression err-name if-err-expression). Caused by: {}", + source.message()), + CheckErrors::BadMatchInput(t) => + format!("match requires an input of either a response or optional, found input: '{t}'"), + CheckErrors::TypeAnnotationExpectedFailure => "analysis expected type to already be annotated for expression".into(), + CheckErrors::CostOverflow => "contract execution cost overflowed cost counter".into(), + CheckErrors::CostBalanceExceeded(a, b) => format!("contract execution cost exceeded budget: {a:?} > {b:?}"), + CheckErrors::MemoryBalanceExceeded(a, b) => format!("contract execution cost exceeded memory budget: {a:?} > {b:?}"), + CheckErrors::InvalidTypeDescription => "supplied type description is invalid".into(), + CheckErrors::EmptyTuplesNotAllowed => "tuple types may not be empty".into(), + CheckErrors::BadSyntaxExpectedListOfPairs => "bad syntax: function expects a list of pairs to bind names, e.g., ((name-0 a) (name-1 b) ...)".into(), + CheckErrors::UnknownTypeName(name) => format!("failed to parse type: '{name}'"), + CheckErrors::ValueTooLarge => "created a type which was greater than maximum allowed value size".into(), + CheckErrors::ValueOutOfBounds => "created a type which value size was out of defined bounds".into(), + CheckErrors::TypeSignatureTooDeep => "created a type which was deeper than maximum allowed type depth".into(), + CheckErrors::ExpectedName => "expected a name argument to this function".into(), + CheckErrors::NoSuperType(a, b) => format!("unable to create a supertype for the two types: '{a}' and '{b}'"), + CheckErrors::UnknownListConstructionFailure => "invalid syntax for list definition".into(), + CheckErrors::ListTypesMustMatch => "expecting elements of same type in a list".into(), + CheckErrors::ConstructedListTooLarge => "reached limit of elements in a sequence".into(), + CheckErrors::TypeError(expected_type, found_type) => format!("expecting expression of type '{expected_type}', found '{found_type}'"), + CheckErrors::TypeLiteralError(expected_type, found_type) => format!("expecting a literal of type '{expected_type}', found '{found_type}'"), + CheckErrors::TypeValueError(expected_type, found_value) => format!("expecting expression of type '{expected_type}', found '{found_value}'"), + CheckErrors::UnionTypeError(expected_types, found_type) => format!("expecting expression of type {}, found '{}'", formatted_expected_types(expected_types), found_type), + CheckErrors::UnionTypeValueError(expected_types, found_type) => format!("expecting expression of type {}, found '{}'", formatted_expected_types(expected_types), found_type), + CheckErrors::ExpectedOptionalType(found_type) => format!("expecting expression of type 'optional', found '{found_type}'"), + 
CheckErrors::ExpectedOptionalOrResponseType(found_type) => format!("expecting expression of type 'optional' or 'response', found '{found_type}'"), + CheckErrors::ExpectedOptionalOrResponseValue(found_type) => format!("expecting expression of type 'optional' or 'response', found '{found_type}'"), + CheckErrors::ExpectedResponseType(found_type) => format!("expecting expression of type 'response', found '{found_type}'"), + CheckErrors::ExpectedOptionalValue(found_type) => format!("expecting expression of type 'optional', found '{found_type}'"), + CheckErrors::ExpectedResponseValue(found_type) => format!("expecting expression of type 'response', found '{found_type}'"), + CheckErrors::CouldNotDetermineResponseOkType => "attempted to obtain 'ok' value from response, but 'ok' type is indeterminate".into(), + CheckErrors::CouldNotDetermineResponseErrType => "attempted to obtain 'err' value from response, but 'err' type is indeterminate".into(), + CheckErrors::CouldNotDetermineMatchTypes => "attempted to match on an (optional) or (response) type where either the some, ok, or err type is indeterminate. you may wish to use unwrap-panic or unwrap-err-panic instead.".into(), + CheckErrors::CouldNotDetermineType => "type of expression cannot be determined".into(), + CheckErrors::BadTupleFieldName => "invalid tuple field name".into(), + CheckErrors::ExpectedTuple(type_signature) => format!("expecting tuple, found '{type_signature}'"), + CheckErrors::NoSuchTupleField(field_name, tuple_signature) => format!("cannot find field '{field_name}' in tuple '{tuple_signature}'"), + CheckErrors::BadTupleConstruction => "invalid tuple syntax, expecting list of pair".into(), + CheckErrors::TupleExpectsPairs => "invalid tuple syntax, expecting pair".into(), + CheckErrors::NoSuchDataVariable(var_name) => format!("use of unresolved persisted variable '{var_name}'"), + CheckErrors::BadTransferSTXArguments => "STX transfer expects an int amount, from principal, to principal".into(), + CheckErrors::BadTransferFTArguments => "transfer expects an int amount, from principal, to principal".into(), + CheckErrors::BadTransferNFTArguments => "transfer expects an asset, from principal, to principal".into(), + CheckErrors::BadMintFTArguments => "mint expects a uint amount and from principal".into(), + CheckErrors::BadBurnFTArguments => "burn expects a uint amount and from principal".into(), + CheckErrors::BadMapName => "invalid map name".into(), + CheckErrors::NoSuchMap(map_name) => format!("use of unresolved map '{map_name}'"), + CheckErrors::DefineFunctionBadSignature => "invalid function definition".into(), + CheckErrors::BadFunctionName => "invalid function name".into(), + CheckErrors::BadMapTypeDefinition => "invalid map definition".into(), + CheckErrors::PublicFunctionMustReturnResponse(found_type) => format!("public functions must return an expression of type 'response', found '{found_type}'"), + CheckErrors::DefineVariableBadSignature => "invalid variable definition".into(), + CheckErrors::ReturnTypesMustMatch(type_1, type_2) => format!("detected two execution paths, returning two different expression types (got '{type_1}' and '{type_2}')"), + CheckErrors::NoSuchContract(contract_identifier) => format!("use of unresolved contract '{contract_identifier}'"), + CheckErrors::NoSuchPublicFunction(contract_identifier, function_name) => format!("contract '{contract_identifier}' has no public function '{function_name}'"), + CheckErrors::PublicFunctionNotReadOnly(contract_identifier, function_name) => format!("function 
'{contract_identifier}' in '{function_name}' is not read-only"), + CheckErrors::ContractAlreadyExists(contract_identifier) => format!("contract name '{contract_identifier}' conflicts with existing contract"), + CheckErrors::ContractCallExpectName => "missing contract name for call".into(), + CheckErrors::ExpectedCallableType(found_type) => format!("expected a callable contract, found {found_type}"), + CheckErrors::NoSuchBlockInfoProperty(property_name) => format!("use of block unknown property '{property_name}'"), + CheckErrors::NoSuchBurnBlockInfoProperty(property_name) => format!("use of burn block unknown property '{property_name}'"), + CheckErrors::NoSuchStacksBlockInfoProperty(property_name) => format!("use of unknown stacks block property '{property_name}'"), + CheckErrors::NoSuchTenureInfoProperty(property_name) => format!("use of unknown tenure property '{property_name}'"), + CheckErrors::GetBlockInfoExpectPropertyName => "missing property name for block info introspection".into(), + CheckErrors::GetBurnBlockInfoExpectPropertyName => "missing property name for burn block info introspection".into(), + CheckErrors::GetStacksBlockInfoExpectPropertyName => "missing property name for stacks block info introspection".into(), + CheckErrors::GetTenureInfoExpectPropertyName => "missing property name for tenure info introspection".into(), + CheckErrors::NameAlreadyUsed(name) => format!("defining '{name}' conflicts with previous value"), + CheckErrors::ReservedWord(name) => format!("{name} is a reserved word"), + CheckErrors::NonFunctionApplication => "expecting expression of type function".into(), + CheckErrors::ExpectedListApplication => "expecting expression of type list".into(), + CheckErrors::ExpectedSequence(found_type) => format!("expecting expression of type 'list', 'buff', 'string-ascii' or 'string-utf8' - found '{found_type}'"), + CheckErrors::MaxLengthOverflow => format!("expecting a value <= {}", u32::MAX), + CheckErrors::BadLetSyntax => "invalid syntax of 'let'".into(), + CheckErrors::CircularReference(references) => format!("detected circular reference: ({})", references.join(", ")), + CheckErrors::BadSyntaxBinding => "invalid syntax binding".into(), + CheckErrors::MaxContextDepthReached => "reached depth limit".into(), + CheckErrors::UndefinedVariable(var_name) => format!("use of unresolved variable '{var_name}'"), + CheckErrors::UndefinedFunction(var_name) => format!("use of unresolved function '{var_name}'"), + CheckErrors::RequiresAtLeastArguments(expected, found) => format!("expecting >= {expected} arguments, got {found}"), + CheckErrors::RequiresAtMostArguments(expected, found) => format!("expecting < {expected} arguments, got {found}"), + CheckErrors::IncorrectArgumentCount(expected_count, found_count) => format!("expecting {expected_count} arguments, got {found_count}"), + CheckErrors::IfArmsMustMatch(type_1, type_2) => format!("expression types returned by the arms of 'if' must match (got '{type_1}' and '{type_2}')"), + CheckErrors::MatchArmsMustMatch(type_1, type_2) => format!("expression types returned by the arms of 'match' must match (got '{type_1}' and '{type_2}')"), + CheckErrors::DefaultTypesMustMatch(type_1, type_2) => format!("expression types passed in 'default-to' must match (got '{type_1}' and '{type_2}')"), + CheckErrors::TooManyExpressions => "reached limit of expressions".into(), + CheckErrors::IllegalOrUnknownFunctionApplication(function_name) => format!("use of illegal / unresolved function '{function_name}"), + 
CheckErrors::UnknownFunction(function_name) => format!("use of unresolved function '{function_name}'"), + CheckErrors::TraitBasedContractCallInReadOnly => "use of trait based contract calls are not allowed in read-only context".into(), + CheckErrors::WriteAttemptedInReadOnly => "expecting read-only statements, detected a writing operation".into(), + CheckErrors::AtBlockClosureMustBeReadOnly => "(at-block ...) closures expect read-only statements, but detected a writing operation".into(), + CheckErrors::BadTokenName => "expecting an token name as an argument".into(), + CheckErrors::DefineFTBadSignature => "(define-token ...) expects a token name as an argument".into(), + CheckErrors::DefineNFTBadSignature => "(define-asset ...) expects an asset name and an asset identifier type signature as arguments".into(), + CheckErrors::NoSuchNFT(asset_name) => format!("tried to use asset function with a undefined asset ('{asset_name}')"), + CheckErrors::NoSuchFT(asset_name) => format!("tried to use token function with a undefined token ('{asset_name}')"), + CheckErrors::NoSuchTrait(contract_name, trait_name) => format!("use of unresolved trait {contract_name}.{trait_name}"), + CheckErrors::TraitReferenceUnknown(trait_name) => format!("use of undeclared trait <{trait_name}>"), + CheckErrors::TraitMethodUnknown(trait_name, func_name) => format!("method '{func_name}' unspecified in trait <{trait_name}>"), + CheckErrors::ImportTraitBadSignature => "(use-trait ...) expects a trait name and a trait identifier".into(), + CheckErrors::BadTraitImplementation(trait_name, func_name) => format!("invalid signature for method '{func_name}' regarding trait's specification <{trait_name}>"), + CheckErrors::ExpectedTraitIdentifier => "expecting expression of type trait identifier".into(), + CheckErrors::UnexpectedTraitOrFieldReference => "unexpected use of trait reference or field".into(), + CheckErrors::DefineTraitBadSignature => "invalid trait definition".into(), + CheckErrors::DefineTraitDuplicateMethod(method_name) => format!("duplicate method name '{method_name}' in trait definition"), + CheckErrors::TraitReferenceNotAllowed => "trait references can not be stored".into(), + CheckErrors::ContractOfExpectsTrait => "trait reference expected".into(), + CheckErrors::IncompatibleTrait(expected_trait, actual_trait) => format!("trait '{actual_trait}' is not a compatible with expected trait, '{expected_trait}'"), + CheckErrors::InvalidCharactersDetected => "invalid characters detected".into(), + CheckErrors::InvalidUTF8Encoding => "invalid UTF8 encoding".into(), + CheckErrors::InvalidSecp65k1Signature => "invalid seckp256k1 signature".into(), + CheckErrors::TypeAlreadyAnnotatedFailure | CheckErrors::CheckerImplementationFailure => { + "internal error - please file an issue on https://github.com/stacks-network/stacks-blockchain".into() + }, + CheckErrors::UncheckedIntermediaryResponses => "intermediary responses in consecutive statements must be checked".into(), + CheckErrors::CostComputationFailed(s) => format!("contract cost computation failed: {s}"), + CheckErrors::CouldNotDetermineSerializationType => "could not determine the input type for the serialization function".into(), + CheckErrors::ExecutionTimeExpired => "execution time expired".into(), + } + } + + fn suggestion(&self) -> Option { + match &self { + CheckErrors::BadSyntaxBinding => { + Some("binding syntax example: ((supply int) (ttl int))".into()) + } + CheckErrors::BadLetSyntax => Some( + "'let' syntax example: (let ((supply 1000) (ttl 60)) )".into(), + ), + 
CheckErrors::TraitReferenceUnknown(_) => Some( + "traits should be either defined, with define-trait, or imported, with use-trait." + .into(), + ), + CheckErrors::NoSuchBlockInfoProperty(_) => Some( + "properties available: time, header-hash, burnchain-header-hash, vrf-seed".into(), + ), + _ => None, + } + } +} diff --git a/clarity-serialization/src/errors/ast.rs b/clarity-serialization/src/errors/ast.rs new file mode 100644 index 0000000000..35ee8fca2d --- /dev/null +++ b/clarity-serialization/src/errors/ast.rs @@ -0,0 +1,319 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::{error, fmt}; + +use crate::MAX_CALL_STACK_DEPTH; +use crate::diagnostic::{DiagnosableError, Diagnostic, Level}; +use crate::errors::{CostErrors, LexerError}; +use crate::execution_cost::ExecutionCost; +use crate::representations::{PreSymbolicExpression, Span}; +use crate::token::Token; + +pub type ParseResult = Result; + +#[derive(Debug, PartialEq)] +pub enum ParseErrors { + CostOverflow, + CostBalanceExceeded(ExecutionCost, ExecutionCost), + MemoryBalanceExceeded(u64, u64), + TooManyExpressions, + ExpressionStackDepthTooDeep, + VaryExpressionStackDepthTooDeep, + FailedCapturingInput, + SeparatorExpected(String), + SeparatorExpectedAfterColon(String), + ProgramTooLarge, + IllegalVariableName(String), + IllegalContractName(String), + UnknownQuotedValue(String), + FailedParsingIntValue(String), + FailedParsingUIntValue(String), + FailedParsingBuffer(String), + FailedParsingHexValue(String, String), + FailedParsingPrincipal(String), + FailedParsingField(String), + FailedParsingRemainder(String), + ClosingParenthesisUnexpected, + ClosingParenthesisExpected, + ClosingTupleLiteralUnexpected, + ClosingTupleLiteralExpected, + CircularReference(Vec), + TupleColonExpected(usize), + TupleCommaExpected(usize), + TupleItemExpected(usize), + NameAlreadyUsed(String), + TraitReferenceNotAllowed, + ImportTraitBadSignature, + DefineTraitBadSignature, + ImplTraitBadSignature, + TraitReferenceUnknown(String), + CommaSeparatorUnexpected, + ColonSeparatorUnexpected, + InvalidCharactersDetected, + InvalidEscaping, + CostComputationFailed(String), + + // V2 Errors + Lexer(LexerError), + ContractNameTooLong(String), + ExpectedContractIdentifier, + ExpectedTraitIdentifier, + IllegalTraitName(String), + InvalidPrincipalLiteral, + InvalidBuffer, + NameTooLong(String), + UnexpectedToken(Token), + ExpectedClosing(Token), + TupleColonExpectedv2, + TupleCommaExpectedv2, + TupleValueExpected, + IllegalClarityName(String), + IllegalASCIIString(String), + IllegalUtf8String(String), + ExpectedWhitespace, + // Notes + NoteToMatchThis(Token), + + /// Should be an unreachable error + UnexpectedParserFailure, + /// Should be an unreachable failure which invalidates the transaction + InterpreterFailure, + + ExecutionTimeExpired, +} + +#[derive(Debug, PartialEq)] +pub struct ParseError { + pub err: ParseErrors, + pub 
pre_expressions: Option>, + pub diagnostic: Diagnostic, +} + +impl ParseError { + pub fn new(err: ParseErrors) -> ParseError { + let diagnostic = Diagnostic::err(&err); + ParseError { + err, + pre_expressions: None, + diagnostic, + } + } + + pub fn rejectable(&self) -> bool { + matches!(self.err, ParseErrors::InterpreterFailure) + } + + pub fn has_pre_expression(&self) -> bool { + self.pre_expressions.is_some() + } + + pub fn set_pre_expression(&mut self, expr: &PreSymbolicExpression) { + self.diagnostic.spans = vec![expr.span().clone()]; + self.pre_expressions.replace(vec![expr.clone()]); + } + + pub fn set_pre_expressions(&mut self, exprs: Vec) { + self.diagnostic.spans = exprs.iter().map(|e| e.span().clone()).collect(); + self.pre_expressions.replace(exprs.to_vec()); + } +} + +impl fmt::Display for ParseError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", self.err)?; + + if let Some(ref e) = self.pre_expressions { + write!(f, "\nNear:\n{e:?}")?; + } + + Ok(()) + } +} + +impl error::Error for ParseError { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + None + } +} + +impl From for ParseError { + fn from(err: ParseErrors) -> Self { + ParseError::new(err) + } +} + +impl From for ParseError { + fn from(err: CostErrors) -> Self { + match err { + CostErrors::CostOverflow => ParseError::new(ParseErrors::CostOverflow), + CostErrors::CostBalanceExceeded(a, b) => { + ParseError::new(ParseErrors::CostBalanceExceeded(a, b)) + } + CostErrors::MemoryBalanceExceeded(a, b) => { + ParseError::new(ParseErrors::MemoryBalanceExceeded(a, b)) + } + CostErrors::CostComputationFailed(s) => { + ParseError::new(ParseErrors::CostComputationFailed(s)) + } + CostErrors::CostContractLoadFailure => ParseError::new( + ParseErrors::CostComputationFailed("Failed to load cost contract".into()), + ), + CostErrors::InterpreterFailure | CostErrors::Expect(_) => { + ParseError::new(ParseErrors::InterpreterFailure) + } + CostErrors::ExecutionTimeExpired => ParseError::new(ParseErrors::ExecutionTimeExpired), + } + } +} + +impl DiagnosableError for ParseErrors { + fn message(&self) -> String { + match &self { + ParseErrors::CostOverflow => "Used up cost budget during the parse".into(), + ParseErrors::CostBalanceExceeded(bal, used) => { + format!("Used up cost budget during the parse: {bal} balance, {used} used") + } + ParseErrors::MemoryBalanceExceeded(bal, used) => { + format!("Used up memory budget during the parse: {bal} balance, {used} used") + } + ParseErrors::TooManyExpressions => "Too many expressions".into(), + ParseErrors::FailedCapturingInput => "Failed to capture value from input".into(), + ParseErrors::SeparatorExpected(found) => { + format!("Expected whitespace or a close parens. 
Found: '{found}'") + } + ParseErrors::SeparatorExpectedAfterColon(found) => { + format!("Whitespace expected after colon (:), Found: '{found}'") + } + ParseErrors::ProgramTooLarge => "Program too large to parse".into(), + ParseErrors::IllegalContractName(contract_name) => { + format!("Illegal contract name: '{contract_name}'") + } + ParseErrors::IllegalVariableName(var_name) => { + format!("Illegal variable name: '{var_name}'") + } + ParseErrors::UnknownQuotedValue(value) => format!("Unknown 'quoted value '{value}'"), + ParseErrors::FailedParsingIntValue(value) => { + format!("Failed to parse int literal '{value}'") + } + ParseErrors::FailedParsingUIntValue(value) => { + format!("Failed to parse uint literal 'u{value}'") + } + ParseErrors::FailedParsingHexValue(value, x) => { + format!("Invalid hex-string literal {value}: {x}") + } + ParseErrors::FailedParsingPrincipal(value) => { + format!("Invalid principal literal: {value}") + } + ParseErrors::FailedParsingBuffer(value) => format!("Invalid buffer literal: {value}"), + ParseErrors::FailedParsingField(value) => format!("Invalid field literal: {value}"), + ParseErrors::FailedParsingRemainder(remainder) => { + format!("Failed to lex input remainder: '{remainder}'") + } + ParseErrors::ClosingParenthesisUnexpected => { + "Tried to close list which isn't open.".into() + } + ParseErrors::ClosingParenthesisExpected => "List expressions (..) left opened.".into(), + ParseErrors::ClosingTupleLiteralUnexpected => { + "Tried to close tuple literal which isn't open.".into() + } + ParseErrors::ClosingTupleLiteralExpected => "Tuple literal {{..}} left opened.".into(), + ParseErrors::ColonSeparatorUnexpected => "Misplaced colon.".into(), + ParseErrors::CommaSeparatorUnexpected => "Misplaced comma.".into(), + ParseErrors::TupleColonExpected(i) => { + format!("Tuple literal construction expects a colon at index {i}") + } + ParseErrors::TupleCommaExpected(i) => { + format!("Tuple literal construction expects a comma at index {i}") + } + ParseErrors::TupleItemExpected(i) => { + format!("Tuple literal construction expects a key or value at index {i}") + } + ParseErrors::CircularReference(function_names) => format!( + "detected interdependent functions ({})", + function_names.join(", ") + ), + ParseErrors::NameAlreadyUsed(name) => { + format!("defining '{name}' conflicts with previous value") + } + ParseErrors::ImportTraitBadSignature => { + "(use-trait ...) expects a trait name and a trait identifier".into() + } + ParseErrors::DefineTraitBadSignature => { + "(define-trait ...) expects a trait name and a trait definition".into() + } + ParseErrors::ImplTraitBadSignature => { + "(impl-trait ...) expects a trait identifier".into() + } + ParseErrors::TraitReferenceNotAllowed => "trait references can not be stored".into(), + ParseErrors::TraitReferenceUnknown(trait_name) => { + format!("use of undeclared trait <{trait_name}>") + } + ParseErrors::ExpressionStackDepthTooDeep => format!( + "AST has too deep of an expression nesting. The maximum stack depth is {MAX_CALL_STACK_DEPTH}" + ), + ParseErrors::VaryExpressionStackDepthTooDeep => format!( + "AST has too deep of an expression nesting. 
The maximum stack depth is {MAX_CALL_STACK_DEPTH}" + ), + ParseErrors::InvalidCharactersDetected => "invalid characters detected".into(), + ParseErrors::InvalidEscaping => "invalid escaping detected in string".into(), + ParseErrors::CostComputationFailed(s) => format!("Cost computation failed: {s}"), + + // Parser v2 errors + ParseErrors::Lexer(le) => le.message(), + ParseErrors::ContractNameTooLong(name) => { + format!("contract name '{name}' is too long") + } + ParseErrors::ExpectedContractIdentifier => "expected contract identifier".into(), + ParseErrors::ExpectedTraitIdentifier => "expected trait identifier".into(), + ParseErrors::IllegalTraitName(name) => format!("illegal trait name, '{name}'"), + ParseErrors::InvalidPrincipalLiteral => "invalid principal literal".into(), + ParseErrors::InvalidBuffer => "invalid hex-string literal".into(), + ParseErrors::NameTooLong(name) => format!("illegal name (too long), '{name}'"), + ParseErrors::UnexpectedToken(token) => format!("unexpected '{token}'"), + ParseErrors::ExpectedClosing(token) => format!("expected closing '{token}'"), + ParseErrors::TupleColonExpectedv2 => "expected ':' after key in tuple".into(), + ParseErrors::TupleCommaExpectedv2 => { + "expected ',' separating key-value pairs in tuple".into() + } + ParseErrors::TupleValueExpected => "expected value expression for tuple".into(), + ParseErrors::IllegalClarityName(name) => format!("illegal clarity name, '{name}'"), + ParseErrors::IllegalASCIIString(s) => format!("illegal ascii string \"{s}\""), + ParseErrors::IllegalUtf8String(s) => format!("illegal UTF8 string \"{s}\""), + ParseErrors::ExpectedWhitespace => "expected whitespace before expression".into(), + ParseErrors::NoteToMatchThis(token) => format!("to match this '{token}'"), + ParseErrors::UnexpectedParserFailure => "unexpected failure while parsing".to_string(), + ParseErrors::InterpreterFailure => "unexpected failure while parsing".to_string(), + ParseErrors::ExecutionTimeExpired => "max execution time expired".to_string(), + } + } + + fn suggestion(&self) -> Option { + None + } + + fn level(&self) -> Level { + match self { + ParseErrors::NoteToMatchThis(_) => Level::Note, + ParseErrors::Lexer(lexer_error) => lexer_error.level(), + _ => Level::Error, + } + } +} + +pub struct PlacedError { + pub e: ParseErrors, + pub span: Span, +} diff --git a/clarity-serialization/src/errors/cost.rs b/clarity-serialization/src/errors/cost.rs new file mode 100644 index 0000000000..2678d8ade4 --- /dev/null +++ b/clarity-serialization/src/errors/cost.rs @@ -0,0 +1,54 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
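Illustrative aside (not part of the patch): a minimal sketch of how the parser errors above are expected to surface as user-facing diagnostics, assuming the new modules are exported from the crate root as `clarity_serialization::{diagnostic, errors}`. The failing contract name and the span coordinates are made up.

use clarity_serialization::errors::{ParseError, ParseErrors};

fn report_parse_failure() -> String {
    // Every ParseErrors variant implements DiagnosableError, so ParseError::new
    // can attach a ready-made Diagnostic to it.
    let mut parse_err =
        ParseError::new(ParseErrors::IllegalContractName("my contract".into()));

    // Spans normally come from set_pre_expression(); here one is added by hand
    // (the line/column numbers are illustrative only).
    parse_err.diagnostic.add_span(1, 9, 1, 20);

    // Diagnostic's Display impl renders something like
    // "Error (line 1, column 9): Illegal contract name: 'my contract'."
    parse_err.diagnostic.to_string()
}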
+use std::fmt; + +use crate::execution_cost::ExecutionCost; + +#[derive(Debug, PartialEq, Eq)] +pub enum CostErrors { + CostComputationFailed(String), + CostOverflow, + CostBalanceExceeded(ExecutionCost, ExecutionCost), + MemoryBalanceExceeded(u64, u64), + CostContractLoadFailure, + InterpreterFailure, + Expect(String), + ExecutionTimeExpired, +} + +impl fmt::Display for CostErrors { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + CostErrors::CostComputationFailed(s) => write!(f, "Cost computation failed: {s}"), + CostErrors::CostOverflow => write!(f, "Cost overflow"), + CostErrors::CostBalanceExceeded(total, limit) => { + write!(f, "Cost balance exceeded: total {total}, limit {limit}") + } + CostErrors::MemoryBalanceExceeded(used, limit) => { + write!(f, "Memory balance exceeded: used {used}, limit {limit}") + } + CostErrors::CostContractLoadFailure => write!(f, "Failed to load cost contract"), + CostErrors::InterpreterFailure => write!(f, "Interpreter failure"), + CostErrors::Expect(s) => write!(f, "Expectation failed: {s}"), + CostErrors::ExecutionTimeExpired => write!(f, "Execution time expired"), + } + } +} + +impl CostErrors { + pub fn rejectable(&self) -> bool { + matches!(self, CostErrors::InterpreterFailure | CostErrors::Expect(_)) + } +} diff --git a/clarity-serialization/src/errors/lexer.rs b/clarity-serialization/src/errors/lexer.rs new file mode 100644 index 0000000000..27d6a2c3a2 --- /dev/null +++ b/clarity-serialization/src/errors/lexer.rs @@ -0,0 +1,94 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
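Another illustrative aside (not part of the patch): how a cost failure built from the `CostErrors` type above is expected to flow into the checker errors through the `From<CostErrors>` impl in errors/analysis.rs. The budget values passed in are hypothetical, and the crate paths are assumed as before.

use clarity_serialization::errors::{CheckErrors, CostErrors};
use clarity_serialization::execution_cost::ExecutionCost;

fn surface_cost_failure(consumed: ExecutionCost, budget: ExecutionCost) -> CheckErrors {
    let cost_err = CostErrors::CostBalanceExceeded(consumed, budget);

    // rejectable() singles out consensus-critical failures (InterpreterFailure,
    // Expect); plain budget exhaustion is not one of them.
    debug_assert!(!cost_err.rejectable());

    // The From<CostErrors> impl in errors/analysis.rs maps each variant onto its
    // CheckErrors counterpart, e.g. CostBalanceExceeded -> CostBalanceExceeded.
    CheckErrors::from(cost_err)
}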
+ +use crate::diagnostic::{DiagnosableError, Level}; +use crate::representations::Span; + +#[derive(Debug, PartialEq, Clone)] +pub enum LexerError { + InvalidCharInt(char), + InvalidCharUint(char), + InvalidCharBuffer(char), + InvalidCharIdent(char), + InvalidCharTraitIdent(char), + InvalidCharPrincipal(char), + InvalidBufferLength(usize), + UnknownEscapeChar(char), + IllegalCharString(char), + IllegalCharUTF8Encoding(char), + UnterminatedUTF8Encoding, + ExpectedClosing(char), + ExpectedSeparator, + EmptyUTF8Encoding, + InvalidUTF8Encoding, + SingleSemiColon, + UnknownSymbol(char), + NonASCIIChar(char), + NoteToMatchThis(char), + UnsupportedLineEnding, + EditorCRLFMode, +} + +#[derive(Debug)] +pub struct PlacedError { + pub e: LexerError, + pub span: Span, +} + +impl DiagnosableError for LexerError { + fn message(&self) -> String { + use self::LexerError::*; + match self { + InvalidCharInt(c) => format!("invalid character, '{c}', in int literal"), + InvalidCharUint(c) => format!("invalid character, '{c}', in uint literal"), + InvalidCharBuffer(c) => format!("invalid character, '{c}', in buffer"), + InvalidCharIdent(c) => format!("invalid character, '{c}', in identifier"), + InvalidCharTraitIdent(c) => format!("invalid character, '{c}', in trait identifier"), + InvalidCharPrincipal(c) => format!("invalid character, '{c}', in principal literal"), + IllegalCharString(c) => format!("invalid character, '{c}', in string literal"), + IllegalCharUTF8Encoding(c) => format!("invalid character, '{c}', in UTF8 encoding"), + InvalidUTF8Encoding => "invalid UTF8 encoding".to_string(), + EmptyUTF8Encoding => "empty UTF8 encoding".to_string(), + UnterminatedUTF8Encoding => "unterminated UTF8 encoding, missing '}'".to_string(), + InvalidBufferLength(size) => format!("invalid buffer length, {size}"), + UnknownEscapeChar(c) => format!("unknown escape character, '{c}'"), + ExpectedClosing(c) => format!("expected closing '{c}'"), + ExpectedSeparator => "expected separator".to_string(), + SingleSemiColon => "unexpected single ';' (comments begin with \";;\"".to_string(), + UnknownSymbol(c) => format!("unknown symbol, '{c}'"), + NonASCIIChar(c) => format!("illegal non-ASCII character, '{c}'"), + NoteToMatchThis(c) => format!("to match this '{c}'"), + UnsupportedLineEnding => { + "unsupported line-ending '\\r', only '\\n' is supported".to_string() + } + EditorCRLFMode => { + "you may need to change your editor from CRLF mode to LF mode".to_string() + } + } + } + + fn suggestion(&self) -> Option { + None + } + + fn level(&self) -> Level { + use self::LexerError::*; + match self { + NoteToMatchThis(_) => Level::Note, + EditorCRLFMode => Level::Note, + _ => Level::Error, + } + } +} diff --git a/clarity-serialization/src/errors/mod.rs b/clarity-serialization/src/errors/mod.rs new file mode 100644 index 0000000000..e65fe47e62 --- /dev/null +++ b/clarity-serialization/src/errors/mod.rs @@ -0,0 +1,256 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +pub mod analysis; +pub mod ast; +pub mod cost; +pub mod lexer; + +use std::{error, fmt}; + +pub use analysis::{CheckError, CheckErrors, CheckResult}; +pub use ast::{ParseError, ParseErrors, ParseResult}; +pub use cost::CostErrors; +pub use lexer::LexerError; +#[cfg(feature = "rusqlite")] +use rusqlite::Error as SqliteError; +use serde_json::Error as SerdeJSONErr; +use stacks_common::types::chainstate::BlockHeaderHash; + +use crate::types::{FunctionIdentifier, Value}; + +pub type StackTrace = Vec; + +#[derive(Debug)] +pub struct IncomparableError { + pub err: T, +} + +#[derive(Debug)] +pub enum Error { + /// UncheckedErrors are errors that *should* be caught by the + /// TypeChecker and other check passes. Test executions may + /// trigger these errors. + Unchecked(CheckErrors), + Interpreter(InterpreterError), + Runtime(RuntimeErrorType, Option), + ShortReturn(ShortReturnType), +} + +/// InterpreterErrors are errors that *should never* occur. +/// Test executions may trigger these errors. +#[derive(Debug, PartialEq)] +pub enum InterpreterError { + BadSender(Value), + BadSymbolicRepresentation(String), + InterpreterError(String), + UninitializedPersistedVariable, + FailedToConstructAssetTable, + FailedToConstructEventBatch, + #[cfg(feature = "rusqlite")] + SqliteError(IncomparableError), + BadFileName, + FailedToCreateDataDirectory, + MarfFailure(String), + FailureConstructingTupleWithType, + FailureConstructingListWithType, + InsufficientBalance, + CostContractLoadFailure, + DBError(String), + Expect(String), +} + +/// RuntimeErrors are errors that smart contracts are expected +/// to be able to trigger during execution (e.g., arithmetic errors) +#[derive(Debug, PartialEq)] +pub enum RuntimeErrorType { + Arithmetic(String), + ArithmeticOverflow, + ArithmeticUnderflow, + SupplyOverflow(u128, u128), + SupplyUnderflow(u128, u128), + DivisionByZero, + // error in parsing types + ParseError(String), + // error in parsing the AST + ASTError(ParseError), + MaxStackDepthReached, + MaxContextDepthReached, + ListDimensionTooHigh, + BadTypeConstruction, + ValueTooLarge, + BadBlockHeight(String), + TransferNonPositiveAmount, + NoSuchToken, + NotImplemented, + NoCallerInContext, + NoSenderInContext, + NonPositiveTokenSupply, + JSONParseError(IncomparableError), + AttemptToFetchInTransientContext, + BadNameValue(&'static str, String), + UnknownBlockHeaderHash(BlockHeaderHash), + BadBlockHash(Vec), + UnwrapFailure, + DefunctPoxContract, + PoxAlreadyLocked, + MetadataAlreadySet, +} + +#[derive(Debug, PartialEq)] +pub enum ShortReturnType { + ExpectedValue(Value), + AssertionFailed(Value), +} + +pub type InterpreterResult = Result; + +impl PartialEq> for IncomparableError { + fn eq(&self, _other: &IncomparableError) -> bool { + false + } +} + +impl PartialEq for Error { + fn eq(&self, other: &Error) -> bool { + match (self, other) { + (Error::Runtime(x, _), Error::Runtime(y, _)) => x == y, + (Error::Unchecked(x), Error::Unchecked(y)) => x == y, + (Error::ShortReturn(x), Error::ShortReturn(y)) => x == y, + (Error::Interpreter(x), Error::Interpreter(y)) => x == y, + _ => false, + } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Error::Runtime(err, stack) => { + write!(f, "{err}")?; + if let Some(stack_trace) = stack { + writeln!(f, "\n Stack Trace: ")?; + for item in stack_trace.iter() { + writeln!(f, "{item}")?; + } + } + 
Ok(()) + } + _ => write!(f, "{self:?}"), + } + } +} + +impl fmt::Display for RuntimeErrorType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{self:?}") + } +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + None + } +} + +impl error::Error for RuntimeErrorType { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + None + } +} + +impl From for Error { + fn from(err: ParseError) -> Self { + match &err.err { + ParseErrors::InterpreterFailure => Error::from(InterpreterError::Expect( + "Unexpected interpreter failure during parsing".into(), + )), + _ => Error::from(RuntimeErrorType::ASTError(err)), + } + } +} + +impl From for Error { + fn from(err: CostErrors) -> Self { + match err { + CostErrors::InterpreterFailure => Error::from(InterpreterError::Expect( + "Interpreter failure during cost calculation".into(), + )), + CostErrors::Expect(s) => Error::from(InterpreterError::Expect(format!( + "Interpreter failure during cost calculation: {s}" + ))), + other_err => Error::from(CheckErrors::from(other_err)), + } + } +} + +impl From for Error { + fn from(err: RuntimeErrorType) -> Self { + Error::Runtime(err, None) + } +} + +impl From for Error { + fn from(err: CheckErrors) -> Self { + Error::Unchecked(err) + } +} + +impl From for Error { + fn from(err: ShortReturnType) -> Self { + Error::ShortReturn(err) + } +} + +impl From for Error { + fn from(err: InterpreterError) -> Self { + Error::Interpreter(err) + } +} + +#[cfg(any(test, feature = "testing"))] +impl From for () { + fn from(_err: Error) -> Self {} +} + +impl From for Value { + fn from(val: ShortReturnType) -> Self { + match val { + ShortReturnType::ExpectedValue(v) => v, + ShortReturnType::AssertionFailed(v) => v, + } + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn equality() { + assert_eq!( + Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))), + Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))) + ); + assert_eq!( + Error::Interpreter(InterpreterError::InterpreterError("".to_string())), + Error::Interpreter(InterpreterError::InterpreterError("".to_string())) + ); + assert!( + Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))) + != Error::Interpreter(InterpreterError::InterpreterError("".to_string())) + ); + } +} diff --git a/clarity-serialization/src/execution_cost.rs b/clarity-serialization/src/execution_cost.rs new file mode 100644 index 0000000000..d261eb00c1 --- /dev/null +++ b/clarity-serialization/src/execution_cost.rs @@ -0,0 +1,217 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
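Illustrative aside (not part of the patch): a sketch of how downstream code is expected to bubble the relocated error types through the umbrella `Error` enum, assuming `InterpreterResult` keeps its usual `Result<R, Error>` shape and that `types::Value` is exported as elsewhere in this crate. `demand_argument_count` and `short_circuit` are hypothetical helpers.

use clarity_serialization::errors::{CheckErrors, Error, InterpreterResult, ShortReturnType};
use clarity_serialization::types::Value;

// Hypothetical helper: CheckErrors converts into Error via From, so `?` and
// `.into()` both work for bubbling it out of an InterpreterResult.
fn demand_argument_count(args: &[Value], expected: usize) -> InterpreterResult<()> {
    if args.len() != expected {
        return Err(CheckErrors::IncorrectArgumentCount(expected, args.len()).into());
    }
    Ok(())
}

// Hypothetical helper: ShortReturn errors carry the interrupting value with them;
// the From<ShortReturnType> for Value impl above recovers it on the other side.
fn short_circuit(value: Value) -> InterpreterResult<Value> {
    Err(Error::ShortReturn(ShortReturnType::ExpectedValue(value)))
}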
+use std::{cmp, fmt}; + +#[cfg(feature = "rusqlite")] +use rusqlite::{ + ToSql, + types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef}, +}; + +use crate::errors::CostErrors; + +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq, Hash)] +pub struct ExecutionCost { + pub write_length: u64, + pub write_count: u64, + pub read_length: u64, + pub read_count: u64, + pub runtime: u64, +} + +impl fmt::Display for ExecutionCost { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "{{\"runtime\": {}, \"write_len\": {}, \"write_cnt\": {}, \"read_len\": {}, \"read_cnt\": {}}}", + self.runtime, self.write_length, self.write_count, self.read_length, self.read_count + ) + } +} + +impl ExecutionCost { + pub const ZERO: Self = Self { + runtime: 0, + write_length: 0, + read_count: 0, + write_count: 0, + read_length: 0, + }; + + /// Returns the percentage of self consumed in `numerator`'s largest proportion dimension. + pub fn proportion_largest_dimension(&self, numerator: &ExecutionCost) -> u64 { + // max() should always return because there are > 0 elements + #[allow(clippy::expect_used)] + *[ + numerator.runtime / cmp::max(1, self.runtime / 100), + numerator.write_length / cmp::max(1, self.write_length / 100), + numerator.write_count / cmp::max(1, self.write_count / 100), + numerator.read_length / cmp::max(1, self.read_length / 100), + numerator.read_count / cmp::max(1, self.read_count / 100), + ] + .iter() + .max() + .expect("BUG: should find maximum") + } + + /// Returns the dot product of this execution cost with `resolution`/block_limit + /// This provides a scalar value representing the cumulative consumption + /// of `self` in the provided block_limit. + pub fn proportion_dot_product(&self, block_limit: &ExecutionCost, resolution: u64) -> u64 { + [ + // each field here is calculating `r * self / limit`, using f64 + // use MAX(1, block_limit) to guard against divide by zero + // use MIN(1, self/block_limit) to guard against self > block_limit + resolution as f64 + * 1_f64.min(self.runtime as f64 / 1_f64.max(block_limit.runtime as f64)), + resolution as f64 + * 1_f64.min(self.read_count as f64 / 1_f64.max(block_limit.read_count as f64)), + resolution as f64 + * 1_f64.min(self.write_count as f64 / 1_f64.max(block_limit.write_count as f64)), + resolution as f64 + * 1_f64.min(self.read_length as f64 / 1_f64.max(block_limit.read_length as f64)), + resolution as f64 + * 1_f64.min(self.write_length as f64 / 1_f64.max(block_limit.write_length as f64)), + ] + .iter() + .fold(0, |acc, dim| acc.saturating_add(cmp::max(*dim as u64, 1))) + } + + pub fn max_value() -> ExecutionCost { + Self { + runtime: u64::MAX, + write_length: u64::MAX, + read_count: u64::MAX, + write_count: u64::MAX, + read_length: u64::MAX, + } + } + + pub fn runtime(runtime: u64) -> ExecutionCost { + Self { + runtime, + write_length: 0, + read_count: 0, + write_count: 0, + read_length: 0, + } + } + + pub fn add_runtime(&mut self, runtime: u64) -> Result<(), CostErrors> { + self.runtime = self.runtime.cost_overflow_add(runtime)?; + Ok(()) + } + + pub fn add(&mut self, other: &ExecutionCost) -> Result<(), CostErrors> { + self.runtime = self.runtime.cost_overflow_add(other.runtime)?; + self.read_count = self.read_count.cost_overflow_add(other.read_count)?; + self.read_length = self.read_length.cost_overflow_add(other.read_length)?; + self.write_length = self.write_length.cost_overflow_add(other.write_length)?; + self.write_count = self.write_count.cost_overflow_add(other.write_count)?; + Ok(()) + } + + pub fn 
sub(&mut self, other: &ExecutionCost) -> Result<(), CostErrors> { + self.runtime = self.runtime.cost_overflow_sub(other.runtime)?; + self.read_count = self.read_count.cost_overflow_sub(other.read_count)?; + self.read_length = self.read_length.cost_overflow_sub(other.read_length)?; + self.write_length = self.write_length.cost_overflow_sub(other.write_length)?; + self.write_count = self.write_count.cost_overflow_sub(other.write_count)?; + Ok(()) + } + + pub fn multiply(&mut self, times: u64) -> Result<(), CostErrors> { + self.runtime = self.runtime.cost_overflow_mul(times)?; + self.read_count = self.read_count.cost_overflow_mul(times)?; + self.read_length = self.read_length.cost_overflow_mul(times)?; + self.write_length = self.write_length.cost_overflow_mul(times)?; + self.write_count = self.write_count.cost_overflow_mul(times)?; + Ok(()) + } + + pub fn divide(&mut self, divisor: u64) -> Result<(), CostErrors> { + self.runtime = self.runtime.cost_overflow_div(divisor)?; + self.read_count = self.read_count.cost_overflow_div(divisor)?; + self.read_length = self.read_length.cost_overflow_div(divisor)?; + self.write_length = self.write_length.cost_overflow_div(divisor)?; + self.write_count = self.write_count.cost_overflow_div(divisor)?; + Ok(()) + } + + /// Returns whether or not this cost exceeds any dimension of the + /// other cost. + pub fn exceeds(&self, other: &ExecutionCost) -> bool { + self.runtime > other.runtime + || self.write_length > other.write_length + || self.write_count > other.write_count + || self.read_count > other.read_count + || self.read_length > other.read_length + } + + pub fn max_cost(first: ExecutionCost, second: ExecutionCost) -> ExecutionCost { + Self { + runtime: first.runtime.max(second.runtime), + write_length: first.write_length.max(second.write_length), + write_count: first.write_count.max(second.write_count), + read_count: first.read_count.max(second.read_count), + read_length: first.read_length.max(second.read_length), + } + } + + pub fn is_zero(&self) -> bool { + *self == Self::ZERO + } +} + +pub trait CostOverflowingMath { + fn cost_overflow_mul(self, other: T) -> Result; + fn cost_overflow_add(self, other: T) -> Result; + fn cost_overflow_sub(self, other: T) -> Result; + fn cost_overflow_div(self, other: T) -> Result; +} + +impl CostOverflowingMath for u64 { + fn cost_overflow_mul(self, other: u64) -> Result { + self.checked_mul(other).ok_or(CostErrors::CostOverflow) + } + fn cost_overflow_add(self, other: u64) -> Result { + self.checked_add(other).ok_or(CostErrors::CostOverflow) + } + fn cost_overflow_sub(self, other: u64) -> Result { + self.checked_sub(other).ok_or(CostErrors::CostOverflow) + } + fn cost_overflow_div(self, other: u64) -> Result { + self.checked_div(other).ok_or(CostErrors::CostOverflow) + } +} + +#[cfg(feature = "rusqlite")] +impl ToSql for ExecutionCost { + fn to_sql(&self) -> rusqlite::Result> { + let val = serde_json::to_string(self) + .map_err(|e| rusqlite::Error::ToSqlConversionFailure(Box::new(e)))?; + Ok(ToSqlOutput::from(val)) + } +} + +#[cfg(feature = "rusqlite")] +impl FromSql for ExecutionCost { + fn column_result(value: ValueRef) -> FromSqlResult { + let str_val = String::column_result(value)?; + let parsed = serde_json::from_str(&str_val) + .map_err(|e| rusqlite::types::FromSqlError::Other(Box::new(e)))?; + Ok(parsed) + } +} diff --git a/clarity-serialization/src/lib.rs b/clarity-serialization/src/lib.rs index 054d8d0a1b..de51f53915 100644 --- a/clarity-serialization/src/lib.rs +++ b/clarity-serialization/src/lib.rs @@ -24,14 
+24,19 @@ pub use stacks_common::{ impl_byte_array_serde, types as stacks_types, util, }; +pub mod diagnostic; pub mod errors; +pub mod execution_cost; pub mod representations; +pub mod token; pub mod types; -pub use errors::CodecError; +pub use errors::Error; pub use representations::{ClarityName, ContractName}; pub use types::Value; +pub const MAX_CALL_STACK_DEPTH: usize = 64; + #[cfg(test)] pub mod tests; diff --git a/clarity-serialization/src/representations.rs b/clarity-serialization/src/representations.rs index 74bffc2a53..f68d35ee2e 100644 --- a/clarity-serialization/src/representations.rs +++ b/clarity-serialization/src/representations.rs @@ -22,7 +22,9 @@ use lazy_static::lazy_static; use regex::Regex; use stacks_common::codec::{Error as codec_error, StacksMessageCodec, read_next, write_next}; -use crate::errors::CodecError; +use crate::Value; +use crate::errors::RuntimeErrorType; +use crate::types::TraitIdentifier; pub const CONTRACT_MIN_NAME_LENGTH: usize = 1; pub const CONTRACT_MAX_NAME_LENGTH: usize = 40; @@ -64,8 +66,8 @@ guarded_string!( "ClarityName", CLARITY_NAME_REGEX, MAX_STRING_LEN, - CodecError, - CodecError::InvalidClarityName + RuntimeErrorType, + RuntimeErrorType::BadNameValue ); guarded_string!( @@ -73,8 +75,8 @@ guarded_string!( "ContractName", CONTRACT_NAME_REGEX, MAX_STRING_LEN, - CodecError, - CodecError::InvalidContractName + RuntimeErrorType, + RuntimeErrorType::BadNameValue ); impl StacksMessageCodec for ClarityName { @@ -160,3 +162,513 @@ impl StacksMessageCodec for ContractName { Ok(name) } } + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub enum PreSymbolicExpressionType { + AtomValue(Value), + Atom(ClarityName), + List(Vec), + Tuple(Vec), + SugaredContractIdentifier(ContractName), + SugaredFieldIdentifier(ContractName, ClarityName), + FieldIdentifier(TraitIdentifier), + TraitReference(ClarityName), + Comment(String), + Placeholder(String), +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct PreSymbolicExpression { + pub pre_expr: PreSymbolicExpressionType, + pub id: u64, + + #[cfg(feature = "developer-mode")] + pub span: Span, +} + +pub trait SymbolicExpressionCommon { + type S: SymbolicExpressionCommon; + fn set_id(&mut self, id: u64); + fn match_list_mut(&mut self) -> Option<&mut [Self::S]>; +} + +impl SymbolicExpressionCommon for PreSymbolicExpression { + type S = PreSymbolicExpression; + fn set_id(&mut self, id: u64) { + self.id = id; + } + fn match_list_mut(&mut self) -> Option<&mut [PreSymbolicExpression]> { + if let PreSymbolicExpressionType::List(ref mut list) = self.pre_expr { + Some(list) + } else { + None + } + } +} + +impl SymbolicExpressionCommon for SymbolicExpression { + type S = SymbolicExpression; + fn set_id(&mut self, id: u64) { + self.id = id; + } + fn match_list_mut(&mut self) -> Option<&mut [SymbolicExpression]> { + if let SymbolicExpressionType::List(ref mut list) = self.expr { + Some(list) + } else { + None + } + } +} + +impl PreSymbolicExpression { + #[cfg(feature = "developer-mode")] + fn cons() -> PreSymbolicExpression { + PreSymbolicExpression { + id: 0, + span: Span::zero(), + pre_expr: PreSymbolicExpressionType::AtomValue(Value::Bool(false)), + } + } + #[cfg(not(feature = "developer-mode"))] + fn cons() -> PreSymbolicExpression { + PreSymbolicExpression { + id: 0, + pre_expr: PreSymbolicExpressionType::AtomValue(Value::Bool(false)), + } + } + + #[cfg(feature = "developer-mode")] + pub fn set_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { 
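// In developer-mode builds this records the expression's source location;
// the cfg(not(feature = "developer-mode")) variant below is a no-op.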
+ self.span = Span { + start_line, + start_column, + end_line, + end_column, + } + } + + #[cfg(not(feature = "developer-mode"))] + pub fn set_span( + &mut self, + _start_line: u32, + _start_column: u32, + _end_line: u32, + _end_column: u32, + ) { + } + + #[cfg(feature = "developer-mode")] + pub fn copy_span(&mut self, src: &Span) { + self.span = src.clone(); + } + + #[cfg(not(feature = "developer-mode"))] + pub fn copy_span(&mut self, _src: &Span) {} + + #[cfg(feature = "developer-mode")] + pub fn span(&self) -> &Span { + &self.span + } + + #[cfg(not(feature = "developer-mode"))] + pub fn span(&self) -> &Span { + &Span::ZERO + } + + pub fn sugared_contract_identifier(val: ContractName) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::SugaredContractIdentifier(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn sugared_field_identifier( + contract_name: ContractName, + name: ClarityName, + ) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::SugaredFieldIdentifier(contract_name, name), + ..PreSymbolicExpression::cons() + } + } + + pub fn atom_value(val: Value) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::AtomValue(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn atom(val: ClarityName) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::Atom(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn trait_reference(val: ClarityName) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::TraitReference(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn field_identifier(val: TraitIdentifier) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::FieldIdentifier(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn list(val: Vec) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::List(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn tuple(val: Vec) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::Tuple(val), + ..PreSymbolicExpression::cons() + } + } + + pub fn placeholder(s: String) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::Placeholder(s), + ..PreSymbolicExpression::cons() + } + } + + pub fn comment(comment: String) -> PreSymbolicExpression { + PreSymbolicExpression { + pre_expr: PreSymbolicExpressionType::Comment(comment), + ..PreSymbolicExpression::cons() + } + } + + pub fn match_trait_reference(&self) -> Option<&ClarityName> { + if let PreSymbolicExpressionType::TraitReference(ref value) = self.pre_expr { + Some(value) + } else { + None + } + } + + pub fn match_atom_value(&self) -> Option<&Value> { + if let PreSymbolicExpressionType::AtomValue(ref value) = self.pre_expr { + Some(value) + } else { + None + } + } + + pub fn match_atom(&self) -> Option<&ClarityName> { + if let PreSymbolicExpressionType::Atom(ref value) = self.pre_expr { + Some(value) + } else { + None + } + } + + pub fn match_list(&self) -> Option<&[PreSymbolicExpression]> { + if let PreSymbolicExpressionType::List(ref list) = self.pre_expr { + Some(list) + } else { + None + } + } + + pub fn match_field_identifier(&self) -> Option<&TraitIdentifier> { + if let PreSymbolicExpressionType::FieldIdentifier(ref value) = self.pre_expr { + Some(value) + } else { + None + } + } + + pub fn match_placeholder(&self) -> 
Option<&str> { + if let PreSymbolicExpressionType::Placeholder(ref s) = self.pre_expr { + Some(s.as_str()) + } else { + None + } + } + + pub fn match_comment(&self) -> Option<&str> { + if let PreSymbolicExpressionType::Comment(ref s) = self.pre_expr { + Some(s.as_str()) + } else { + None + } + } +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub enum SymbolicExpressionType { + AtomValue(Value), + Atom(ClarityName), + List(Vec), + LiteralValue(Value), + Field(TraitIdentifier), + TraitReference(ClarityName, TraitDefinition), +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub enum TraitDefinition { + Defined(TraitIdentifier), + Imported(TraitIdentifier), +} + +pub fn depth_traverse(expr: &SymbolicExpression, mut visit: F) -> Result, E> +where + F: FnMut(&SymbolicExpression) -> Result, +{ + let mut stack = vec![]; + let mut last = None; + stack.push(expr); + while let Some(current) = stack.pop() { + last = Some(visit(current)?); + if let Some(list) = current.match_list() { + for item in list.iter() { + stack.push(item); + } + } + } + + Ok(last) +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct SymbolicExpression { + pub expr: SymbolicExpressionType, + // this id field is used by compiler passes to store information in + // maps. + // first pass -> fill out unique ids + // ...typing passes -> store information in hashmap according to id. + // + // this is a fairly standard technique in compiler passes + pub id: u64, + + #[cfg(feature = "developer-mode")] + #[serde(default)] + pub span: Span, + + #[cfg(feature = "developer-mode")] + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub pre_comments: Vec<(String, Span)>, + #[cfg(feature = "developer-mode")] + #[serde(default, skip_serializing_if = "Option::is_none")] + pub end_line_comment: Option, + #[cfg(feature = "developer-mode")] + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub post_comments: Vec<(String, Span)>, +} + +impl SymbolicExpression { + #[cfg(feature = "developer-mode")] + fn cons() -> SymbolicExpression { + SymbolicExpression { + id: 0, + expr: SymbolicExpressionType::AtomValue(Value::Bool(false)), + span: Span::zero(), + pre_comments: vec![], + end_line_comment: None, + post_comments: vec![], + } + } + #[cfg(not(feature = "developer-mode"))] + fn cons() -> SymbolicExpression { + SymbolicExpression { + id: 0, + expr: SymbolicExpressionType::AtomValue(Value::Bool(false)), + } + } + + #[cfg(feature = "developer-mode")] + pub fn set_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { + self.span = Span { + start_line, + start_column, + end_line, + end_column, + } + } + + #[cfg(not(feature = "developer-mode"))] + pub fn set_span( + &mut self, + _start_line: u32, + _start_column: u32, + _end_line: u32, + _end_column: u32, + ) { + } + + #[cfg(feature = "developer-mode")] + pub fn copy_span(&mut self, src: &Span) { + self.span = src.clone(); + } + + #[cfg(not(feature = "developer-mode"))] + pub fn copy_span(&mut self, _src: &Span) {} + + #[cfg(feature = "developer-mode")] + pub fn span(&self) -> &Span { + &self.span + } + + #[cfg(not(feature = "developer-mode"))] + pub fn span(&self) -> &Span { + &Span::ZERO + } + + pub fn atom_value(val: Value) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::AtomValue(val), + ..SymbolicExpression::cons() + } + } + + pub fn atom(val: ClarityName) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::Atom(val), + 
..SymbolicExpression::cons() + } + } + + pub fn literal_value(val: Value) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::LiteralValue(val), + ..SymbolicExpression::cons() + } + } + + pub fn list(val: Vec) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::List(val), + ..SymbolicExpression::cons() + } + } + + pub fn trait_reference( + val: ClarityName, + trait_definition: TraitDefinition, + ) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::TraitReference(val, trait_definition), + ..SymbolicExpression::cons() + } + } + + pub fn field(val: TraitIdentifier) -> SymbolicExpression { + SymbolicExpression { + expr: SymbolicExpressionType::Field(val), + ..SymbolicExpression::cons() + } + } + + // These match functions are used to simplify calling code + // areas a lot. There is a frequent code pattern where + // a block _expects_ specific symbolic expressions, leading + // to a lot of very verbose `if let x = {` expressions. + + pub fn match_list(&self) -> Option<&[SymbolicExpression]> { + if let SymbolicExpressionType::List(ref list) = self.expr { + Some(list) + } else { + None + } + } + + pub fn match_atom(&self) -> Option<&ClarityName> { + if let SymbolicExpressionType::Atom(ref value) = self.expr { + Some(value) + } else { + None + } + } + + pub fn match_atom_value(&self) -> Option<&Value> { + if let SymbolicExpressionType::AtomValue(ref value) = self.expr { + Some(value) + } else { + None + } + } + + pub fn match_literal_value(&self) -> Option<&Value> { + if let SymbolicExpressionType::LiteralValue(ref value) = self.expr { + Some(value) + } else { + None + } + } + + pub fn match_trait_reference(&self) -> Option<&ClarityName> { + if let SymbolicExpressionType::TraitReference(ref value, _) = self.expr { + Some(value) + } else { + None + } + } + + pub fn match_field(&self) -> Option<&TraitIdentifier> { + if let SymbolicExpressionType::Field(ref value) = self.expr { + Some(value) + } else { + None + } + } +} + +impl fmt::Display for SymbolicExpression { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.expr { + SymbolicExpressionType::List(ref list) => { + write!(f, "(")?; + for item in list.iter() { + write!(f, " {item}")?; + } + write!(f, " )")?; + } + SymbolicExpressionType::Atom(ref value) => { + write!(f, "{}", &**value)?; + } + SymbolicExpressionType::AtomValue(ref value) + | SymbolicExpressionType::LiteralValue(ref value) => { + write!(f, "{value}")?; + } + SymbolicExpressionType::TraitReference(ref value, _) => { + write!(f, "<{}>", &**value)?; + } + SymbolicExpressionType::Field(ref value) => { + write!(f, "<{value}>")?; + } + }; + + Ok(()) + } +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +pub struct Span { + pub start_line: u32, + pub start_column: u32, + pub end_line: u32, + pub end_column: u32, +} + +impl Span { + pub const ZERO: Span = Span { + start_line: 0, + start_column: 0, + end_line: 0, + end_column: 0, + }; + + pub fn zero() -> Self { + Self::default() + } +} diff --git a/clarity-serialization/src/tests/representations.rs b/clarity-serialization/src/tests/representations.rs index b4e2219d4f..ac395809c7 100644 --- a/clarity-serialization/src/tests/representations.rs +++ b/clarity-serialization/src/tests/representations.rs @@ -15,7 +15,7 @@ use rstest::rstest; -use crate::errors::CodecError; +use crate::errors::RuntimeErrorType; use crate::representations::{ CONTRACT_MAX_NAME_LENGTH, CONTRACT_MIN_NAME_LENGTH, 
ClarityName, ContractName, MAX_STRING_LEN, }; @@ -73,7 +73,7 @@ fn test_clarity_name_invalid(#[case] name: &str) { assert!(result.is_err()); assert!(matches!( result.unwrap_err(), - CodecError::InvalidClarityName(_, _) + RuntimeErrorType::BadNameValue(_, _) )); } @@ -99,7 +99,7 @@ fn test_clarity_name_serialization(#[case] name: &str) { // the first byte is the length of the buffer. #[rstest] #[case::invalid_utf8(vec![4, 0xFF, 0xFE, 0xFD, 0xFC], "Failed to parse Clarity name: could not contruct from utf8")] -#[case::invalid_name(vec![2, b'2', b'i'], "Failed to parse Clarity name: InvalidClarityName(\"ClarityName\", \"2i\")")] // starts with number +#[case::invalid_name(vec![2, b'2', b'i'], "Failed to parse Clarity name: BadNameValue(\"ClarityName\", \"2i\")")] // starts with number #[case::too_long(vec![MAX_STRING_LEN + 1], "Failed to deserialize clarity name: too long")] #[case::wrong_length(vec![3, b'a'], "failed to fill whole buffer")] fn test_clarity_name_deserialization_errors(#[case] buffer: Vec, #[case] error_message: &str) { @@ -157,7 +157,7 @@ fn test_contract_name_invalid(#[case] name: &str) { assert!(result.is_err()); assert!(matches!( result.unwrap_err(), - CodecError::InvalidContractName(_, _) + RuntimeErrorType::BadNameValue(_, _) )); } @@ -201,7 +201,7 @@ fn test_contract_name_serialization_too_long() { // the first byte is the length of the buffer. #[rstest] #[case::invalid_utf8(vec![4, 0xFF, 0xFE, 0xFD, 0xFC], "Failed to parse Contract name: could not construct from utf8")] -#[case::invalid_name(vec![2, b'2', b'i'], "Failed to parse Contract name: InvalidContractName(\"ContractName\", \"2i\")")] // starts with number +#[case::invalid_name(vec![2, b'2', b'i'], "Failed to parse Contract name: BadNameValue(\"ContractName\", \"2i\")")] // starts with number #[case::too_long(vec![MAX_STRING_LEN + 1], &format!("Failed to deserialize contract name: too short or too long: {}", MAX_STRING_LEN + 1))] #[case::wrong_length(vec![3, b'a'], "failed to fill whole buffer")] fn test_contract_name_deserialization_errors(#[case] buffer: Vec, #[case] error_message: &str) { diff --git a/clarity-serialization/src/tests/types/mod.rs b/clarity-serialization/src/tests/types/mod.rs index 40f6aec738..8dd540986b 100644 --- a/clarity-serialization/src/tests/types/mod.rs +++ b/clarity-serialization/src/tests/types/mod.rs @@ -17,7 +17,7 @@ mod signatures; use stacks_common::types::StacksEpochId; -use crate::CodecError; +use crate::errors::{CheckErrors, InterpreterError}; use crate::types::{ BuffData, ListTypeData, MAX_VALUE_SIZE, PrincipalData, SequenceData, TupleData, TypeSignature, Value, @@ -25,42 +25,42 @@ use crate::types::{ #[test] fn test_constructors() { - assert!(matches!( + assert_eq!( Value::list_with_type( &StacksEpochId::latest(), vec![Value::Int(5), Value::Int(2)], ListTypeData::new_list(TypeSignature::BoolType, 3).unwrap() ), - Err(CodecError::FailureConstructingListWithType) - )); - assert!(matches!( + Err(InterpreterError::FailureConstructingListWithType.into()) + ); + assert_eq!( ListTypeData::new_list(TypeSignature::IntType, MAX_VALUE_SIZE), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge) + ); - assert!(matches!( + assert_eq!( Value::buff_from(vec![0; (MAX_VALUE_SIZE + 1) as usize]), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge.into()) + ); // Test that wrappers (okay, error, some) // correctly error when _they_ cause the value size // to exceed the max value size (note, the buffer constructor // isn't causing the error). 
- assert!(matches!( + assert_eq!( Value::okay(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge.into()) + ); - assert!(matches!( + assert_eq!( Value::error(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge.into()) + ); - assert!(matches!( + assert_eq!( Value::some(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge.into()) + ); // Test that the depth limit is correctly enforced: // for tuples, lists, somes, okays, errors. @@ -81,27 +81,27 @@ fn test_constructors() { )?)?)?)?) }; let inner_value = cons().unwrap(); - assert!(matches!( + assert_eq!( TupleData::from_data(vec![("a".into(), inner_value.clone())]), - Err(CodecError::TypeSignatureTooDeep) - )); + Err(CheckErrors::TypeSignatureTooDeep.into()) + ); - assert!(matches!( + assert_eq!( Value::list_from(vec![inner_value.clone()]), - Err(CodecError::TypeSignatureTooDeep) - )); - assert!(matches!( + Err(CheckErrors::TypeSignatureTooDeep.into()) + ); + assert_eq!( Value::okay(inner_value.clone()), - Err(CodecError::TypeSignatureTooDeep) - )); - assert!(matches!( + Err(CheckErrors::TypeSignatureTooDeep.into()) + ); + assert_eq!( Value::error(inner_value.clone()), - Err(CodecError::TypeSignatureTooDeep) - )); - assert!(matches!( + Err(CheckErrors::TypeSignatureTooDeep.into()) + ); + assert_eq!( Value::some(inner_value), - Err(CodecError::TypeSignatureTooDeep) - )); + Err(CheckErrors::TypeSignatureTooDeep.into()) + ); if std::env::var("CIRCLE_TESTING") == Ok("1".to_string()) { println!("Skipping allocation test on Circle"); @@ -110,10 +110,10 @@ fn test_constructors() { // on 32-bit archs, this error cannot even happen, so don't test (and cause an overflow panic) if (u32::MAX as usize) < usize::MAX { - assert!(matches!( + assert_eq!( Value::buff_from(vec![0; (u32::MAX as usize) + 10]), - Err(CodecError::ValueTooLarge) - )); + Err(CheckErrors::ValueTooLarge.into()) + ); } } diff --git a/clarity-serialization/src/tests/types/serialization.rs b/clarity-serialization/src/tests/types/serialization.rs index 5e6112d061..4d3f63df32 100644 --- a/clarity-serialization/src/tests/types/serialization.rs +++ b/clarity-serialization/src/tests/types/serialization.rs @@ -14,7 +14,8 @@ // along with this program. If not, see . use std::io::Write; -use crate::errors::CodecError; +use crate::errors::CheckErrors; +use crate::types::serialization::SerializationError; use crate::types::{ MAX_VALUE_SIZE, PrincipalData, QualifiedContractIdentifier, StandardPrincipalData, TupleData, TypeSignature, Value, @@ -45,7 +46,7 @@ fn test_deser_ser(v: Value) { fn test_bad_expectation(v: Value, e: TypeSignature) { assert!(matches!( Value::try_deserialize_hex(&v.serialize_to_hex().unwrap(), &e, false).unwrap_err(), - CodecError::DeserializeExpected(_) + SerializationError::DeserializeExpected(_) )); } @@ -71,10 +72,10 @@ fn test_lists() { ) .unwrap(); - assert!(matches!( + assert_eq!( Value::deserialize_read(&mut too_big.as_slice(), None, false).unwrap_err(), - CodecError::Deserialization(e) if e == "Illegal list type" - )); + "Illegal list type".into() + ); // make a list that says it is longer than it is! // this describes a list of size MAX_VALUE_SIZE of Value::Bool(true)'s, but is actually only 59 bools. 
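// The test migrations in this diff swap `matches!` for `assert_eq!` on concrete error
// values; this is a sketch of why that works, based on the `PartialEq` impls added in
// `errors.rs` above. `equality_examples` is a hypothetical helper for illustration only.
use crate::errors::{CheckErrors, Error, IncomparableError, RuntimeErrorType};

fn equality_examples() {
    // `.into()` lifts the concrete enum into `Error` (`From<CheckErrors> for Error`),
    // so both sides of an assertion have the same type.
    let lifted: Error = CheckErrors::ValueTooLarge.into();
    assert_eq!(lifted, Error::Unchecked(CheckErrors::ValueTooLarge));

    // Runtime errors compare on the error variant only; the optional stack
    // trace is ignored, which keeps assertions stable across call sites.
    assert_eq!(
        Error::Runtime(RuntimeErrorType::ArithmeticOverflow, None),
        Error::Runtime(RuntimeErrorType::ArithmeticOverflow, Some(vec![])),
    );

    // Wrapped foreign errors are deliberately never equal:
    // `IncomparableError::eq` always returns false.
    assert_ne!(
        IncomparableError { err: 0u8 },
        IncomparableError { err: 0u8 }
    );
}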
@@ -98,11 +99,11 @@ fn test_lists() { match Value::deserialize_read(&mut eof.as_slice(), None, false) { Ok(_) => panic!("Accidentally parsed truncated slice"), Err(eres) => match eres { - CodecError::Io(io_e) => match io_e.kind() { + SerializationError::IOError(ioe) => match ioe.err.kind() { std::io::ErrorKind::UnexpectedEof => {} - _ => panic!("Invalid I/O error: {:?}", &io_e), + _ => panic!("Invalid I/O error: {ioe:?}"), }, - _ => panic!("Invalid deserialize error: {:?}", &eres), + _ => panic!("Invalid deserialize error: {eres:?}"), }, } } @@ -240,7 +241,7 @@ fn test_tuples() { false ) .unwrap_err(), - CodecError::DeserializeExpected(_) + SerializationError::DeserializeExpected(_) )); // field type mismatch @@ -251,7 +252,7 @@ fn test_tuples() { false ) .unwrap_err(), - CodecError::DeserializeExpected(_) + SerializationError::DeserializeExpected(_) )); // field not-present in expected @@ -262,17 +263,14 @@ fn test_tuples() { false ) .unwrap_err(), - CodecError::DeserializeExpected(_) + SerializationError::DeserializeExpected(_) )); } #[test] fn test_vectors() { let tests = [ - ( - "1010", - Err(CodecError::Deserialization("Bad type prefix".to_string())), - ), + ("1010", Err("Bad type prefix".into())), ("0000000000000000000000000000000001", Ok(Value::Int(1))), ("00ffffffffffffffffffffffffffffffff", Ok(Value::Int(-1))), ("0100000000000000000000000000000001", Ok(Value::UInt(1))), @@ -373,10 +371,10 @@ fn try_deser_large_list() { 11, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, ]; - assert!(matches!( + assert_eq!( Value::try_deserialize_bytes_untyped(&buff).unwrap_err(), - CodecError::Deserialization(e) if e == "Illegal list type" - )); + SerializationError::DeserializationError("Illegal list type".to_string()) + ); } #[test] @@ -385,19 +383,19 @@ fn try_deser_large_tuple() { 12, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, ]; - assert!(matches!( + assert_eq!( Value::try_deserialize_bytes_untyped(&buff).unwrap_err(), - CodecError::Deserialization(e) if e == "Illegal tuple type" - )); + SerializationError::DeserializationError("Illegal tuple type".to_string()) + ); } #[test] fn try_overflow_stack() { let input = "08080808080808080808070707080807080808080808080708080808080708080707080707080807080808080808080708080808080708080707080708070807080808080808080708080808080708080708080808080808080807070807080808080808070808070707080807070808070808080808070808070708070807080808080808080707080708070807080708080808080808070808080808070808070808080808080808080707080708080808080807080807070708080707080807080808080807080807070807080708080808080808070708070808080808080708080707070808070708080807080807070708"; - assert!(matches!( - Value::try_deserialize_hex_untyped(input), - Err(CodecError::TypeSignatureTooDeep) - )); + assert_eq!( + Err(CheckErrors::TypeSignatureTooDeep.into()), + Value::try_deserialize_hex_untyped(input) + ); } #[test] diff --git a/clarity-serialization/src/tests/types/signatures.rs b/clarity-serialization/src/tests/types/signatures.rs index d04949c183..3569cf4fbd 100644 --- a/clarity-serialization/src/tests/types/signatures.rs +++ b/clarity-serialization/src/tests/types/signatures.rs @@ -14,7 +14,7 @@ // along with this program. If not, see . 
use std::collections::HashSet; -use crate::errors::CodecError; +use crate::errors::CheckErrors; use crate::types::TypeSignature::{BoolType, IntType, ListUnionType, UIntType}; use crate::types::signatures::{CallableSubtype, TypeSignature}; use crate::types::{ @@ -517,11 +517,11 @@ fn test_least_supertype() { for pair in bad_pairs { matches!( TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap_err(), - CodecError::TypeError { .. } + CheckErrors::TypeError(..) ); matches!( TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap_err(), - CodecError::TypeError { .. } + CheckErrors::TypeError(..) ); } } diff --git a/clarity-serialization/src/token.rs b/clarity-serialization/src/token.rs new file mode 100644 index 0000000000..0411a0f07f --- /dev/null +++ b/clarity-serialization/src/token.rs @@ -0,0 +1,124 @@ +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . +use std::fmt::Display; + +use crate::representations::Span; + +#[derive(Debug, PartialEq, Clone)] +pub enum Token { + Eof, + Whitespace, + Lparen, + Rparen, + Lbrace, + Rbrace, + Colon, + Comma, + Dot, + Int(String), + Uint(String), + AsciiString(String), + Utf8String(String), + Bytes(String), + Principal(String), + Ident(String), + TraitIdent(String), + Plus, + Minus, + Multiply, + Divide, + Less, + LessEqual, + Greater, + GreaterEqual, + Comment(String), + Placeholder(String), // used to continue parsing after errors +} + +impl Display for Token { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use self::Token::*; + match self { + Eof => write!(f, "EOF"), + Whitespace => write!(f, "whitespace"), + Lparen => write!(f, "("), + Rparen => write!(f, ")"), + Lbrace => write!(f, "{{"), + Rbrace => write!(f, "}}"), + Colon => write!(f, ":"), + Comma => write!(f, ","), + Dot => write!(f, "."), + Int(_) => write!(f, "int"), + Uint(_) => write!(f, "uint"), + AsciiString(_) => write!(f, "string-ascii"), + Utf8String(_) => write!(f, "string-utf8"), + Bytes(_) => write!(f, "bytes"), + Principal(_) => write!(f, "principal"), + Ident(_) => write!(f, "identifier"), + TraitIdent(_) => write!(f, "trait-identifier"), + Plus => write!(f, "+"), + Minus => write!(f, "-"), + Multiply => write!(f, "*"), + Divide => write!(f, "/"), + Less => write!(f, "<"), + LessEqual => write!(f, "<="), + Greater => write!(f, ">"), + GreaterEqual => write!(f, ">="), + Comment(_) => write!(f, "comment"), + Placeholder(_) => write!(f, "placeholder"), + } + } +} + +impl Token { + pub fn reproduce(&self) -> String { + use self::Token::*; + match self { + Eof => "".to_string(), + Whitespace => " ".to_string(), + Lparen => "(".to_string(), + Rparen => ")".to_string(), + Lbrace => "{{".to_string(), + Rbrace => "}}".to_string(), + Colon => ":".to_string(), + Comma => ",".to_string(), + Dot => ".".to_string(), + Int(s) => s.to_string(), + Uint(s) => format!("u{s}"), + AsciiString(s) => format!("\"{s}\""), + Utf8String(s) => 
s.to_string(), + Bytes(s) => format!("0x{s}"), + Principal(s) => format!("'{s}"), + Ident(s) => s.to_string(), + TraitIdent(s) => format!("<{s}>"), + Plus => "+".to_string(), + Minus => "-".to_string(), + Multiply => "*".to_string(), + Divide => "/".to_string(), + Less => "<".to_string(), + LessEqual => "<=".to_string(), + Greater => ">".to_string(), + GreaterEqual => ">=".to_string(), + Comment(c) => format!(";; {c}"), + Placeholder(s) => s.to_string(), + } + } +} + +#[derive(Clone, Debug)] +pub struct PlacedToken { + pub span: Span, + pub token: Token, +} diff --git a/clarity-serialization/src/types/mod.rs b/clarity-serialization/src/types/mod.rs index 61f69ec174..6b347583da 100644 --- a/clarity-serialization/src/types/mod.rs +++ b/clarity-serialization/src/types/mod.rs @@ -12,6 +12,7 @@ // // You should have received a copy of the GNU General Public License // along with this program. If not, see . +#![allow(clippy::result_large_err)] pub mod serialization; pub mod signatures; @@ -25,6 +26,8 @@ use serde::{Deserialize, Serialize}; use stacks_common::address::c32; use stacks_common::types::StacksEpochId; use stacks_common::types::chainstate::StacksAddress; +#[cfg(feature = "testing")] +use stacks_common::types::chainstate::StacksPrivateKey; use stacks_common::util::hash; pub use self::signatures::{ @@ -32,8 +35,8 @@ pub use self::signatures::{ ListTypeData, SequenceSubtype, StringSubtype, StringUTF8Length, TupleTypeSignature, TypeSignature, }; -use crate::errors::CodecError; -use crate::representations::{ClarityName, ContractName}; +use crate::errors::{CheckErrors, InterpreterError, InterpreterResult as Result, RuntimeErrorType}; +use crate::representations::{ClarityName, ContractName, SymbolicExpression}; // use crate::vm::ClarityVersion; pub const MAX_VALUE_SIZE: u32 = 1024 * 1024; // 1MB @@ -76,9 +79,9 @@ impl StandardPrincipalData { } impl StandardPrincipalData { - pub fn new(version: u8, bytes: [u8; 20]) -> Result { + pub fn new(version: u8, bytes: [u8; 20]) -> std::result::Result { if version >= 32 { - return Err(CodecError::Expect("Unexpected principal data".into())); + return Err(InterpreterError::Expect("Unexpected principal data".into())); } Ok(Self(version, bytes)) } @@ -122,6 +125,23 @@ impl fmt::Debug for StandardPrincipalData { } } +#[cfg(any(test, feature = "testing"))] +impl From<&StacksPrivateKey> for StandardPrincipalData { + fn from(o: &StacksPrivateKey) -> StandardPrincipalData { + use stacks_common::address::{AddressHashMode, C32_ADDRESS_VERSION_TESTNET_SINGLESIG}; + use stacks_common::types::chainstate::StacksPublicKey; + + let stacks_addr = StacksAddress::from_public_keys( + C32_ADDRESS_VERSION_TESTNET_SINGLESIG, + &AddressHashMode::SerializeP2PKH, + 1, + &vec![StacksPublicKey::from_private(o)], + ) + .unwrap(); + StandardPrincipalData::from(stacks_addr) + } +} + #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)] pub struct QualifiedContractIdentifier { pub issuer: StandardPrincipalData, @@ -133,7 +153,7 @@ impl QualifiedContractIdentifier { Self { issuer, name } } - pub fn local(name: &str) -> Result { + pub fn local(name: &str) -> Result { let name = name.to_string().try_into()?; Ok(Self::new(StandardPrincipalData::transient(), name)) } @@ -152,13 +172,14 @@ impl QualifiedContractIdentifier { self.issuer.1 == [0; 20] } - pub fn parse(literal: &str) -> Result { + pub fn parse(literal: &str) -> Result { let split: Vec<_> = literal.splitn(2, '.').collect(); if split.len() != 2 { - return Err(CodecError::ParseError( + return 
Err(RuntimeErrorType::ParseError( "Invalid principal literal: expected a `.` in a qualified contract name" .to_string(), - )); + ) + .into()); } let sender = PrincipalData::parse_standard_principal(split[0])?; let name = split[1].to_string().try_into()?; @@ -178,6 +199,13 @@ pub enum PrincipalData { Contract(QualifiedContractIdentifier), } +#[cfg(any(test, feature = "testing"))] +impl From<&StacksPrivateKey> for PrincipalData { + fn from(o: &StacksPrivateKey) -> PrincipalData { + PrincipalData::Standard(StandardPrincipalData::from(o)) + } +} + pub enum ContractIdentifier { Relative(ContractName), Qualified(QualifiedContractIdentifier), @@ -233,26 +261,27 @@ impl TraitIdentifier { } } - pub fn parse_fully_qualified(literal: &str) -> Result { + pub fn parse_fully_qualified(literal: &str) -> Result { let (issuer, contract_name, name) = Self::parse(literal)?; - let issuer = issuer.ok_or(CodecError::BadTypeConstruction)?; + let issuer = issuer.ok_or(RuntimeErrorType::BadTypeConstruction)?; Ok(TraitIdentifier::new(issuer, contract_name, name)) } - pub fn parse_sugared_syntax(literal: &str) -> Result<(ContractName, ClarityName), CodecError> { + pub fn parse_sugared_syntax(literal: &str) -> Result<(ContractName, ClarityName)> { let (_, contract_name, name) = Self::parse(literal)?; Ok((contract_name, name)) } pub fn parse( literal: &str, - ) -> Result<(Option, ContractName, ClarityName), CodecError> { + ) -> Result<(Option, ContractName, ClarityName)> { let split: Vec<_> = literal.splitn(3, '.').collect(); if split.len() != 3 { - return Err(CodecError::ParseError( + return Err(RuntimeErrorType::ParseError( "Invalid principal literal: expected a `.` in a qualified contract name" .to_string(), - )); + ) + .into()); } let issuer = match split[0].len() { @@ -290,7 +319,16 @@ pub enum SequenceData { } impl SequenceData { - pub fn element_size(&self) -> Result { + pub fn atom_values(&mut self) -> Result> { + match self { + SequenceData::Buffer(data) => data.atom_values(), + SequenceData::List(data) => data.atom_values(), + SequenceData::String(CharType::ASCII(data)) => data.atom_values(), + SequenceData::String(CharType::UTF8(data)) => data.atom_values(), + } + } + + pub fn element_size(&self) -> Result { let out = match self { SequenceData::Buffer(..) => TypeSignature::min_buffer()?.size(), SequenceData::List(data) => data.type_signature.get_list_item_type().size(), @@ -312,10 +350,8 @@ impl SequenceData { pub fn is_empty(&self) -> bool { self.len() == 0 } -} -impl SequenceData { - pub fn element_at(self, index: usize) -> Result, CodecError> { + pub fn element_at(self, index: usize) -> Result> { if self.len() <= index { return Ok(None); } @@ -324,7 +360,9 @@ impl SequenceData { SequenceData::List(mut data) => data.data.remove(index), SequenceData::String(CharType::ASCII(data)) => { Value::string_ascii_from_bytes(vec![data.data[index]]).map_err(|_| { - CodecError::Expect("BUG: failed to initialize single-byte ASCII buffer".into()) + InterpreterError::Expect( + "BUG: failed to initialize single-byte ASCII buffer".into(), + ) })? } SequenceData::String(CharType::UTF8(mut data)) => { @@ -337,12 +375,7 @@ impl SequenceData { Ok(Some(result)) } - pub fn replace_at( - self, - epoch: &StacksEpochId, - index: usize, - element: Value, - ) -> Result { + pub fn replace_at(self, epoch: &StacksEpochId, index: usize, element: Value) -> Result { let seq_length = self.len(); // Check that the length of the provided element is 1. 
In the case that SequenceData @@ -351,14 +384,14 @@ impl SequenceData { if let Value::Sequence(data) = &element { let elem_length = data.len(); if elem_length != 1 { - return Err(CodecError::BadTypeConstruction); + return Err(RuntimeErrorType::BadTypeConstruction.into()); } } else { - return Err(CodecError::BadTypeConstruction); + return Err(RuntimeErrorType::BadTypeConstruction.into()); } } if index >= seq_length { - return Err(CodecError::ValueOutOfBounds); + return Err(CheckErrors::ValueOutOfBounds.into()); } let new_seq_data = match (self, element) { @@ -369,7 +402,7 @@ impl SequenceData { (SequenceData::List(mut data), elem) => { let entry_type = data.type_signature.get_list_item_type(); if !entry_type.admits(epoch, &elem)? { - return Err(CodecError::ListTypesMustMatch); + return Err(CheckErrors::ListTypesMustMatch.into()); } data.data[index] = elem; SequenceData::List(data) @@ -388,13 +421,13 @@ impl SequenceData { data.data[index] = elem.data.swap_remove(0); SequenceData::String(CharType::UTF8(data)) } - _ => return Err(CodecError::ListTypesMustMatch), + _ => return Err(CheckErrors::ListTypesMustMatch.into()), }; Value::some(Value::Sequence(new_seq_data)) } - pub fn contains(&self, to_find: Value) -> Result, CodecError> { + pub fn contains(&self, to_find: Value) -> Result> { match self { SequenceData::Buffer(data) => { if let Value::Sequence(SequenceData::Buffer(to_find_vec)) = to_find { @@ -409,10 +442,7 @@ impl SequenceData { Ok(None) } } else { - Err(CodecError::TypeValueError { - expected: Box::new(TypeSignature::min_buffer()?), - found: Box::new(to_find), - }) + Err(CheckErrors::TypeValueError(TypeSignature::min_buffer()?, to_find).into()) } } SequenceData::List(data) => { @@ -437,10 +467,10 @@ impl SequenceData { Ok(None) } } else { - Err(CodecError::TypeValueError { - expected: Box::new(TypeSignature::min_string_ascii()?), - found: Box::new(to_find), - }) + Err( + CheckErrors::TypeValueError(TypeSignature::min_string_ascii()?, to_find) + .into(), + ) } } SequenceData::String(CharType::UTF8(data)) => { @@ -457,20 +487,59 @@ impl SequenceData { Ok(None) } } else { - Err(CodecError::TypeValueError { - expected: Box::new(TypeSignature::min_string_utf8()?), - found: Box::new(to_find), - }) + Err( + CheckErrors::TypeValueError(TypeSignature::min_string_utf8()?, to_find) + .into(), + ) } } } } - pub fn concat( - &mut self, - epoch: &StacksEpochId, - other_seq: SequenceData, - ) -> Result<(), CodecError> { + pub fn filter(&mut self, filter: &mut F) -> Result<()> + where + F: FnMut(SymbolicExpression) -> Result, + { + // Note: this macro can probably get removed once + // ```Vec::drain_filter(&mut self, filter: F) -> DrainFilter``` + // is available in rust stable channel (experimental at this point). + macro_rules! 
drain_filter { + ($data:expr, $seq_type:ident) => { + let mut i = 0; + while i != $data.data.len() { + let atom_value = + SymbolicExpression::atom_value($seq_type::to_value(&$data.data[i])?); + match filter(atom_value) { + Ok(res) if res == false => { + $data.data.remove(i); + } + Ok(_) => { + i += 1; + } + Err(err) => return Err(err), + } + } + }; + } + + match self { + SequenceData::Buffer(data) => { + drain_filter!(data, BuffData); + } + SequenceData::List(data) => { + drain_filter!(data, ListData); + } + SequenceData::String(CharType::ASCII(data)) => { + drain_filter!(data, ASCIIData); + } + SequenceData::String(CharType::UTF8(data)) => { + drain_filter!(data, UTF8Data); + } + } + Ok(()) + } + + pub fn concat(&mut self, epoch: &StacksEpochId, other_seq: SequenceData) -> Result<()> { match (self, other_seq) { (SequenceData::List(inner_data), SequenceData::List(other_inner_data)) => { inner_data.append(epoch, other_inner_data) @@ -486,7 +555,7 @@ impl SequenceData { SequenceData::String(CharType::UTF8(inner_data)), SequenceData::String(CharType::UTF8(ref mut other_inner_data)), ) => inner_data.append(other_inner_data), - _ => Err(CodecError::BadTypeConstruction), + _ => Err(RuntimeErrorType::BadTypeConstruction.into()), }?; Ok(()) } @@ -496,7 +565,7 @@ impl SequenceData { epoch: &StacksEpochId, left_position: usize, right_position: usize, - ) -> Result { + ) -> Result { let empty_seq = left_position == right_position; let result = match self { @@ -604,30 +673,38 @@ impl fmt::Display for UTF8Data { } pub trait SequencedValue { - fn type_signature(&self) -> Result; + fn type_signature(&self) -> std::result::Result; fn items(&self) -> &Vec; fn drained_items(&mut self) -> Vec; - fn to_value(v: &T) -> Result; + fn to_value(v: &T) -> Result; + + fn atom_values(&mut self) -> Result> { + self.drained_items() + .iter() + .map(|item| Ok(SymbolicExpression::atom_value(Self::to_value(item)?))) + .collect() + } } impl SequencedValue for ListData { fn items(&self) -> &Vec { &self.data } + fn drained_items(&mut self) -> Vec { self.data.drain(..).collect() } - fn type_signature(&self) -> std::result::Result { + fn type_signature(&self) -> std::result::Result { Ok(TypeSignature::SequenceType(SequenceSubtype::ListType( self.type_signature.clone(), ))) } - fn to_value(v: &Value) -> Result { + fn to_value(v: &Value) -> Result { Ok(v.clone()) } } @@ -641,16 +718,16 @@ impl SequencedValue for BuffData { self.data.drain(..).collect() } - fn type_signature(&self) -> std::result::Result { + fn type_signature(&self) -> std::result::Result { let buff_length = BufferLength::try_from(self.data.len()).map_err(|_| { - CodecError::Expect("ERROR: Too large of a buffer successfully constructed.".into()) + CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) })?; Ok(TypeSignature::SequenceType(SequenceSubtype::BufferType( buff_length, ))) } - fn to_value(v: &u8) -> Result { + fn to_value(v: &u8) -> Result { Ok(Value::buff_from_byte(*v)) } } @@ -664,18 +741,19 @@ impl SequencedValue for ASCIIData { self.data.drain(..).collect() } - fn type_signature(&self) -> std::result::Result { + fn type_signature(&self) -> std::result::Result { let buff_length = BufferLength::try_from(self.data.len()).map_err(|_| { - CodecError::Expect("ERROR: Too large of a buffer successfully constructed.".into()) + CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) })?; Ok(TypeSignature::SequenceType(SequenceSubtype::StringType( StringSubtype::ASCII(buff_length), ))) } - fn 
to_value(v: &u8) -> Result { + fn to_value(v: &u8) -> Result { Value::string_ascii_from_bytes(vec![*v]).map_err(|_| { - CodecError::Expect("ERROR: Invalid ASCII string successfully constructed".into()) + InterpreterError::Expect("ERROR: Invalid ASCII string successfully constructed".into()) + .into() }) } } @@ -689,36 +767,37 @@ impl SequencedValue> for UTF8Data { self.data.drain(..).collect() } - fn type_signature(&self) -> std::result::Result { + fn type_signature(&self) -> std::result::Result { let str_len = StringUTF8Length::try_from(self.data.len()).map_err(|_| { - CodecError::Expect("ERROR: Too large of a buffer successfully constructed.".into()) + CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) })?; Ok(TypeSignature::SequenceType(SequenceSubtype::StringType( StringSubtype::UTF8(str_len), ))) } - fn to_value(v: &Vec) -> Result { + fn to_value(v: &Vec) -> Result { Value::string_utf8_from_bytes(v.clone()).map_err(|_| { - CodecError::Expect("ERROR: Invalid UTF8 string successfully constructed".into()) + InterpreterError::Expect("ERROR: Invalid UTF8 string successfully constructed".into()) + .into() }) } } impl OptionalData { - pub fn type_signature(&self) -> Result { + pub fn type_signature(&self) -> std::result::Result { let type_result = match self.data { Some(ref v) => TypeSignature::new_option(TypeSignature::type_of(v)?), None => TypeSignature::new_option(TypeSignature::NoType), }; type_result.map_err(|_| { - CodecError::Expect("Should not have constructed too large of a type.".into()) + CheckErrors::Expects("Should not have constructed too large of a type.".into()) }) } } impl ResponseData { - pub fn type_signature(&self) -> Result { + pub fn type_signature(&self) -> std::result::Result { let type_result = match self.committed { true => TypeSignature::new_response( TypeSignature::type_of(&self.data)?, @@ -730,7 +809,7 @@ impl ResponseData { ), }; type_result.map_err(|_| { - CodecError::Expect("Should not have constructed too large of a type.".into()) + CheckErrors::Expects("Should not have constructed too large of a type.".into()) }) } } @@ -750,11 +829,11 @@ impl PartialEq for TupleData { pub const NONE: Value = Value::Optional(OptionalData { data: None }); impl Value { - pub fn some(data: Value) -> Result { + pub fn some(data: Value) -> Result { if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge.into()) } else if data.depth()? + 1 > MAX_TYPE_DEPTH { - Err(CodecError::TypeSignatureTooDeep) + Err(CheckErrors::TypeSignatureTooDeep.into()) } else { Ok(Value::Optional(OptionalData { data: Some(Box::new(data)), @@ -787,11 +866,11 @@ impl Value { }) } - pub fn okay(data: Value) -> Result { + pub fn okay(data: Value) -> Result { if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge.into()) } else if data.depth()? + 1 > MAX_TYPE_DEPTH { - Err(CodecError::TypeSignatureTooDeep) + Err(CheckErrors::TypeSignatureTooDeep.into()) } else { Ok(Value::Response(ResponseData { committed: true, @@ -800,11 +879,11 @@ impl Value { } } - pub fn error(data: Value) -> Result { + pub fn error(data: Value) -> Result { if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge.into()) } else if data.depth()? 
+ 1 > MAX_TYPE_DEPTH { - Err(CodecError::TypeSignatureTooDeep) + Err(CheckErrors::TypeSignatureTooDeep.into()) } else { Ok(Value::Response(ResponseData { committed: false, @@ -813,11 +892,11 @@ impl Value { } } - pub fn size(&self) -> Result { - TypeSignature::type_of(self)?.size() + pub fn size(&self) -> Result { + Ok(TypeSignature::type_of(self)?.size()?) } - pub fn depth(&self) -> Result { + pub fn depth(&self) -> Result { Ok(TypeSignature::type_of(self)?.depth()) } @@ -828,12 +907,12 @@ impl Value { epoch: &StacksEpochId, list_data: Vec, expected_type: ListTypeData, - ) -> Result { + ) -> Result { // Constructors for TypeSignature ensure that the size of the Value cannot // be greater than MAX_VALUE_SIZE (they error on such constructions) // so we do not need to perform that check here. if (expected_type.get_max_len() as usize) < list_data.len() { - return Err(CodecError::FailureConstructingListWithType); + return Err(InterpreterError::FailureConstructingListWithType.into()); } { @@ -841,7 +920,7 @@ impl Value { for item in &list_data { if !expected_item_type.admits(epoch, item)? { - return Err(CodecError::FailureConstructingListWithType); + return Err(InterpreterError::FailureConstructingListWithType.into()); } } } @@ -852,7 +931,7 @@ impl Value { }))) } - pub fn cons_list_unsanitized(list_data: Vec) -> Result { + pub fn cons_list_unsanitized(list_data: Vec) -> Result { let type_sig = TypeSignature::construct_parent_list_type(&list_data)?; Ok(Value::Sequence(SequenceData::List(ListData { data: list_data, @@ -861,11 +940,11 @@ impl Value { } #[cfg(any(test, feature = "testing"))] - pub fn list_from(list_data: Vec) -> Result { + pub fn list_from(list_data: Vec) -> Result { Value::cons_list_unsanitized(list_data) } - pub fn cons_list(list_data: Vec, epoch: &StacksEpochId) -> Result { + pub fn cons_list(list_data: Vec, epoch: &StacksEpochId) -> Result { // Constructors for TypeSignature ensure that the size of the Value cannot // be greater than MAX_VALUE_SIZE (they error on such constructions) // Aaron: at this point, we've _already_ allocated memory for this type. @@ -880,7 +959,7 @@ impl Value { .map(|(value, _did_sanitize)| value) }) .collect(); - let list_data = list_data_opt.ok_or_else(|| CodecError::ListTypesMustMatch)?; + let list_data = list_data_opt.ok_or_else(|| CheckErrors::ListTypesMustMatch)?; Ok(Value::Sequence(SequenceData::List(ListData { data: list_data, type_signature: type_sig, @@ -888,8 +967,8 @@ impl Value { } /// # Errors - /// - [`CodecError::ValueTooLarge`] if `buff_data` is too large. - pub fn buff_from(buff_data: Vec) -> Result { + /// - CheckErrors::ValueTooLarge if `buff_data` is too large. 
+ pub fn buff_from(buff_data: Vec) -> Result { // check the buffer size BufferLength::try_from(buff_data.len())?; // construct the buffer @@ -902,13 +981,13 @@ impl Value { Value::Sequence(SequenceData::Buffer(BuffData { data: vec![byte] })) } - pub fn string_ascii_from_bytes(bytes: Vec) -> Result { + pub fn string_ascii_from_bytes(bytes: Vec) -> Result { // check the string size BufferLength::try_from(bytes.len())?; for b in bytes.iter() { if !b.is_ascii_alphanumeric() && !b.is_ascii_punctuation() && !b.is_ascii_whitespace() { - return Err(CodecError::InvalidStringCharacters); + return Err(CheckErrors::InvalidCharactersDetected.into()); } } // construct the string @@ -917,11 +996,9 @@ impl Value { )))) } - pub fn string_utf8_from_string_utf8_literal( - tokenized_str: String, - ) -> Result { + pub fn string_utf8_from_string_utf8_literal(tokenized_str: String) -> Result { let wrapped_codepoints_matcher = Regex::new("^\\\\u\\{(?P[[:xdigit:]]+)\\}") - .map_err(|_| CodecError::Expect("Bad regex".into()))?; + .map_err(|_| InterpreterError::Expect("Bad regex".into()))?; let mut window = tokenized_str.as_str(); let mut cursor = 0; let mut data: Vec> = vec![]; @@ -929,12 +1006,12 @@ impl Value { if let Some(captures) = wrapped_codepoints_matcher.captures(window) { let matched = captures .name("value") - .ok_or_else(|| CodecError::Expect("Expected capture".into()))?; + .ok_or_else(|| InterpreterError::Expect("Expected capture".into()))?; let scalar_value = window[matched.start()..matched.end()].to_string(); let unicode_char = { let u = u32::from_str_radix(&scalar_value, 16) - .map_err(|_| CodecError::InvalidUtf8Encoding)?; - let c = char::from_u32(u).ok_or_else(|| CodecError::InvalidUtf8Encoding)?; + .map_err(|_| CheckErrors::InvalidUTF8Encoding)?; + let c = char::from_u32(u).ok_or_else(|| CheckErrors::InvalidUTF8Encoding)?; let mut encoded_char: Vec = vec![0; c.len_utf8()]; c.encode_utf8(&mut encoded_char[..]); encoded_char @@ -958,10 +1035,10 @@ impl Value { )))) } - pub fn string_utf8_from_bytes(bytes: Vec) -> Result { + pub fn string_utf8_from_bytes(bytes: Vec) -> Result { let validated_utf8_str = match str::from_utf8(&bytes) { Ok(string) => string, - _ => return Err(CodecError::InvalidStringCharacters), + _ => return Err(CheckErrors::InvalidCharactersDetected.into()), }; let data = validated_utf8_str .chars() @@ -979,35 +1056,35 @@ impl Value { )))) } - pub fn expect_ascii(self) -> Result { + pub fn expect_ascii(self) -> Result { if let Value::Sequence(SequenceData::String(CharType::ASCII(ASCIIData { data }))) = self { Ok(String::from_utf8(data) - .map_err(|_| CodecError::Expect("Non UTF-8 data in string".into()))?) + .map_err(|_| InterpreterError::Expect("Non UTF-8 data in string".into()))?) 
} else { error!("Value '{self:?}' is not an ASCII string"); - Err(CodecError::Expect("Expected ASCII string".into())) + Err(InterpreterError::Expect("Expected ASCII string".into()).into()) } } - pub fn expect_u128(self) -> Result { + pub fn expect_u128(self) -> Result { if let Value::UInt(inner) = self { Ok(inner) } else { error!("Value '{self:?}' is not a u128"); - Err(CodecError::Expect("Expected u128".into())) + Err(InterpreterError::Expect("Expected u128".into()).into()) } } - pub fn expect_i128(self) -> Result { + pub fn expect_i128(self) -> Result { if let Value::Int(inner) = self { Ok(inner) } else { error!("Value '{self:?}' is not an i128"); - Err(CodecError::Expect("Expected i128".into())) + Err(InterpreterError::Expect("Expected i128".into()).into()) } } - pub fn expect_buff(self, sz: usize) -> Result, CodecError> { + pub fn expect_buff(self, sz: usize) -> Result> { if let Value::Sequence(SequenceData::Buffer(buffdata)) = self { if buffdata.data.len() <= sz { Ok(buffdata.data) @@ -1016,24 +1093,24 @@ impl Value { "Value buffer has len {}, expected {sz}", buffdata.data.len() ); - Err(CodecError::Expect("Unexpected buff length".into())) + Err(InterpreterError::Expect("Unexpected buff length".into()).into()) } } else { error!("Value '{self:?}' is not a buff"); - Err(CodecError::Expect("Expected buff".into())) + Err(InterpreterError::Expect("Expected buff".into()).into()) } } - pub fn expect_list(self) -> Result, CodecError> { + pub fn expect_list(self) -> Result> { if let Value::Sequence(SequenceData::List(listdata)) = self { Ok(listdata.data) } else { error!("Value '{self:?}' is not a list"); - Err(CodecError::Expect("Expected list".into())) + Err(InterpreterError::Expect("Expected list".into()).into()) } } - pub fn expect_buff_padded(self, sz: usize, pad: u8) -> Result, CodecError> { + pub fn expect_buff_padded(self, sz: usize, pad: u8) -> Result> { let mut data = self.expect_buff(sz)?; if sz > data.len() { for _ in data.len()..sz { @@ -1043,25 +1120,25 @@ impl Value { Ok(data) } - pub fn expect_bool(self) -> Result { + pub fn expect_bool(self) -> Result { if let Value::Bool(b) = self { Ok(b) } else { error!("Value '{self:?}' is not a bool"); - Err(CodecError::Expect("Expected bool".into())) + Err(InterpreterError::Expect("Expected bool".into()).into()) } } - pub fn expect_tuple(self) -> Result { + pub fn expect_tuple(self) -> Result { if let Value::Tuple(data) = self { Ok(data) } else { error!("Value '{self:?}' is not a tuple"); - Err(CodecError::Expect("Expected tuple".into())) + Err(InterpreterError::Expect("Expected tuple".into()).into()) } } - pub fn expect_optional(self) -> Result, CodecError> { + pub fn expect_optional(self) -> Result> { if let Value::Optional(opt) = self { match opt.data { Some(boxed_value) => Ok(Some(*boxed_value)), @@ -1069,29 +1146,29 @@ impl Value { } } else { error!("Value '{self:?}' is not an optional"); - Err(CodecError::Expect("Expected optional".into())) + Err(InterpreterError::Expect("Expected optional".into()).into()) } } - pub fn expect_principal(self) -> Result { + pub fn expect_principal(self) -> Result { if let Value::Principal(p) = self { Ok(p) } else { error!("Value '{self:?}' is not a principal"); - Err(CodecError::Expect("Expected principal".into())) + Err(InterpreterError::Expect("Expected principal".into()).into()) } } - pub fn expect_callable(self) -> Result { + pub fn expect_callable(self) -> Result { if let Value::CallableContract(t) = self { Ok(t) } else { error!("Value '{self:?}' is not a callable contract"); - 
Err(CodecError::Expect("Expected callable".into())) + Err(InterpreterError::Expect("Expected callable".into()).into()) } } - pub fn expect_result(self) -> Result, CodecError> { + pub fn expect_result(self) -> Result> { if let Value::Response(res_data) = self { if res_data.committed { Ok(Ok(*res_data.data)) @@ -1100,52 +1177,52 @@ impl Value { } } else { error!("Value '{self:?}' is not a response"); - Err(CodecError::Expect("Expected response".into())) + Err(InterpreterError::Expect("Expected response".into()).into()) } } - pub fn expect_result_ok(self) -> Result { + pub fn expect_result_ok(self) -> Result { if let Value::Response(res_data) = self { if res_data.committed { Ok(*res_data.data) } else { error!("Value is not a (ok ..)"); - Err(CodecError::Expect("Expected ok response".into())) + Err(InterpreterError::Expect("Expected ok response".into()).into()) } } else { error!("Value '{self:?}' is not a response"); - Err(CodecError::Expect("Expected response".into())) + Err(InterpreterError::Expect("Expected response".into()).into()) } } - pub fn expect_result_err(self) -> Result { + pub fn expect_result_err(self) -> Result { if let Value::Response(res_data) = self { if !res_data.committed { Ok(*res_data.data) } else { error!("Value is not a (err ..)"); - Err(CodecError::Expect("Expected err response".into())) + Err(InterpreterError::Expect("Expected err response".into()).into()) } } else { error!("Value '{self:?}' is not a response"); - Err(CodecError::Expect("Expected response".into())) + Err(InterpreterError::Expect("Expected response".into()).into()) } } } impl BuffData { - pub fn len(&self) -> Result { + pub fn len(&self) -> Result { self.data .len() .try_into() - .map_err(|_| CodecError::Expect("Data length should be valid".into())) + .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) } pub fn as_slice(&self) -> &[u8] { self.data.as_slice() } - fn append(&mut self, other_seq: &mut BuffData) -> Result<(), CodecError> { + fn append(&mut self, other_seq: &mut BuffData) -> Result<()> { self.data.append(&mut other_seq.data); Ok(()) } @@ -1156,25 +1233,25 @@ impl BuffData { } impl ListData { - pub fn len(&self) -> Result { + pub fn len(&self) -> Result { self.data .len() .try_into() - .map_err(|_| CodecError::Expect("Data length should be valid".into())) + .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) } pub fn is_empty(&self) -> bool { self.data.is_empty() } - fn append(&mut self, epoch: &StacksEpochId, other_seq: ListData) -> Result<(), CodecError> { + fn append(&mut self, epoch: &StacksEpochId, other_seq: ListData) -> Result<()> { let entry_type_a = self.type_signature.get_list_item_type(); let entry_type_b = other_seq.type_signature.get_list_item_type(); let entry_type = TypeSignature::factor_out_no_type(epoch, entry_type_a, entry_type_b)?; let max_len = self.type_signature.get_max_len() + other_seq.type_signature.get_max_len(); for item in other_seq.data.into_iter() { let (item, _) = Value::sanitize_value(epoch, &entry_type, item) - .ok_or_else(|| CodecError::ListTypesMustMatch)?; + .ok_or_else(|| CheckErrors::ListTypesMustMatch)?; self.data.push(item); } @@ -1184,30 +1261,30 @@ impl ListData { } impl ASCIIData { - fn append(&mut self, other_seq: &mut ASCIIData) -> Result<(), CodecError> { + fn append(&mut self, other_seq: &mut ASCIIData) -> Result<()> { self.data.append(&mut other_seq.data); Ok(()) } - pub fn len(&self) -> Result { + pub fn len(&self) -> Result { self.data .len() .try_into() - .map_err(|_| 
CodecError::Expect("Data length should be valid".into())) + .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) } } impl UTF8Data { - fn append(&mut self, other_seq: &mut UTF8Data) -> Result<(), CodecError> { + fn append(&mut self, other_seq: &mut UTF8Data) -> Result<()> { self.data.append(&mut other_seq.data); Ok(()) } - pub fn len(&self) -> Result { + pub fn len(&self) -> Result { self.data .len() .try_into() - .map_err(|_| CodecError::Expect("Data length should be valid".into())) + .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) } } @@ -1251,7 +1328,7 @@ impl fmt::Display for Value { Value::Principal(principal_data) => write!(f, "{principal_data}"), Value::Optional(opt_data) => write!(f, "{opt_data}"), Value::Response(res_data) => write!(f, "{res_data}"), - Value::Sequence(SequenceData::Buffer(vec_bytes)) => write!(f, "0x{}", &vec_bytes), + Value::Sequence(SequenceData::Buffer(vec_bytes)) => write!(f, "0x{vec_bytes}"), Value::Sequence(SequenceData::String(string)) => write!(f, "{string}"), Value::Sequence(SequenceData::List(list_data)) => { write!(f, "(")?; @@ -1268,6 +1345,13 @@ impl fmt::Display for Value { } } +#[cfg(any(test, feature = "testing"))] +impl From<&StacksPrivateKey> for Value { + fn from(o: &StacksPrivateKey) -> Value { + Value::from(StandardPrincipalData::from(o)) + } +} + impl PrincipalData { pub fn version(&self) -> u8 { match self { @@ -1285,7 +1369,7 @@ impl PrincipalData { self.version() < 32 } - pub fn parse(literal: &str) -> Result { + pub fn parse(literal: &str) -> Result { // be permissive about leading single-quote let literal = literal.strip_prefix('\'').unwrap_or(literal); @@ -1296,22 +1380,23 @@ impl PrincipalData { } } - pub fn parse_qualified_contract_principal(literal: &str) -> Result { + pub fn parse_qualified_contract_principal(literal: &str) -> Result { let contract_id = QualifiedContractIdentifier::parse(literal)?; Ok(PrincipalData::Contract(contract_id)) } - pub fn parse_standard_principal(literal: &str) -> Result { + pub fn parse_standard_principal(literal: &str) -> Result { let (version, data) = c32::c32_address_decode(literal) - .map_err(|x| CodecError::ParseError(format!("Invalid principal literal: {x}")))?; + .map_err(|x| RuntimeErrorType::ParseError(format!("Invalid principal literal: {x}")))?; if data.len() != 20 { - return Err(CodecError::ParseError( + return Err(RuntimeErrorType::ParseError( "Invalid principal literal: Expected 20 data bytes.".to_string(), - )); + ) + .into()); } let mut fixed_data = [0; 20]; fixed_data.copy_from_slice(&data[..20]); - StandardPrincipalData::new(version, fixed_data) + Ok(StandardPrincipalData::new(version, fixed_data)?) 
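A usage sketch (illustrative only, not part of the patch) of how the fallible constructors and `expect_*` accessors above are meant to compose with `?`; `roundtrip_ascii` is a hypothetical helper name and the bare `Result` alias is assumed to be the module's own:

fn roundtrip_ascii(bytes: Vec<u8>) -> Result<String> {
    // Bytes outside the accepted ASCII set are rejected with CheckErrors::InvalidCharactersDetected.
    let v = Value::string_ascii_from_bytes(bytes)?;
    // A non-ASCII-string variant would surface as InterpreterError::Expect rather than panicking.
    v.expect_ascii()
}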
} } @@ -1331,11 +1416,7 @@ impl fmt::Display for PrincipalData { impl fmt::Display for CallableData { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(trait_identifier) = &self.trait_identifier { - write!( - f, - "({} as <{}>)", - self.contract_identifier, trait_identifier, - ) + write!(f, "({} as <{trait_identifier}>)", self.contract_identifier) } else { write!(f, "{}", self.contract_identifier,) } @@ -1429,7 +1510,7 @@ impl TupleData { fn new( type_signature: TupleTypeSignature, data_map: BTreeMap, - ) -> Result { + ) -> Result { let t = TupleData { type_signature, data_map, @@ -1449,7 +1530,7 @@ impl TupleData { // TODO: add tests from mutation testing results #4833 #[cfg_attr(test, mutants::skip)] - pub fn from_data(data: Vec<(ClarityName, Value)>) -> Result { + pub fn from_data(data: Vec<(ClarityName, Value)>) -> Result { let mut type_map = BTreeMap::new(); let mut data_map = BTreeMap::new(); for (name, value) in data.into_iter() { @@ -1457,9 +1538,7 @@ impl TupleData { let entry = type_map.entry(name.clone()); match entry { Entry::Vacant(e) => e.insert(type_info), - Entry::Occupied(_) => { - return Err(CodecError::NameAlreadyUsedInTuple(name.into())); - } + Entry::Occupied(_) => return Err(CheckErrors::NameAlreadyUsed(name.into()).into()), }; data_map.insert(name, value); } @@ -1473,33 +1552,33 @@ impl TupleData { epoch: &StacksEpochId, data: Vec<(ClarityName, Value)>, expected: &TupleTypeSignature, - ) -> Result { + ) -> Result { let mut data_map = BTreeMap::new(); for (name, value) in data.into_iter() { let expected_type = expected .field_type(&name) - .ok_or(CodecError::FailureConstructingTupleWithType)?; + .ok_or(InterpreterError::FailureConstructingTupleWithType)?; if !expected_type.admits(epoch, &value)? { - return Err(CodecError::FailureConstructingTupleWithType); + return Err(InterpreterError::FailureConstructingTupleWithType.into()); } data_map.insert(name, value); } Self::new(expected.clone(), data_map) } - pub fn get(&self, name: &str) -> Result<&Value, CodecError> { + pub fn get(&self, name: &str) -> Result<&Value> { self.data_map.get(name).ok_or_else(|| { - CodecError::NoSuchTupleField(name.to_string(), self.type_signature.clone()) + CheckErrors::NoSuchTupleField(name.to_string(), self.type_signature.clone()).into() }) } - pub fn get_owned(mut self, name: &str) -> Result { + pub fn get_owned(mut self, name: &str) -> Result { self.data_map.remove(name).ok_or_else(|| { - CodecError::NoSuchTupleField(name.to_string(), self.type_signature.clone()) + CheckErrors::NoSuchTupleField(name.to_string(), self.type_signature.clone()).into() }) } - pub fn shallow_merge(mut base: TupleData, updates: TupleData) -> Result { + pub fn shallow_merge(mut base: TupleData, updates: TupleData) -> Result { let TupleData { data_map, mut type_signature, @@ -1517,7 +1596,7 @@ impl fmt::Display for TupleData { write!(f, "(tuple")?; for (name, value) in self.data_map.iter() { write!(f, " ")?; - write!(f, "({} {})", &**name, value)?; + write!(f, "({} {value})", &**name)?; } write!(f, ")") } @@ -1528,3 +1607,26 @@ impl fmt::Display for TupleData { pub fn byte_len_of_serialization(serialized: &str) -> u64 { serialized.len() as u64 / 2 } + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct FunctionIdentifier { + identifier: String, +} + +impl fmt::Display for FunctionIdentifier { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.identifier) + } +} + +impl FunctionIdentifier { + pub fn new_native_function(name: &str) -> 
FunctionIdentifier { + let identifier = format!("_native_:{name}"); + FunctionIdentifier { identifier } + } + + pub fn new_user_function(name: &str, context: &str) -> FunctionIdentifier { + let identifier = format!("{context}:{name}"); + FunctionIdentifier { identifier } + } +} diff --git a/clarity-serialization/src/types/serialization.rs b/clarity-serialization/src/types/serialization.rs index 26da1988ab..2179efd824 100644 --- a/clarity-serialization/src/types/serialization.rs +++ b/clarity-serialization/src/types/serialization.rs @@ -14,7 +14,7 @@ // along with this program. If not, see . use std::io::{Read, Write}; -use std::{cmp, str}; +use std::{cmp, error, str}; use lazy_static::lazy_static; use stacks_common::codec::{Error as codec_error, StacksMessageCodec}; @@ -23,7 +23,7 @@ use stacks_common::util::hash::{hex_bytes, to_hex}; use stacks_common::util::retry::BoundReader; use super::{ListTypeData, TupleTypeSignature}; -use crate::errors::CodecError; +use crate::errors::{CheckErrors, IncomparableError, InterpreterError}; use crate::representations::{ClarityName, ContractName, MAX_STRING_LEN}; use crate::types::{ BOUND_VALUE_SERIALIZATION_BYTES, BufferLength, CallableData, CharType, MAX_TYPE_DEPTH, @@ -31,6 +31,23 @@ use crate::types::{ SequenceSubtype, StandardPrincipalData, StringSubtype, TupleData, TypeSignature, Value, }; +/// Errors that may occur in serialization or deserialization +/// If deserialization fails because the described type is a bad type and +/// a CheckError is thrown, it gets wrapped in BadTypeError. +/// Any IOErrors from the supplied buffer will manifest as IOError variants, +/// except for EOF -- if the deserialization code experiences an EOF, it is caught +/// and rethrown as DeserializationError +#[derive(Debug, PartialEq)] +pub enum SerializationError { + IOError(IncomparableError), + BadTypeError(CheckErrors), + DeserializationError(String), + DeserializeExpected(Box), + LeftoverBytesInDeserialization, + SerializationError(String), + UnexpectedSerialization, +} + lazy_static! { pub static ref NONE_SERIALIZATION_LEN: u64 = { #[allow(clippy::unwrap_used)] @@ -52,6 +69,65 @@ const SANITIZATION_READ_BOUND: u64 = 15_000_000; /// clarity depth limit is supported.
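A minimal consumption sketch (illustrative, not part of the patch): it leans on the `From` conversions for `SerializationError` defined just below, and `read_prefix` / `bad_input` are hypothetical helpers:

fn read_prefix<R: Read>(r: &mut R) -> Result<u8, SerializationError> {
    let mut header = [0u8; 1];
    // A std::io::Error from the reader converts into SerializationError::IOError via `?`.
    r.read_exact(&mut header)?;
    Ok(header[0])
}

fn bad_input(msg: &str) -> SerializationError {
    // A &str converts into SerializationError::DeserializationError.
    SerializationError::from(msg)
}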
const UNSANITIZED_DEPTH_CHECK: usize = 16; +impl std::fmt::Display for SerializationError { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + SerializationError::IOError(e) => { + write!(f, "Serialization error caused by IO: {}", e.err) + } + SerializationError::BadTypeError(e) => { + write!(f, "Deserialization error, bad type, caused by: {e}") + } + SerializationError::DeserializationError(e) => { + write!(f, "Deserialization error: {e}") + } + SerializationError::SerializationError(e) => { + write!(f, "Serialization error: {e}") + } + SerializationError::DeserializeExpected(e) => write!( + f, + "Deserialization expected the type of the input to be: {e}" + ), + SerializationError::UnexpectedSerialization => { + write!(f, "The serializer handled an input in an unexpected way") + } + SerializationError::LeftoverBytesInDeserialization => { + write!(f, "Deserialization error: bytes left over in buffer") + } + } + } +} + +impl error::Error for SerializationError { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + SerializationError::IOError(e) => Some(&e.err), + SerializationError::BadTypeError(e) => Some(e), + _ => None, + } + } +} + +// Note: a byte stream that describes a longer type than +// there are available bytes to read will result in an IOError(UnexpectedEOF) +impl From for SerializationError { + fn from(err: std::io::Error) -> Self { + SerializationError::IOError(IncomparableError { err }) + } +} + +impl From<&str> for SerializationError { + fn from(e: &str) -> Self { + SerializationError::DeserializationError(e.into()) + } +} + +impl From for SerializationError { + fn from(e: CheckErrors) -> Self { + SerializationError::BadTypeError(e) + } +} + define_u8_enum!(TypePrefix { Int = 0, UInt = 1, @@ -121,7 +197,7 @@ impl From<&Value> for TypePrefix { /// are repeatedly serialized or deserialized. trait ClarityValueSerializable { fn serialize_write(&self, w: &mut W) -> std::io::Result<()>; - fn deserialize_read(r: &mut R) -> Result; + fn deserialize_read(r: &mut R) -> Result; } impl ClarityValueSerializable for StandardPrincipalData { @@ -130,13 +206,13 @@ impl ClarityValueSerializable for StandardPrincipalData { w.write_all(&self.1) } - fn deserialize_read(r: &mut R) -> Result { + fn deserialize_read(r: &mut R) -> Result { let mut version = [0; 1]; let mut data = [0; 20]; r.read_exact(&mut version)?; r.read_exact(&mut data)?; StandardPrincipalData::new(version[0], data) - .map_err(|_| CodecError::UnexpectedSerialization) + .map_err(|_| SerializationError::UnexpectedSerialization) } } @@ -150,24 +226,22 @@ macro_rules! 
serialize_guarded_string { w.write_all(self.as_str().as_bytes()) } - fn deserialize_read(r: &mut R) -> Result { + fn deserialize_read(r: &mut R) -> Result { let mut len = [0; 1]; r.read_exact(&mut len)?; let len = u8::from_be_bytes(len); if len > MAX_STRING_LEN { - return Err(CodecError::Deserialization("String too long".to_string())); + return Err(SerializationError::DeserializationError( + "String too long".to_string(), + )); } let mut data = vec![0; len as usize]; r.read_exact(&mut data)?; String::from_utf8(data) - .map_err(|_| CodecError::Deserialization("Non-UTF8 string data".into())) - .and_then(|x| { - $Name::try_from(x).map_err(|_| { - CodecError::Deserialization("Illegal Clarity string".into()) - }) - }) + .map_err(|_| "Non-UTF8 string data".into()) + .and_then(|x| $Name::try_from(x).map_err(|_| "Illegal Clarity string".into())) } } }; @@ -188,12 +262,13 @@ impl PrincipalData { } } - fn inner_consensus_deserialize(r: &mut R) -> Result { + fn inner_consensus_deserialize( + r: &mut R, + ) -> Result { let mut header = [0]; r.read_exact(&mut header)?; - let prefix = TypePrefix::from_u8(header[0]) - .ok_or(CodecError::Deserialization("Bad principal prefix".into()))?; + let prefix = TypePrefix::from_u8(header[0]).ok_or("Bad principal prefix")?; match prefix { TypePrefix::PrincipalStandard => { @@ -207,7 +282,7 @@ impl PrincipalData { name, })) } - _ => Err(CodecError::Deserialization("Bad principal prefix".into())), + _ => Err("Bad principal prefix".into()), } } } @@ -229,7 +304,7 @@ macro_rules! check_match { match $item { None => Ok(()), Some($Pattern) => Ok(()), - Some(x) => Err(CodecError::DeserializeExpected(Box::new(x))), + Some(x) => Err(SerializationError::DeserializeExpected(Box::new(x.clone()))), } }; } @@ -271,7 +346,7 @@ impl DeserializeStackItem { /// /// Returns `None` if this stack item either doesn't have an expected type, or the /// next child is going to be sanitized/elided. - fn next_expected_type(&self) -> Result, CodecError> { + fn next_expected_type(&self) -> Result, SerializationError> { match self { DeserializeStackItem::List { expected_type, .. } => Ok(expected_type .as_ref() @@ -291,7 +366,7 @@ impl DeserializeStackItem { return Ok(None); } let field_type = some_tuple.field_type(next_name).ok_or_else(|| { - CodecError::DeserializeExpected(Box::new(TypeSignature::TupleType( + SerializationError::DeserializeExpected(Box::new(TypeSignature::TupleType( some_tuple.clone(), ))) })?; @@ -319,7 +394,7 @@ impl TypeSignature { /// size of a `(buff 1024*1024)` is `1+1024*1024` because of the /// type prefix byte. However, that is 1 byte larger than the maximum /// buffer size in Clarity. - pub fn max_serialized_size(&self) -> Result { + pub fn max_serialized_size(&self) -> Result { let type_prefix_size = 1; let max_output_size = match self { @@ -330,7 +405,7 @@ impl TypeSignature { // `some` or similar with `result` types). So, when // serializing an object with a `NoType`, the other // branch should always be used. - return Err(CodecError::CouldNotDetermineSerializationType); + return Err(CheckErrors::CouldNotDetermineSerializationType); } TypeSignature::IntType => 16, TypeSignature::UIntType => 16, @@ -342,14 +417,14 @@ impl TypeSignature { .get_max_len() .checked_mul(list_type.get_list_item_type().max_serialized_size()?) .and_then(|x| x.checked_add(list_length_encode)) - .ok_or_else(|| CodecError::ValueTooLarge)? + .ok_or_else(|| CheckErrors::ValueTooLarge)? 
} TypeSignature::SequenceType(SequenceSubtype::BufferType(buff_length)) => { // u32 length as big-endian bytes let buff_length_encode = 4; u32::from(buff_length) .checked_add(buff_length_encode) - .ok_or_else(|| CodecError::ValueTooLarge)? + .ok_or_else(|| CheckErrors::ValueTooLarge)? } TypeSignature::SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII( length, @@ -359,7 +434,7 @@ impl TypeSignature { // ascii is 1-byte per character u32::from(length) .checked_add(str_length_encode) - .ok_or_else(|| CodecError::ValueTooLarge)? + .ok_or_else(|| CheckErrors::ValueTooLarge)? } TypeSignature::SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8( length, @@ -370,7 +445,7 @@ impl TypeSignature { u32::from(length) .checked_mul(4) .and_then(|x| x.checked_add(str_length_encode)) - .ok_or_else(|| CodecError::ValueTooLarge)? + .ok_or_else(|| CheckErrors::ValueTooLarge)? } TypeSignature::PrincipalType | TypeSignature::CallableType(_) @@ -393,7 +468,7 @@ impl TypeSignature { .checked_add(1) // length of key-name .and_then(|x| x.checked_add(key.len() as u32)) // ClarityName is ascii-only, so 1 byte per length .and_then(|x| x.checked_add(value_size)) - .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; } total_size } @@ -402,7 +477,7 @@ impl TypeSignature { Ok(size) => size, // if NoType, then this is just serializing a none // value, which is only the type prefix - Err(CodecError::CouldNotDetermineSerializationType) => 0, + Err(CheckErrors::CouldNotDetermineSerializationType) => 0, Err(e) => return Err(e), } } @@ -410,17 +485,17 @@ impl TypeSignature { let (ok_type, err_type) = response_types.as_ref(); let (ok_type_max_size, no_ok_type) = match ok_type.max_serialized_size() { Ok(size) => (size, false), - Err(CodecError::CouldNotDetermineSerializationType) => (0, true), + Err(CheckErrors::CouldNotDetermineSerializationType) => (0, true), Err(e) => return Err(e), }; let err_type_max_size = match err_type.max_serialized_size() { Ok(size) => size, - Err(CodecError::CouldNotDetermineSerializationType) => { + Err(CheckErrors::CouldNotDetermineSerializationType) => { if no_ok_type { // if both the ok type and the error type are NoType, // throw a CheckError. This should not be possible, but the check // is done out of caution. - return Err(CodecError::CouldNotDetermineSerializationType); + return Err(CheckErrors::CouldNotDetermineSerializationType); } else { 0 } @@ -430,13 +505,13 @@ impl TypeSignature { cmp::max(ok_type_max_size, err_type_max_size) } TypeSignature::ListUnionType(_) => { - return Err(CodecError::CouldNotDetermineSerializationType); + return Err(CheckErrors::CouldNotDetermineSerializationType); } }; max_output_size .checked_add(type_prefix_size) - .ok_or_else(|| CodecError::ValueTooLarge) + .ok_or_else(|| CheckErrors::ValueTooLarge) } } @@ -445,7 +520,7 @@ impl Value { r: &mut R, expected_type: Option<&TypeSignature>, sanitize: bool, - ) -> Result { + ) -> Result { Self::deserialize_read_count(r, expected_type, sanitize).map(|(value, _)| value) } @@ -458,7 +533,7 @@ impl Value { r: &mut R, expected_type: Option<&TypeSignature>, sanitize: bool, - ) -> Result<(Value, u64), CodecError> { + ) -> Result<(Value, u64), SerializationError> { let bound_value_serialization_bytes = if sanitize && expected_type.is_some() { SANITIZATION_READ_BOUND } else { @@ -482,7 +557,7 @@ impl Value { if bytes_read > expect_size as u64 { // this can happen due to sanitization, so its no longer indicative of a *problem* with the node. 
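A worked example (illustrative only) of the bound computed by `max_serialized_size` above: a `(buff 32)` needs one type-prefix byte, a 4-byte length, and at most 32 data bytes:

fn buff32_serialized_bound() -> Result<u32, CheckErrors> {
    let t = TypeSignature::SequenceType(SequenceSubtype::BufferType(BufferLength::try_from(32u32)?));
    // 1 (prefix) + 4 (length) + 32 (data) = 37
    t.max_serialized_size()
}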
debug!( - "Deserialized more bytes than expected size during deserialization. Expected size = {expect_size}, bytes read = {bytes_read}, type = {expected_type:?}" + "Deserialized more bytes than expected size during deserialization. Expected size = {expect_size}, bytes read = {bytes_read}, type = {expected_type}" ); } } @@ -494,7 +569,7 @@ impl Value { r: &mut R, top_expected_type: Option<&TypeSignature>, sanitize: bool, - ) -> Result { + ) -> Result { use super::Value::*; let mut stack = vec![DeserializeStackItem::TopLevel { @@ -508,7 +583,7 @@ impl Value { UNSANITIZED_DEPTH_CHECK }; if stack.len() > depth_check { - return Err(CodecError::TypeSignatureTooDeep); + return Err(CheckErrors::TypeSignatureTooDeep.into()); } #[allow(clippy::expect_used)] @@ -519,8 +594,7 @@ impl Value { let mut header = [0]; r.read_exact(&mut header)?; - let prefix = TypePrefix::from_u8(header[0]) - .ok_or(CodecError::Deserialization("Bad type prefix".into()))?; + let prefix = TypePrefix::from_u8(header[0]).ok_or("Bad type prefix")?; let item = match prefix { TypePrefix::Int => { @@ -548,7 +622,9 @@ impl Value { _ => false, }; if !passed_test { - return Err(CodecError::DeserializeExpected(Box::new(x.clone()))); + return Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))); } } @@ -556,8 +632,7 @@ impl Value { r.read_exact(&mut data[..])?; - Value::buff_from(data) - .map_err(|_| CodecError::Deserialization("Bad buffer".into())) + Value::buff_from(data).map_err(|_| "Bad buffer".into()) } TypePrefix::BoolTrue => { check_match!(expected_type, TypeSignature::BoolType)?; @@ -586,7 +661,9 @@ impl Value { let contained_type = match (committed, x) { (true, TypeSignature::ResponseType(types)) => Ok(&types.0), (false, TypeSignature::ResponseType(types)) => Ok(&types.1), - _ => Err(CodecError::DeserializeExpected(Box::new(x.clone()))), + _ => Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))), }?; Some(contained_type) } @@ -615,7 +692,9 @@ impl Value { Some(x) => { let contained_type = match x { TypeSignature::OptionalType(some_type) => Ok(some_type.as_ref()), - _ => Err(CodecError::DeserializeExpected(Box::new(x.clone()))), + _ => Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))), }?; Some(contained_type) } @@ -634,7 +713,7 @@ impl Value { let len = u32::from_be_bytes(len); if len > MAX_VALUE_SIZE { - return Err(CodecError::Deserialization("Illegal list type".into())); + return Err("Illegal list type".into()); } let (list_type, _entry_type) = match expected_type.as_ref() { @@ -643,14 +722,16 @@ impl Value { if len > list_type.get_max_len() { // unwrap is safe because of the match condition #[allow(clippy::unwrap_used)] - return Err(CodecError::DeserializeExpected(Box::new( + return Err(SerializationError::DeserializeExpected(Box::new( expected_type.unwrap(), ))); } (Some(list_type), Some(list_type.get_list_item_type())) } Some(x) => { - return Err(CodecError::DeserializeExpected(Box::new(x.clone()))); + return Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))); } }; @@ -671,11 +752,9 @@ impl Value { vec![], list_type.clone(), ) - .map_err(|_| CodecError::Deserialization("Illegal list type".into()))? + .map_err(|_| "Illegal list type")? } else { - Value::cons_list_unsanitized(vec![]).map_err(|_| { - CodecError::Deserialization("Illegal list type".into()) - })? + Value::cons_list_unsanitized(vec![]).map_err(|_| "Illegal list type")? 
}; Ok(finished_list) @@ -688,7 +767,7 @@ impl Value { let expected_len = u64::from(len); if len > MAX_VALUE_SIZE { - return Err(CodecError::Deserialization( + return Err(SerializationError::DeserializationError( "Illegal tuple type".to_string(), )); } @@ -700,21 +779,23 @@ impl Value { if u64::from(len) < tuple_type.len() { // unwrap is safe because of the match condition #[allow(clippy::unwrap_used)] - return Err(CodecError::DeserializeExpected(Box::new( + return Err(SerializationError::DeserializeExpected(Box::new( expected_type.unwrap(), ))); } } else if u64::from(len) != tuple_type.len() { // unwrap is safe because of the match condition #[allow(clippy::unwrap_used)] - return Err(CodecError::DeserializeExpected(Box::new( + return Err(SerializationError::DeserializeExpected(Box::new( expected_type.unwrap(), ))); } Some(tuple_type) } Some(x) => { - return Err(CodecError::DeserializeExpected(Box::new(x.clone()))); + return Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))); } }; @@ -750,13 +831,11 @@ impl Value { vec![], tuple_type, ) - .map_err(|_| CodecError::Deserialization("Illegal tuple type".into())) + .map_err(|_| "Illegal tuple type") .map(Value::from)? } else { TupleData::from_data(vec![]) - .map_err(|_| { - CodecError::Deserialization("Illegal tuple type".into()) - }) + .map_err(|_| "Illegal tuple type") .map(Value::from)? }; Ok(finished_tuple) @@ -775,7 +854,9 @@ impl Value { _ => false, }; if !passed_test { - return Err(CodecError::DeserializeExpected(Box::new(x.clone()))); + return Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))); } } @@ -783,8 +864,7 @@ impl Value { r.read_exact(&mut data[..])?; - Value::string_ascii_from_bytes(data) - .map_err(|_| CodecError::Deserialization("Bad string".into())) + Value::string_ascii_from_bytes(data).map_err(|_| "Bad string".into()) } TypePrefix::StringUTF8 => { let mut total_len = [0; 4]; @@ -795,9 +875,8 @@ impl Value { r.read_exact(&mut data[..])?; - let value = Value::string_utf8_from_bytes(data).map_err(|_| { - CodecError::Deserialization("Illegal string_utf8 type".into()) - }); + let value = Value::string_utf8_from_bytes(data) + .map_err(|_| "Illegal string_utf8 type".into()); if let Some(x) = &expected_type { let passed_test = match (x, &value) { @@ -810,7 +889,9 @@ impl Value { _ => false, }; if !passed_test { - return Err(CodecError::DeserializeExpected(Box::new(x.clone()))); + return Err(SerializationError::DeserializeExpected(Box::new( + x.clone(), + ))); } } @@ -828,9 +909,7 @@ impl Value { "Deserializer reached unexpected path: item processed, but deserializer stack does not expect another value"; "item" => %item, ); - return Err(CodecError::Deserialization( - "Deserializer processed item, but deserializer stack does not expect another value".into(), - )); + return Err("Deserializer processed item, but deserializer stack does not expect another value".into()); }; match stack_bottom { DeserializeStackItem::TopLevel { .. } => return Ok(item), @@ -848,13 +927,10 @@ impl Value { items, list_type.clone(), ) - .map_err(|_| { - CodecError::Deserialization("Illegal list type".into()) - })? + .map_err(|_| "Illegal list type")? } else { - Value::cons_list_unsanitized(items).map_err(|_| { - CodecError::Deserialization("Illegal list type".into()) - })? + Value::cons_list_unsanitized(items) + .map_err(|_| "Illegal list type")? }; finished_item.replace(finished_list); @@ -896,7 +972,7 @@ impl Value { // tuple is finished! 
let finished_tuple = if let Some(tuple_type) = expected_type { if items.len() != tuple_type.len() as usize { - return Err(CodecError::DeserializeExpected(Box::new( + return Err(SerializationError::DeserializeExpected(Box::new( TypeSignature::TupleType(tuple_type), ))); } @@ -905,15 +981,11 @@ impl Value { items, &tuple_type, ) - .map_err(|_| { - CodecError::Deserialization("Illegal tuple type".into()) - }) + .map_err(|_| "Illegal tuple type") .map(Value::from)? } else { TupleData::from_data(items) - .map_err(|_| { - CodecError::Deserialization("Illegal tuple type".into()) - }) + .map_err(|_| "Illegal tuple type") .map(Value::from)? }; @@ -944,30 +1016,27 @@ impl Value { } } DeserializeStackItem::OptionSome { .. } => { - let finished_some = Value::some(item) - .map_err(|_x| CodecError::Deserialization("Value too large".into()))?; + let finished_some = Value::some(item).map_err(|_x| "Value too large")?; finished_item.replace(finished_some); } DeserializeStackItem::ResponseOk { .. } => { - let finished_some = Value::okay(item) - .map_err(|_x| CodecError::Deserialization("Value too large".into()))?; + let finished_some = Value::okay(item).map_err(|_x| "Value too large")?; finished_item.replace(finished_some); } DeserializeStackItem::ResponseErr { .. } => { - let finished_some = Value::error(item) - .map_err(|_x| CodecError::Deserialization("Value too large".into()))?; + let finished_some = Value::error(item).map_err(|_x| "Value too large")?; finished_item.replace(finished_some); } }; } } - Err(CodecError::Deserialization( + Err(SerializationError::DeserializationError( "Invalid data: stack ran out before finishing parsing".into(), )) } - pub fn serialize_write(&self, w: &mut W) -> Result<(), CodecError> { + pub fn serialize_write(&self, w: &mut W) -> Result<(), SerializationError> { use super::CharType::*; use super::PrincipalData::*; use super::SequenceData::{self, *}; @@ -997,7 +1066,7 @@ impl Value { Sequence(List(data)) => { let len_bytes = data .len() - .map_err(|e| CodecError::Serialization(e.to_string()))? + .map_err(|e| SerializationError::SerializationError(e.to_string()))? .to_be_bytes(); w.write_all(&len_bytes)?; for item in data.data.iter() { @@ -1008,7 +1077,7 @@ impl Value { let len_bytes = u32::from( value .len() - .map_err(|e| CodecError::Serialization(e.to_string()))?, + .map_err(|e| SerializationError::SerializationError(e.to_string()))?, ) .to_be_bytes(); w.write_all(&len_bytes)?; @@ -1025,7 +1094,7 @@ impl Value { let len_bytes = u32::from( value .len() - .map_err(|e| CodecError::Serialization(e.to_string()))?, + .map_err(|e| SerializationError::SerializationError(e.to_string()))?, ) .to_be_bytes(); w.write_all(&len_bytes)?; @@ -1033,7 +1102,7 @@ impl Value { } Tuple(data) => { let len_bytes = u32::try_from(data.data_map.len()) - .map_err(|e| CodecError::Serialization(e.to_string()))? + .map_err(|e| SerializationError::SerializationError(e.to_string()))? 
.to_be_bytes(); w.write_all(&len_bytes)?; for (key, value) in data.data_map.iter() { @@ -1054,7 +1123,7 @@ impl Value { bytes: &Vec, expected: &TypeSignature, sanitize: bool, - ) -> Result { + ) -> Result { Value::deserialize_read(&mut bytes.as_slice(), Some(expected), sanitize) } @@ -1066,9 +1135,8 @@ impl Value { hex: &str, expected: &TypeSignature, sanitize: bool, - ) -> Result { - let data = - hex_bytes(hex).map_err(|_| CodecError::Deserialization("Bad hex string".into()))?; + ) -> Result { + let data = hex_bytes(hex).map_err(|_| "Bad hex string")?; Value::try_deserialize_bytes(&data, expected, sanitize) } @@ -1084,12 +1152,12 @@ impl Value { bytes: &Vec, expected: &TypeSignature, sanitize: bool, - ) -> Result { + ) -> Result { let input_length = bytes.len(); let (value, read_count) = Value::deserialize_read_count(&mut bytes.as_slice(), Some(expected), sanitize)?; if read_count != (input_length as u64) { - Err(CodecError::LeftoverBytesInDeserialization) + Err(SerializationError::LeftoverBytesInDeserialization) } else { Ok(value) } @@ -1097,31 +1165,25 @@ impl Value { /// Try to deserialize a value without type information. This *does not* perform sanitization /// so it should not be used when decoding clarity database values. - #[cfg(any(test, feature = "testing"))] - pub fn try_deserialize_bytes_untyped(bytes: &Vec) -> Result { - Value::deserialize_read(&mut bytes.as_slice(), None, false) - } - - /// Try to deserialize a value without type information. This *does not* perform sanitization - /// so it should not be used when decoding clarity database values. - #[cfg(not(any(test, feature = "testing")))] - fn try_deserialize_bytes_untyped(bytes: &Vec) -> Result { + /// Public for testing purposes only. + pub(crate) fn try_deserialize_bytes_untyped( + bytes: &Vec, + ) -> Result { Value::deserialize_read(&mut bytes.as_slice(), None, false) } /// Try to deserialize a value from a hex string without type information. This *does not* /// perform sanitization. 
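A caller-side sketch (illustrative, not part of the patch): `serialize_to_hex`, defined just below, returns the interpreter error type while `try_deserialize_hex` returns `SerializationError`, so the hypothetical `roundtrips_as` helper matches on each rather than mixing them with `?`:

fn roundtrips_as(v: &Value, ty: &TypeSignature) -> bool {
    let Ok(hex) = v.serialize_to_hex() else { return false };
    match Value::try_deserialize_hex(&hex, ty, false) {
        Ok(back) => back == *v,
        Err(_) => false,
    }
}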
- pub fn try_deserialize_hex_untyped(hex: &str) -> Result { + pub fn try_deserialize_hex_untyped(hex: &str) -> Result { let hex = hex.strip_prefix("0x").unwrap_or(hex); - let data = - hex_bytes(hex).map_err(|_| CodecError::Deserialization("Bad hex string".into()))?; + let data = hex_bytes(hex).map_err(|_| "Bad hex string")?; Value::try_deserialize_bytes_untyped(&data) } - pub fn serialized_size(&self) -> Result { + pub fn serialized_size(&self) -> Result { let mut counter = WriteCounter { count: 0 }; self.serialize_write(&mut counter).map_err(|_| { - CodecError::Deserialization( + SerializationError::DeserializationError( "Error: Failed to count serialization length of Clarity value".into(), ) })?; @@ -1153,15 +1215,15 @@ impl Write for WriteCounter { } impl Value { - pub fn serialize_to_vec(&self) -> Result, CodecError> { + pub fn serialize_to_vec(&self) -> Result, InterpreterError> { let mut byte_serialization = Vec::new(); self.serialize_write(&mut byte_serialization) - .map_err(|_| CodecError::Expect("IOError filling byte buffer.".into()))?; + .map_err(|_| InterpreterError::Expect("IOError filling byte buffer.".into()))?; Ok(byte_serialization) } /// This does *not* perform any data sanitization - pub fn serialize_to_hex(&self) -> Result { + pub fn serialize_to_hex(&self) -> Result { let byte_serialization = self.serialize_to_vec()?; Ok(to_hex(byte_serialization.as_slice())) } @@ -1287,14 +1349,14 @@ impl Value { impl StacksMessageCodec for Value { fn consensus_serialize(&self, fd: &mut W) -> Result<(), codec_error> { self.serialize_write(fd).map_err(|e| match e { - CodecError::Io(io_e) => codec_error::WriteError(io_e), + SerializationError::IOError(io_e) => codec_error::WriteError(io_e.err), other => codec_error::SerializeError(other.to_string()), }) } fn consensus_deserialize(fd: &mut R) -> Result { Value::deserialize_read(fd, None, false).map_err(|e| match e { - CodecError::Io(io_e) => codec_error::ReadError(io_e), + SerializationError::IOError(e) => codec_error::ReadError(e.err), _ => codec_error::DeserializeError(format!("Failed to decode clarity value: {e:?}")), }) } diff --git a/clarity-serialization/src/types/signatures.rs b/clarity-serialization/src/types/signatures.rs index 5e4c903a97..f03c5baac2 100644 --- a/clarity-serialization/src/types/signatures.rs +++ b/clarity-serialization/src/types/signatures.rs @@ -23,7 +23,7 @@ use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use stacks_common::types::StacksEpochId; -use crate::errors::CodecError; +use crate::errors::CheckErrors; use crate::representations::{CONTRACT_MAX_NAME_LENGTH, ClarityName, ContractName}; use crate::types::{ CharType, MAX_TYPE_DEPTH, MAX_VALUE_SIZE, PrincipalData, QualifiedContractIdentifier, @@ -31,7 +31,7 @@ use crate::types::{ WRAPPER_VALUE_SIZE, }; -type Result = std::result::Result; +type Result = std::result::Result; #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Serialize, Deserialize, Hash)] pub struct AssetIdentifier { @@ -274,10 +274,10 @@ impl From for u32 { } impl TryFrom for BufferLength { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: u32) -> Result { if data > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else { Ok(BufferLength(data)) } @@ -285,10 +285,10 @@ impl TryFrom for BufferLength { } impl TryFrom for BufferLength { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: usize) -> Result { if data > (MAX_VALUE_SIZE as usize) { - Err(CodecError::ValueTooLarge) + 
Err(CheckErrors::ValueTooLarge) } else { Ok(BufferLength(data as u32)) } @@ -296,12 +296,12 @@ impl TryFrom for BufferLength { } impl TryFrom for BufferLength { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: i128) -> Result { if data > (MAX_VALUE_SIZE as i128) { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else if data < 0 { - Err(CodecError::ValueOutOfBounds) + Err(CheckErrors::ValueOutOfBounds) } else { Ok(BufferLength(data as u32)) } @@ -321,13 +321,13 @@ impl From for u32 { } impl TryFrom for StringUTF8Length { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: u32) -> Result { let len = data .checked_mul(4) - .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; if len > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else { Ok(StringUTF8Length(data)) } @@ -335,13 +335,13 @@ impl TryFrom for StringUTF8Length { } impl TryFrom for StringUTF8Length { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: usize) -> Result { let len = data .checked_mul(4) - .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; if len > (MAX_VALUE_SIZE as usize) { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else { Ok(StringUTF8Length(data as u32)) } @@ -349,15 +349,15 @@ impl TryFrom for StringUTF8Length { } impl TryFrom for StringUTF8Length { - type Error = CodecError; + type Error = CheckErrors; fn try_from(data: i128) -> Result { let len = data .checked_mul(4) - .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; if len > (MAX_VALUE_SIZE as i128) { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else if data < 0 { - Err(CodecError::ValueOutOfBounds) + Err(CheckErrors::ValueOutOfBounds) } else { Ok(StringUTF8Length(data as u32)) } @@ -368,7 +368,7 @@ impl ListTypeData { pub fn new_list(entry_type: TypeSignature, max_len: u32) -> Result { let would_be_depth = 1 + entry_type.depth(); if would_be_depth > MAX_TYPE_DEPTH { - return Err(CodecError::TypeSignatureTooDeep); + return Err(CheckErrors::TypeSignatureTooDeep); } let list_data = ListTypeData { @@ -377,9 +377,9 @@ impl ListTypeData { }; let would_be_size = list_data .inner_size()? 
- .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; if would_be_size > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else { Ok(list_data) } @@ -411,9 +411,9 @@ impl TypeSignature { let new_size = WRAPPER_VALUE_SIZE + inner_type.size()?; let new_depth = 1 + inner_type.depth(); if new_size > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else if new_depth > MAX_TYPE_DEPTH { - Err(CodecError::TypeSignatureTooDeep) + Err(CheckErrors::TypeSignatureTooDeep) } else { Ok(OptionalType(Box::new(inner_type))) } @@ -424,9 +424,9 @@ impl TypeSignature { let new_depth = 1 + cmp::max(ok_type.depth(), err_type.depth()); if new_size > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else if new_depth > MAX_TYPE_DEPTH { - Err(CodecError::TypeSignatureTooDeep) + Err(CheckErrors::TypeSignatureTooDeep) } else { Ok(ResponseType(Box::new((ok_type, err_type)))) } @@ -456,7 +456,7 @@ impl TypeSignature { | StacksEpochId::Epoch30 | StacksEpochId::Epoch31 | StacksEpochId::Epoch32 => self.admits_type_v2_1(other), - StacksEpochId::Epoch10 => Err(CodecError::Expect("epoch 1.0 not supported".into())), + StacksEpochId::Epoch10 => Err(CheckErrors::Expects("epoch 1.0 not supported".into())), } } @@ -542,11 +542,11 @@ impl TypeSignature { Ok(false) } } - NoType => Err(CodecError::CouldNotDetermineType), - CallableType(_) => Err(CodecError::Expect( + NoType => Err(CheckErrors::CouldNotDetermineType), + CallableType(_) => Err(CheckErrors::Expects( "CallableType should not be used in epoch v2.0".into(), )), - ListUnionType(_) => Err(CodecError::Expect( + ListUnionType(_) => Err(CheckErrors::Expects( "ListUnionType should not be used in epoch v2.0".into(), )), _ => Ok(other == self), @@ -642,7 +642,7 @@ impl TypeSignature { Ok(false) } } - NoType => Err(CodecError::CouldNotDetermineType), + NoType => Err(CheckErrors::CouldNotDetermineType), _ => Ok(&other == self), } } @@ -707,20 +707,20 @@ impl TypeSignature { match partial { CallableSubtype::Principal(_) => { if is_trait.is_some() { - return Err(CodecError::TypeError { - expected: Box::new(TypeSignature::PrincipalType), - found: Box::new(TypeSignature::CallableType(partial.clone())), - }); + return Err(CheckErrors::TypeError( + TypeSignature::PrincipalType, + TypeSignature::CallableType(partial.clone()), + )); } else { is_principal = true; } } CallableSubtype::Trait(t) => { if is_principal { - return Err(CodecError::TypeError { - expected: Box::new(TypeSignature::PrincipalType), - found: Box::new(TypeSignature::CallableType(partial.clone())), - }); + return Err(CheckErrors::TypeError( + TypeSignature::PrincipalType, + TypeSignature::CallableType(partial.clone()), + )); } else { is_trait = Some(t.clone()); } @@ -740,10 +740,10 @@ impl TypeSignature { } impl TryFrom> for TupleTypeSignature { - type Error = CodecError; + type Error = CheckErrors; fn try_from(type_data: Vec<(ClarityName, TypeSignature)>) -> Result { if type_data.is_empty() { - return Err(CodecError::EmptyTuplesNotAllowed); + return Err(CheckErrors::EmptyTuplesNotAllowed); } let mut type_map = BTreeMap::new(); @@ -751,7 +751,7 @@ impl TryFrom> for TupleTypeSignature { if let Entry::Vacant(e) = type_map.entry(name.clone()) { e.insert(type_info); } else { - return Err(CodecError::NameAlreadyUsedInTuple(name.into())); + return Err(CheckErrors::NameAlreadyUsed(name.into())); } } TupleTypeSignature::try_from(type_map) @@ -759,23 +759,23 @@ impl TryFrom> for 
TupleTypeSignature { } impl TryFrom> for TupleTypeSignature { - type Error = CodecError; + type Error = CheckErrors; fn try_from(type_map: BTreeMap) -> Result { if type_map.is_empty() { - return Err(CodecError::EmptyTuplesNotAllowed); + return Err(CheckErrors::EmptyTuplesNotAllowed); } for child_sig in type_map.values() { if (1 + child_sig.depth()) > MAX_TYPE_DEPTH { - return Err(CodecError::TypeSignatureTooDeep); + return Err(CheckErrors::TypeSignatureTooDeep); } } let type_map = Arc::new(type_map.into_iter().collect()); let result = TupleTypeSignature { type_map }; let would_be_size = result .inner_size()? - .ok_or_else(|| CodecError::ValueTooLarge)?; + .ok_or_else(|| CheckErrors::ValueTooLarge)?; if would_be_size > MAX_VALUE_SIZE { - Err(CodecError::ValueTooLarge) + Err(CheckErrors::ValueTooLarge) } else { Ok(result) } @@ -828,7 +828,7 @@ impl TypeSignature { pub fn empty_buffer() -> Result { Ok(SequenceType(SequenceSubtype::BufferType( 0_u32.try_into().map_err(|_| { - CodecError::Expect("FAIL: Empty clarity value size is not realizable".into()) + CheckErrors::Expects("FAIL: Empty clarity value size is not realizable".into()) })?, ))) } @@ -836,7 +836,7 @@ impl TypeSignature { pub fn min_buffer() -> Result { Ok(SequenceType(SequenceSubtype::BufferType( 1_u32.try_into().map_err(|_| { - CodecError::Expect("FAIL: Min clarity value size is not realizable".into()) + CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) })?, ))) } @@ -844,7 +844,7 @@ impl TypeSignature { pub fn min_string_ascii() -> Result { Ok(SequenceType(SequenceSubtype::StringType( StringSubtype::ASCII(1_u32.try_into().map_err(|_| { - CodecError::Expect("FAIL: Min clarity value size is not realizable".into()) + CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) })?), ))) } @@ -852,7 +852,7 @@ impl TypeSignature { pub fn min_string_utf8() -> Result { Ok(SequenceType(SequenceSubtype::StringType( StringSubtype::UTF8(1_u32.try_into().map_err(|_| { - CodecError::Expect("FAIL: Min clarity value size is not realizable".into()) + CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) })?), ))) } @@ -860,7 +860,7 @@ impl TypeSignature { pub fn max_string_ascii() -> Result { Ok(SequenceType(SequenceSubtype::StringType( StringSubtype::ASCII(BufferLength::try_from(MAX_VALUE_SIZE).map_err(|_| { - CodecError::Expect( + CheckErrors::Expects( "FAIL: Max Clarity Value Size is no longer realizable in ASCII Type".into(), ) })?), @@ -870,7 +870,7 @@ impl TypeSignature { pub fn max_string_utf8() -> Result { Ok(SequenceType(SequenceSubtype::StringType( StringSubtype::UTF8(StringUTF8Length::try_from(MAX_VALUE_SIZE / 4).map_err(|_| { - CodecError::Expect( + CheckErrors::Expects( "FAIL: Max Clarity Value Size is no longer realizable in UTF8 Type".into(), ) })?), @@ -880,7 +880,7 @@ impl TypeSignature { pub fn max_buffer() -> Result { Ok(SequenceType(SequenceSubtype::BufferType( BufferLength::try_from(MAX_VALUE_SIZE).map_err(|_| { - CodecError::Expect( + CheckErrors::Expects( "FAIL: Max Clarity Value Size is no longer realizable in Buffer Type".into(), ) })?, @@ -889,14 +889,14 @@ impl TypeSignature { pub fn contract_name_string_ascii_type() -> Result { TypeSignature::bound_string_ascii_type(CONTRACT_MAX_NAME_LENGTH.try_into().map_err( - |_| CodecError::Expect("FAIL: contract name max length exceeds u32 space".into()), + |_| CheckErrors::Expects("FAIL: contract name max length exceeds u32 space".into()), )?) 
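A small sketch (illustrative only) of the bounds enforced by the `TryFrom` conversions earlier in this file; the assertions assume `CheckErrors`, `BufferLength`, and `StringUTF8Length` derive `PartialEq` and `Debug`:

fn length_bounds() {
    assert_eq!(
        BufferLength::try_from(MAX_VALUE_SIZE + 1),
        Err(CheckErrors::ValueTooLarge)
    );
    assert_eq!(
        BufferLength::try_from(-1i128),
        Err(CheckErrors::ValueOutOfBounds)
    );
    // UTF-8 lengths are budgeted at 4 bytes per character, so this exceeds MAX_VALUE_SIZE.
    assert!(StringUTF8Length::try_from(MAX_VALUE_SIZE).is_err());
}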
} pub fn bound_string_ascii_type(max_len: u32) -> Result { Ok(SequenceType(SequenceSubtype::StringType( StringSubtype::ASCII(BufferLength::try_from(max_len).map_err(|_| { - CodecError::Expect( + CheckErrors::Expects( "FAIL: Max Clarity Value Size is no longer realizable in ASCII Type".into(), ) })?), @@ -918,7 +918,6 @@ impl TypeSignature { } } - /// /// This function returns the most-restrictive type that admits _both_ A and B (something like a least common supertype), /// or Errors if no such type exists. On error, it throws NoSuperType(A,B), unless a constructor error'ed -- in which case, /// it throws the constructor's error. @@ -954,7 +953,7 @@ impl TypeSignature { | StacksEpochId::Epoch30 | StacksEpochId::Epoch31 | StacksEpochId::Epoch32 => Self::least_supertype_v2_1(a, b), - StacksEpochId::Epoch10 => Err(CodecError::Expect("epoch 1.0 not supported".into())), + StacksEpochId::Epoch10 => Err(CheckErrors::Expects("epoch 1.0 not supported".into())), } } @@ -966,16 +965,15 @@ impl TypeSignature { ) => { let mut type_map_out = BTreeMap::new(); for (name, entry_a) in types_a.iter() { - let entry_b = types_b.get(name).ok_or(CodecError::TypeError { - expected: Box::new(a.clone()), - found: Box::new(b.clone()), - })?; + let entry_b = types_b + .get(name) + .ok_or(CheckErrors::TypeError(a.clone(), b.clone()))?; let entry_out = Self::least_supertype_v2_0(entry_a, entry_b)?; type_map_out.insert(name.clone(), entry_out); } Ok(TupleTypeSignature::try_from(type_map_out) .map(|x| x.into()) - .map_err(|_| CodecError::SupertypeTooLarge)?) + .map_err(|_| CheckErrors::SupertypeTooLarge)?) } ( SequenceType(SequenceSubtype::ListType(ListTypeData { @@ -996,7 +994,7 @@ impl TypeSignature { }; let max_len = cmp::max(len_a, len_b); Ok(Self::list_of(entry_type, *max_len) - .map_err(|_| CodecError::SupertypeTooLarge)?) + .map_err(|_| CheckErrors::SupertypeTooLarge)?) } (ResponseType(resp_a), ResponseType(resp_b)) => { let ok_type = @@ -1055,10 +1053,7 @@ impl TypeSignature { if x == y { Ok(x.clone()) } else { - Err(CodecError::TypeError { - expected: Box::new(a.clone()), - found: Box::new(b.clone()), - }) + Err(CheckErrors::TypeError(a.clone(), b.clone())) } } } @@ -1072,16 +1067,15 @@ impl TypeSignature { ) => { let mut type_map_out = BTreeMap::new(); for (name, entry_a) in types_a.iter() { - let entry_b = types_b.get(name).ok_or(CodecError::TypeError { - expected: Box::new(a.clone()), - found: Box::new(b.clone()), - })?; + let entry_b = types_b + .get(name) + .ok_or(CheckErrors::TypeError(a.clone(), b.clone()))?; let entry_out = Self::least_supertype_v2_1(entry_a, entry_b)?; type_map_out.insert(name.clone(), entry_out); } Ok(TupleTypeSignature::try_from(type_map_out) .map(|x| x.into()) - .map_err(|_| CodecError::SupertypeTooLarge)?) + .map_err(|_| CheckErrors::SupertypeTooLarge)?) } ( SequenceType(SequenceSubtype::ListType(ListTypeData { @@ -1102,7 +1096,7 @@ impl TypeSignature { }; let max_len = cmp::max(len_a, len_b); Ok(Self::list_of(entry_type, *max_len) - .map_err(|_| CodecError::SupertypeTooLarge)?) + .map_err(|_| CheckErrors::SupertypeTooLarge)?) 
} (ResponseType(resp_a), ResponseType(resp_b)) => { let ok_type = @@ -1183,10 +1177,7 @@ impl TypeSignature { if all_principals { Ok(PrincipalType) } else { - Err(CodecError::TypeError { - expected: Box::new(a.clone()), - found: Box::new(b.clone()), - }) + Err(CheckErrors::TypeError(a.clone(), b.clone())) } } (ListUnionType(l1), ListUnionType(l2)) => { @@ -1196,10 +1187,7 @@ impl TypeSignature { if x == y { Ok(x.clone()) } else { - Err(CodecError::TypeError { - expected: Box::new(a.clone()), - found: Box::new(b.clone()), - }) + Err(CheckErrors::TypeError(a.clone(), b.clone())) } } } @@ -1262,13 +1250,13 @@ impl TypeSignature { pub fn parent_list_type( children: &[TypeSignature], - ) -> std::result::Result { + ) -> std::result::Result { if let Some((first, rest)) = children.split_first() { let mut current_entry_type = first.clone(); for next_entry in rest.iter() { current_entry_type = Self::least_supertype_v2_1(¤t_entry_type, next_entry)?; } - let len = u32::try_from(children.len()).map_err(|_| CodecError::ValueTooLarge)?; + let len = u32::try_from(children.len()).map_err(|_| CheckErrors::ValueTooLarge)?; ListTypeData::new_list(current_entry_type, len) } else { Ok(TypeSignature::empty_list()) @@ -1310,7 +1298,7 @@ impl TypeSignature { pub fn size(&self) -> Result { self.inner_size()?.ok_or_else(|| { - CodecError::Expect( + CheckErrors::Expects( "FAIL: .size() overflowed on too large of a type. construction should have failed!" .into(), ) @@ -1352,7 +1340,7 @@ impl TypeSignature { pub fn type_size(&self) -> Result { self.inner_type_size() - .ok_or_else(|| CodecError::ValueTooLarge) + .ok_or_else(|| CheckErrors::ValueTooLarge) } /// Returns the size of the _type signature_ @@ -1433,7 +1421,7 @@ impl TupleTypeSignature { pub fn size(&self) -> Result { self.inner_size()? - .ok_or_else(|| CodecError::Expect("size() overflowed on a constructed type.".into())) + .ok_or_else(|| CheckErrors::Expects("size() overflowed on a constructed type.".into())) } fn max_depth(&self) -> u8 { diff --git a/clarity/Cargo.toml b/clarity/Cargo.toml index bd03934a1f..812f5d19be 100644 --- a/clarity/Cargo.toml +++ b/clarity/Cargo.toml @@ -18,27 +18,25 @@ name = "clarity" path = "./src/libclarity.rs" [dependencies] -serde = "1" -serde_derive = "1" +clarity-serialization = { package = "clarity-serialization", path = "../clarity-serialization", default-features = false } +serde = { workspace = true } +serde_derive = { workspace = true } +serde_json = { workspace = true } regex = "1" -lazy_static = "1.4.0" +lazy_static = { workspace = true } integer-sqrt = "0.1.3" -slog = { version = "2.5.2", features = [ "max_level_trace" ] } +slog = { workspace = true } stacks_common = { package = "stacks-common", path = "../stacks-common", default-features = false } rstest = { version = "0.17.0", optional = true } rstest_reuse = { version = "0.5.0", optional = true } -hashbrown = { workspace = true } rusqlite = { workspace = true, optional = true } -[dependencies.serde_json] -version = "1.0" -features = ["arbitrary_precision", "unbounded_depth"] - [dev-dependencies] assert-json-diff = "1.0.0" mutants = "0.0.3" rstest = { version = "0.17.0" } rstest_reuse = { version = "0.5.0" } + # a nightly rustc regression (35dbef235 2021-03-02) prevents criterion from compiling # but it isn't necessary for tests: only benchmarks. therefore, commenting out for now. 
# criterion = "0.3" @@ -48,11 +46,11 @@ serde_stacker = "0.1" [features] default = ["rusqlite"] -developer-mode = ["stacks_common/developer-mode"] +developer-mode = ["stacks_common/developer-mode", "clarity-serialization/developer-mode"] slog_json = ["stacks_common/slog_json"] -rusqlite = ["stacks_common/rusqlite", "dep:rusqlite"] -testing = ["rstest", "rstest_reuse", "rusqlite"] +rusqlite = ["stacks_common/rusqlite", "clarity-serialization/rusqlite", "dep:rusqlite"] +testing = ["rusqlite", "rstest", "rstest_reuse", "clarity-serialization/testing"] devtools = [] rollback_value_check = [] disable-costs = [] -wasm-web = ["stacks_common/wasm-web"] +wasm-web = ["stacks_common/wasm-web", "clarity-serialization/wasm-web"] diff --git a/clarity/src/vm/analysis/analysis_db.rs b/clarity/src/vm/analysis/analysis_db.rs index dda74dd5c0..2510006f54 100644 --- a/clarity/src/vm/analysis/analysis_db.rs +++ b/clarity/src/vm/analysis/analysis_db.rs @@ -16,6 +16,8 @@ use std::collections::{BTreeMap, BTreeSet}; +use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::{QualifiedContractIdentifier, TraitIdentifier}; use stacks_common::types::StacksEpochId; use crate::vm::analysis::errors::{CheckErrors, CheckResult}; @@ -23,9 +25,8 @@ use crate::vm::analysis::type_checker::ContractAnalysis; use crate::vm::database::{ ClarityBackingStore, ClarityDeserializable, ClaritySerializable, RollbackWrapper, }; -use crate::vm::representations::ClarityName; use crate::vm::types::signatures::FunctionSignature; -use crate::vm::types::{FunctionType, QualifiedContractIdentifier, TraitIdentifier}; +use crate::vm::types::FunctionType; use crate::vm::ClarityVersion; pub struct AnalysisDatabase<'a> { diff --git a/clarity/src/vm/analysis/arithmetic_checker/mod.rs b/clarity/src/vm/analysis/arithmetic_checker/mod.rs index fc1b0659c4..23de142306 100644 --- a/clarity/src/vm/analysis/arithmetic_checker/mod.rs +++ b/clarity/src/vm/analysis/arithmetic_checker/mod.rs @@ -14,16 +14,18 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use clarity_serialization::representations::ClarityName; + pub use super::errors::{ check_argument_count, check_arguments_at_least, CheckError, CheckErrors, CheckResult, }; use crate::vm::analysis::types::ContractAnalysis; use crate::vm::functions::define::{DefineFunctions, DefineFunctionsParsed}; use crate::vm::functions::NativeFunctions; +use crate::vm::representations::SymbolicExpression; use crate::vm::representations::SymbolicExpressionType::{ Atom, AtomValue, Field, List, LiteralValue, TraitReference, }; -use crate::vm::representations::{ClarityName, SymbolicExpression}; use crate::vm::variables::NativeVariables; use crate::vm::ClarityVersion; diff --git a/clarity/src/vm/analysis/errors.rs b/clarity/src/vm/analysis/errors.rs index 29a5081626..85656d3a52 100644 --- a/clarity/src/vm/analysis/errors.rs +++ b/clarity/src/vm/analysis/errors.rs @@ -14,482 +14,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{error, fmt}; - -use crate::vm::costs::{CostErrors, ExecutionCost}; -use crate::vm::diagnostic::{DiagnosableError, Diagnostic}; -use crate::vm::representations::SymbolicExpression; -use crate::vm::types::{TraitIdentifier, TupleTypeSignature, TypeSignature, Value}; - -pub type CheckResult = Result; - -#[derive(Debug, PartialEq)] -pub enum CheckErrors { - // cost checker errors - CostOverflow, - CostBalanceExceeded(ExecutionCost, ExecutionCost), - MemoryBalanceExceeded(u64, u64), - CostComputationFailed(String), - - ValueTooLarge, - ValueOutOfBounds, - TypeSignatureTooDeep, - ExpectedName, - SupertypeTooLarge, - - // unexpected interpreter behavior - Expects(String), - - // match errors - BadMatchOptionSyntax(Box), - BadMatchResponseSyntax(Box), - BadMatchInput(TypeSignature), - - // list typing errors - UnknownListConstructionFailure, - ListTypesMustMatch, - ConstructedListTooLarge, - - // simple type expectation mismatch - TypeError(TypeSignature, TypeSignature), - TypeLiteralError(TypeSignature, TypeSignature), - TypeValueError(TypeSignature, Value), - - NoSuperType(TypeSignature, TypeSignature), - InvalidTypeDescription, - UnknownTypeName(String), - - // union type mismatch - UnionTypeError(Vec, TypeSignature), - UnionTypeValueError(Vec, Value), - - ExpectedLiteral, - ExpectedOptionalType(TypeSignature), - ExpectedResponseType(TypeSignature), - ExpectedOptionalOrResponseType(TypeSignature), - ExpectedOptionalValue(Value), - ExpectedResponseValue(Value), - ExpectedOptionalOrResponseValue(Value), - CouldNotDetermineResponseOkType, - CouldNotDetermineResponseErrType, - CouldNotDetermineSerializationType, - UncheckedIntermediaryResponses, - - CouldNotDetermineMatchTypes, - CouldNotDetermineType, - - // Checker runtime failures - TypeAlreadyAnnotatedFailure, - TypeAnnotationExpectedFailure, - CheckerImplementationFailure, - - // Assets - BadTokenName, - DefineFTBadSignature, - DefineNFTBadSignature, - NoSuchNFT(String), - NoSuchFT(String), - - BadTransferSTXArguments, - BadTransferFTArguments, - BadTransferNFTArguments, - BadMintFTArguments, - BadBurnFTArguments, - - // tuples - BadTupleFieldName, - ExpectedTuple(TypeSignature), - NoSuchTupleField(String, TupleTypeSignature), - EmptyTuplesNotAllowed, - BadTupleConstruction, - TupleExpectsPairs, - - // variables - NoSuchDataVariable(String), - - // data map - BadMapName, - NoSuchMap(String), - - // defines - DefineFunctionBadSignature, - BadFunctionName, - BadMapTypeDefinition, - PublicFunctionMustReturnResponse(TypeSignature), - DefineVariableBadSignature, - ReturnTypesMustMatch(TypeSignature, TypeSignature), - - CircularReference(Vec), - - // contract-call errors - NoSuchContract(String), - NoSuchPublicFunction(String, String), - PublicFunctionNotReadOnly(String, String), - ContractAlreadyExists(String), - ContractCallExpectName, - ExpectedCallableType(TypeSignature), - - // get-block-info? 
errors - NoSuchBlockInfoProperty(String), - NoSuchBurnBlockInfoProperty(String), - NoSuchStacksBlockInfoProperty(String), - NoSuchTenureInfoProperty(String), - GetBlockInfoExpectPropertyName, - GetBurnBlockInfoExpectPropertyName, - GetStacksBlockInfoExpectPropertyName, - GetTenureInfoExpectPropertyName, - - NameAlreadyUsed(String), - ReservedWord(String), - - // expect a function, or applying a function to a list - NonFunctionApplication, - ExpectedListApplication, - ExpectedSequence(TypeSignature), - MaxLengthOverflow, - - // let syntax - BadLetSyntax, - - // generic binding syntax - BadSyntaxBinding, - BadSyntaxExpectedListOfPairs, - - MaxContextDepthReached, - UndefinedFunction(String), - UndefinedVariable(String), - - // argument counts - RequiresAtLeastArguments(usize, usize), - RequiresAtMostArguments(usize, usize), - IncorrectArgumentCount(usize, usize), - IfArmsMustMatch(TypeSignature, TypeSignature), - MatchArmsMustMatch(TypeSignature, TypeSignature), - DefaultTypesMustMatch(TypeSignature, TypeSignature), - TooManyExpressions, - IllegalOrUnknownFunctionApplication(String), - UnknownFunction(String), - - // traits - NoSuchTrait(String, String), - TraitReferenceUnknown(String), - TraitMethodUnknown(String, String), - ExpectedTraitIdentifier, - ImportTraitBadSignature, - TraitReferenceNotAllowed, - BadTraitImplementation(String, String), - DefineTraitBadSignature, - DefineTraitDuplicateMethod(String), - UnexpectedTraitOrFieldReference, - TraitBasedContractCallInReadOnly, - ContractOfExpectsTrait, - IncompatibleTrait(TraitIdentifier, TraitIdentifier), - - // strings - InvalidCharactersDetected, - InvalidUTF8Encoding, - - // secp256k1 signature - InvalidSecp65k1Signature, - - WriteAttemptedInReadOnly, - AtBlockClosureMustBeReadOnly, - - // time checker errors - ExecutionTimeExpired, -} - -#[derive(Debug, PartialEq)] -pub struct CheckError { - pub err: CheckErrors, - pub expressions: Option>, - pub diagnostic: Diagnostic, -} - -impl CheckErrors { - /// Does this check error indicate that the transaction should be - /// rejected? 
- pub fn rejectable(&self) -> bool { - matches!( - self, - CheckErrors::SupertypeTooLarge | CheckErrors::Expects(_) - ) - } -} - -impl CheckError { - pub fn new(err: CheckErrors) -> CheckError { - let diagnostic = Diagnostic::err(&err); - CheckError { - err, - expressions: None, - diagnostic, - } - } - - pub fn has_expression(&self) -> bool { - self.expressions.is_some() - } - - pub fn set_expression(&mut self, expr: &SymbolicExpression) { - self.diagnostic.spans = vec![expr.span().clone()]; - self.expressions.replace(vec![expr.clone()]); - } - - pub fn set_expressions(&mut self, exprs: &[SymbolicExpression]) { - self.diagnostic.spans = exprs.iter().map(|e| e.span().clone()).collect(); - self.expressions.replace(exprs.to_vec()); - } -} - -impl fmt::Display for CheckErrors { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{self:?}") - } -} - -impl fmt::Display for CheckError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.err)?; - - if let Some(ref e) = self.expressions { - write!(f, "\nNear:\n{e:?}")?; - } - - Ok(()) - } -} - -impl From for CheckError { - fn from(err: CostErrors) -> Self { - CheckError::from(CheckErrors::from(err)) - } -} - -impl From for CheckErrors { - fn from(err: CostErrors) -> Self { - match err { - CostErrors::CostOverflow => CheckErrors::CostOverflow, - CostErrors::CostBalanceExceeded(a, b) => CheckErrors::CostBalanceExceeded(a, b), - CostErrors::MemoryBalanceExceeded(a, b) => CheckErrors::MemoryBalanceExceeded(a, b), - CostErrors::CostComputationFailed(s) => CheckErrors::CostComputationFailed(s), - CostErrors::CostContractLoadFailure => { - CheckErrors::CostComputationFailed("Failed to load cost contract".into()) - } - CostErrors::InterpreterFailure => { - CheckErrors::Expects("Unexpected interpreter failure in cost computation".into()) - } - CostErrors::Expect(s) => CheckErrors::Expects(s), - CostErrors::ExecutionTimeExpired => CheckErrors::ExecutionTimeExpired, - } - } -} - -impl error::Error for CheckError { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - None - } -} - -impl error::Error for CheckErrors { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - None - } -} - -impl From for CheckError { - fn from(err: CheckErrors) -> Self { - CheckError::new(err) - } -} - -pub fn check_argument_count(expected: usize, args: &[T]) -> Result<(), CheckErrors> { - if args.len() != expected { - Err(CheckErrors::IncorrectArgumentCount(expected, args.len())) - } else { - Ok(()) - } -} - -pub fn check_arguments_at_least(expected: usize, args: &[T]) -> Result<(), CheckErrors> { - if args.len() < expected { - Err(CheckErrors::RequiresAtLeastArguments(expected, args.len())) - } else { - Ok(()) - } -} - -pub fn check_arguments_at_most(expected: usize, args: &[T]) -> Result<(), CheckErrors> { - if args.len() > expected { - Err(CheckErrors::RequiresAtMostArguments(expected, args.len())) - } else { - Ok(()) - } -} - -fn formatted_expected_types(expected_types: &[TypeSignature]) -> String { - let mut expected_types_joined = format!("'{}'", expected_types[0]); - - if expected_types.len() > 2 { - for expected_type in expected_types[1..expected_types.len() - 1].iter() { - expected_types_joined.push_str(&format!(", '{expected_type}'")); - } - } - expected_types_joined.push_str(&format!( - " or '{}'", - expected_types[expected_types.len() - 1] - )); - expected_types_joined -} - -impl DiagnosableError for CheckErrors { - fn message(&self) -> String { - match &self { - CheckErrors::ExpectedLiteral => 
"expected a literal argument".into(), - CheckErrors::SupertypeTooLarge => "supertype of two types is too large".into(), - CheckErrors::Expects(s) => format!("unexpected interpreter behavior: {s}"), - CheckErrors::BadMatchOptionSyntax(source) => - format!("match on a optional type uses the following syntax: (match input some-name if-some-expression if-none-expression). Caused by: {}", - source.message()), - CheckErrors::BadMatchResponseSyntax(source) => - format!("match on a result type uses the following syntax: (match input ok-name if-ok-expression err-name if-err-expression). Caused by: {}", - source.message()), - CheckErrors::BadMatchInput(t) => - format!("match requires an input of either a response or optional, found input: '{t}'"), - CheckErrors::TypeAnnotationExpectedFailure => "analysis expected type to already be annotated for expression".into(), - CheckErrors::CostOverflow => "contract execution cost overflowed cost counter".into(), - CheckErrors::CostBalanceExceeded(a, b) => format!("contract execution cost exceeded budget: {a:?} > {b:?}"), - CheckErrors::MemoryBalanceExceeded(a, b) => format!("contract execution cost exceeded memory budget: {a:?} > {b:?}"), - CheckErrors::InvalidTypeDescription => "supplied type description is invalid".into(), - CheckErrors::EmptyTuplesNotAllowed => "tuple types may not be empty".into(), - CheckErrors::BadSyntaxExpectedListOfPairs => "bad syntax: function expects a list of pairs to bind names, e.g., ((name-0 a) (name-1 b) ...)".into(), - CheckErrors::UnknownTypeName(name) => format!("failed to parse type: '{name}'"), - CheckErrors::ValueTooLarge => "created a type which was greater than maximum allowed value size".into(), - CheckErrors::ValueOutOfBounds => "created a type which value size was out of defined bounds".into(), - CheckErrors::TypeSignatureTooDeep => "created a type which was deeper than maximum allowed type depth".into(), - CheckErrors::ExpectedName => "expected a name argument to this function".into(), - CheckErrors::NoSuperType(a, b) => format!("unable to create a supertype for the two types: '{a}' and '{b}'"), - CheckErrors::UnknownListConstructionFailure => "invalid syntax for list definition".into(), - CheckErrors::ListTypesMustMatch => "expecting elements of same type in a list".into(), - CheckErrors::ConstructedListTooLarge => "reached limit of elements in a sequence".into(), - CheckErrors::TypeError(expected_type, found_type) => format!("expecting expression of type '{expected_type}', found '{found_type}'"), - CheckErrors::TypeLiteralError(expected_type, found_type) => format!("expecting a literal of type '{expected_type}', found '{found_type}'"), - CheckErrors::TypeValueError(expected_type, found_value) => format!("expecting expression of type '{expected_type}', found '{found_value}'"), - CheckErrors::UnionTypeError(expected_types, found_type) => format!("expecting expression of type {}, found '{}'", formatted_expected_types(expected_types), found_type), - CheckErrors::UnionTypeValueError(expected_types, found_type) => format!("expecting expression of type {}, found '{}'", formatted_expected_types(expected_types), found_type), - CheckErrors::ExpectedOptionalType(found_type) => format!("expecting expression of type 'optional', found '{found_type}'"), - CheckErrors::ExpectedOptionalOrResponseType(found_type) => format!("expecting expression of type 'optional' or 'response', found '{found_type}'"), - CheckErrors::ExpectedOptionalOrResponseValue(found_type) => format!("expecting expression of type 'optional' or 'response', 
found '{found_type}'"), - CheckErrors::ExpectedResponseType(found_type) => format!("expecting expression of type 'response', found '{found_type}'"), - CheckErrors::ExpectedOptionalValue(found_type) => format!("expecting expression of type 'optional', found '{found_type}'"), - CheckErrors::ExpectedResponseValue(found_type) => format!("expecting expression of type 'response', found '{found_type}'"), - CheckErrors::CouldNotDetermineResponseOkType => "attempted to obtain 'ok' value from response, but 'ok' type is indeterminate".into(), - CheckErrors::CouldNotDetermineResponseErrType => "attempted to obtain 'err' value from response, but 'err' type is indeterminate".into(), - CheckErrors::CouldNotDetermineMatchTypes => "attempted to match on an (optional) or (response) type where either the some, ok, or err type is indeterminate. you may wish to use unwrap-panic or unwrap-err-panic instead.".into(), - CheckErrors::CouldNotDetermineType => "type of expression cannot be determined".into(), - CheckErrors::BadTupleFieldName => "invalid tuple field name".into(), - CheckErrors::ExpectedTuple(type_signature) => format!("expecting tuple, found '{type_signature}'"), - CheckErrors::NoSuchTupleField(field_name, tuple_signature) => format!("cannot find field '{field_name}' in tuple '{tuple_signature}'"), - CheckErrors::BadTupleConstruction => "invalid tuple syntax, expecting list of pair".into(), - CheckErrors::TupleExpectsPairs => "invalid tuple syntax, expecting pair".into(), - CheckErrors::NoSuchDataVariable(var_name) => format!("use of unresolved persisted variable '{var_name}'"), - CheckErrors::BadTransferSTXArguments => "STX transfer expects an int amount, from principal, to principal".into(), - CheckErrors::BadTransferFTArguments => "transfer expects an int amount, from principal, to principal".into(), - CheckErrors::BadTransferNFTArguments => "transfer expects an asset, from principal, to principal".into(), - CheckErrors::BadMintFTArguments => "mint expects a uint amount and from principal".into(), - CheckErrors::BadBurnFTArguments => "burn expects a uint amount and from principal".into(), - CheckErrors::BadMapName => "invalid map name".into(), - CheckErrors::NoSuchMap(map_name) => format!("use of unresolved map '{map_name}'"), - CheckErrors::DefineFunctionBadSignature => "invalid function definition".into(), - CheckErrors::BadFunctionName => "invalid function name".into(), - CheckErrors::BadMapTypeDefinition => "invalid map definition".into(), - CheckErrors::PublicFunctionMustReturnResponse(found_type) => format!("public functions must return an expression of type 'response', found '{found_type}'"), - CheckErrors::DefineVariableBadSignature => "invalid variable definition".into(), - CheckErrors::ReturnTypesMustMatch(type_1, type_2) => format!("detected two execution paths, returning two different expression types (got '{type_1}' and '{type_2}')"), - CheckErrors::NoSuchContract(contract_identifier) => format!("use of unresolved contract '{contract_identifier}'"), - CheckErrors::NoSuchPublicFunction(contract_identifier, function_name) => format!("contract '{contract_identifier}' has no public function '{function_name}'"), - CheckErrors::PublicFunctionNotReadOnly(contract_identifier, function_name) => format!("function '{contract_identifier}' in '{function_name}' is not read-only"), - CheckErrors::ContractAlreadyExists(contract_identifier) => format!("contract name '{contract_identifier}' conflicts with existing contract"), - CheckErrors::ContractCallExpectName => "missing contract name for 
call".into(), - CheckErrors::ExpectedCallableType(found_type) => format!("expected a callable contract, found {found_type}"), - CheckErrors::NoSuchBlockInfoProperty(property_name) => format!("use of block unknown property '{property_name}'"), - CheckErrors::NoSuchBurnBlockInfoProperty(property_name) => format!("use of burn block unknown property '{property_name}'"), - CheckErrors::NoSuchStacksBlockInfoProperty(property_name) => format!("use of unknown stacks block property '{property_name}'"), - CheckErrors::NoSuchTenureInfoProperty(property_name) => format!("use of unknown tenure property '{property_name}'"), - CheckErrors::GetBlockInfoExpectPropertyName => "missing property name for block info introspection".into(), - CheckErrors::GetBurnBlockInfoExpectPropertyName => "missing property name for burn block info introspection".into(), - CheckErrors::GetStacksBlockInfoExpectPropertyName => "missing property name for stacks block info introspection".into(), - CheckErrors::GetTenureInfoExpectPropertyName => "missing property name for tenure info introspection".into(), - CheckErrors::NameAlreadyUsed(name) => format!("defining '{name}' conflicts with previous value"), - CheckErrors::ReservedWord(name) => format!("{name} is a reserved word"), - CheckErrors::NonFunctionApplication => "expecting expression of type function".into(), - CheckErrors::ExpectedListApplication => "expecting expression of type list".into(), - CheckErrors::ExpectedSequence(found_type) => format!("expecting expression of type 'list', 'buff', 'string-ascii' or 'string-utf8' - found '{found_type}'"), - CheckErrors::MaxLengthOverflow => format!("expecting a value <= {}", u32::MAX), - CheckErrors::BadLetSyntax => "invalid syntax of 'let'".into(), - CheckErrors::CircularReference(references) => format!("detected circular reference: ({})", references.join(", ")), - CheckErrors::BadSyntaxBinding => "invalid syntax binding".into(), - CheckErrors::MaxContextDepthReached => "reached depth limit".into(), - CheckErrors::UndefinedVariable(var_name) => format!("use of unresolved variable '{var_name}'"), - CheckErrors::UndefinedFunction(var_name) => format!("use of unresolved function '{var_name}'"), - CheckErrors::RequiresAtLeastArguments(expected, found) => format!("expecting >= {expected} arguments, got {found}"), - CheckErrors::RequiresAtMostArguments(expected, found) => format!("expecting < {expected} arguments, got {found}"), - CheckErrors::IncorrectArgumentCount(expected_count, found_count) => format!("expecting {expected_count} arguments, got {found_count}"), - CheckErrors::IfArmsMustMatch(type_1, type_2) => format!("expression types returned by the arms of 'if' must match (got '{type_1}' and '{type_2}')"), - CheckErrors::MatchArmsMustMatch(type_1, type_2) => format!("expression types returned by the arms of 'match' must match (got '{type_1}' and '{type_2}')"), - CheckErrors::DefaultTypesMustMatch(type_1, type_2) => format!("expression types passed in 'default-to' must match (got '{type_1}' and '{type_2}')"), - CheckErrors::TooManyExpressions => "reached limit of expressions".into(), - CheckErrors::IllegalOrUnknownFunctionApplication(function_name) => format!("use of illegal / unresolved function '{function_name}"), - CheckErrors::UnknownFunction(function_name) => format!("use of unresolved function '{function_name}'"), - CheckErrors::TraitBasedContractCallInReadOnly => "use of trait based contract calls are not allowed in read-only context".into(), - CheckErrors::WriteAttemptedInReadOnly => "expecting read-only statements, 
detected a writing operation".into(), - CheckErrors::AtBlockClosureMustBeReadOnly => "(at-block ...) closures expect read-only statements, but detected a writing operation".into(), - CheckErrors::BadTokenName => "expecting an token name as an argument".into(), - CheckErrors::DefineFTBadSignature => "(define-token ...) expects a token name as an argument".into(), - CheckErrors::DefineNFTBadSignature => "(define-asset ...) expects an asset name and an asset identifier type signature as arguments".into(), - CheckErrors::NoSuchNFT(asset_name) => format!("tried to use asset function with a undefined asset ('{asset_name}')"), - CheckErrors::NoSuchFT(asset_name) => format!("tried to use token function with a undefined token ('{asset_name}')"), - CheckErrors::NoSuchTrait(contract_name, trait_name) => format!("use of unresolved trait {contract_name}.{trait_name}"), - CheckErrors::TraitReferenceUnknown(trait_name) => format!("use of undeclared trait <{trait_name}>"), - CheckErrors::TraitMethodUnknown(trait_name, func_name) => format!("method '{func_name}' unspecified in trait <{trait_name}>"), - CheckErrors::ImportTraitBadSignature => "(use-trait ...) expects a trait name and a trait identifier".into(), - CheckErrors::BadTraitImplementation(trait_name, func_name) => format!("invalid signature for method '{func_name}' regarding trait's specification <{trait_name}>"), - CheckErrors::ExpectedTraitIdentifier => "expecting expression of type trait identifier".into(), - CheckErrors::UnexpectedTraitOrFieldReference => "unexpected use of trait reference or field".into(), - CheckErrors::DefineTraitBadSignature => "invalid trait definition".into(), - CheckErrors::DefineTraitDuplicateMethod(method_name) => format!("duplicate method name '{method_name}' in trait definition"), - CheckErrors::TraitReferenceNotAllowed => "trait references can not be stored".into(), - CheckErrors::ContractOfExpectsTrait => "trait reference expected".into(), - CheckErrors::IncompatibleTrait(expected_trait, actual_trait) => format!("trait '{actual_trait}' is not a compatible with expected trait, '{expected_trait}'"), - CheckErrors::InvalidCharactersDetected => "invalid characters detected".into(), - CheckErrors::InvalidUTF8Encoding => "invalid UTF8 encoding".into(), - CheckErrors::InvalidSecp65k1Signature => "invalid seckp256k1 signature".into(), - CheckErrors::TypeAlreadyAnnotatedFailure | CheckErrors::CheckerImplementationFailure => { - "internal error - please file an issue on https://github.com/stacks-network/stacks-blockchain".into() - }, - CheckErrors::UncheckedIntermediaryResponses => "intermediary responses in consecutive statements must be checked".into(), - CheckErrors::CostComputationFailed(s) => format!("contract cost computation failed: {s}"), - CheckErrors::CouldNotDetermineSerializationType => "could not determine the input type for the serialization function".into(), - CheckErrors::ExecutionTimeExpired => "execution time expired".into(), - } - } - - fn suggestion(&self) -> Option { - match &self { - CheckErrors::BadSyntaxBinding => { - Some("binding syntax example: ((supply int) (ttl int))".into()) - } - CheckErrors::BadLetSyntax => Some( - "'let' syntax example: (let ((supply 1000) (ttl 60)) )".into(), - ), - CheckErrors::TraitReferenceUnknown(_) => Some( - "traits should be either defined, with define-trait, or imported, with use-trait." 
- .into(), - ), - CheckErrors::NoSuchBlockInfoProperty(_) => Some( - "properties available: time, header-hash, burnchain-header-hash, vrf-seed".into(), - ), - _ => None, - } - } -} +pub use clarity_serialization::errors::analysis::{ + check_argument_count, check_arguments_at_least, check_arguments_at_most, CheckError, + CheckErrors, CheckResult, +}; diff --git a/clarity/src/vm/analysis/read_only_checker/mod.rs b/clarity/src/vm/analysis/read_only_checker/mod.rs index a244bf7101..4c3e6d025d 100644 --- a/clarity/src/vm/analysis/read_only_checker/mod.rs +++ b/clarity/src/vm/analysis/read_only_checker/mod.rs @@ -14,7 +14,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use hashbrown::HashMap; +use std::collections::HashMap; + +use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::{PrincipalData, Value}; use stacks_common::types::StacksEpochId; pub use super::errors::{ @@ -27,8 +30,7 @@ use crate::vm::functions::NativeFunctions; use crate::vm::representations::SymbolicExpressionType::{ Atom, AtomValue, Field, List, LiteralValue, TraitReference, }; -use crate::vm::representations::{ClarityName, SymbolicExpression, SymbolicExpressionType}; -use crate::vm::types::{PrincipalData, Value}; +use crate::vm::representations::{SymbolicExpression, SymbolicExpressionType}; use crate::vm::ClarityVersion; #[cfg(test)] diff --git a/clarity/src/vm/analysis/type_checker/contexts.rs b/clarity/src/vm/analysis/type_checker/contexts.rs index 4a67ab768e..7637f79a91 100644 --- a/clarity/src/vm/analysis/type_checker/contexts.rs +++ b/clarity/src/vm/analysis/type_checker/contexts.rs @@ -14,9 +14,8 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; -use hashbrown::HashMap; use stacks_common::types::StacksEpochId; use crate::vm::analysis::errors::{CheckError, CheckErrors, CheckResult}; diff --git a/clarity/src/vm/analysis/type_checker/v2_05/contexts.rs b/clarity/src/vm/analysis/type_checker/v2_05/contexts.rs index f765878254..d59ff4c2a8 100644 --- a/clarity/src/vm/analysis/type_checker/v2_05/contexts.rs +++ b/clarity/src/vm/analysis/type_checker/v2_05/contexts.rs @@ -14,9 +14,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
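Reviewer note (annotation, not part of the diff): the hunk below, like several later ones, swaps `hashbrown::{HashMap, HashSet}` for the `std::collections` equivalents. For the insert/lookup/iteration patterns used in these files the two APIs are drop-in compatible, so only the `use` lines change. A small self-contained illustration with hypothetical names:

```rust
use std::collections::{HashMap, HashSet};

// The same body compiles unchanged against hashbrown's HashMap/HashSet;
// only the import path differs.
fn index_names(names: &[&str]) -> (HashMap<String, usize>, HashSet<String>) {
    let mut by_first_position = HashMap::new();
    let mut unique = HashSet::new();
    for (i, name) in names.iter().enumerate() {
        by_first_position.entry(name.to_string()).or_insert(i);
        unique.insert(name.to_string());
    }
    (by_first_position, unique)
}
```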
-use std::collections::BTreeMap; - -use hashbrown::{HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; use crate::vm::analysis::errors::{CheckError, CheckErrors, CheckResult}; use crate::vm::analysis::types::ContractAnalysis; diff --git a/clarity/src/vm/analysis/type_checker/v2_05/mod.rs b/clarity/src/vm/analysis/type_checker/v2_05/mod.rs index 40bc742334..4a704abb66 100644 --- a/clarity/src/vm/analysis/type_checker/v2_05/mod.rs +++ b/clarity/src/vm/analysis/type_checker/v2_05/mod.rs @@ -41,7 +41,7 @@ use crate::vm::representations::SymbolicExpressionType::{ Atom, AtomValue, Field, List, LiteralValue, TraitReference, }; use crate::vm::representations::{depth_traverse, ClarityName, SymbolicExpression}; -use crate::vm::types::signatures::FunctionSignature; +use crate::vm::types::signatures::{FunctionSignature, TypeSignatureExt as _}; use crate::vm::types::{ parse_name_type_pairs, FixedFunction, FunctionArg, FunctionType, PrincipalData, QualifiedContractIdentifier, TypeSignature, Value, diff --git a/clarity/src/vm/analysis/type_checker/v2_05/natives/mod.rs b/clarity/src/vm/analysis/type_checker/v2_05/natives/mod.rs index c0dfef5350..9e01c50540 100644 --- a/clarity/src/vm/analysis/type_checker/v2_05/natives/mod.rs +++ b/clarity/src/vm/analysis/type_checker/v2_05/natives/mod.rs @@ -58,9 +58,8 @@ fn check_special_list_cons( type_arg.type_size()?, )?; } - TypeSignature::parent_list_type(&typed_args) - .map_err(|x| x.into()) - .map(TypeSignature::from) + let list_type = TypeSignature::parent_list_type(&typed_args).map_err(CheckError::from)?; + Ok(TypeSignature::from(list_type)) } fn check_special_print( diff --git a/clarity/src/vm/analysis/type_checker/v2_05/natives/options.rs b/clarity/src/vm/analysis/type_checker/v2_05/natives/options.rs index 55469262df..5644ece916 100644 --- a/clarity/src/vm/analysis/type_checker/v2_05/natives/options.rs +++ b/clarity/src/vm/analysis/type_checker/v2_05/natives/options.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
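Reviewer note (annotation, not part of the diff): in the `check_special_list_cons` hunk above, the `map_err(|x| x.into()).map(...)` chain is replaced by an explicit `CheckError::from` conversion followed by `?`, which makes the error type of each step visible. A generic sketch of the same shape (all names hypothetical):

```rust
#[derive(Debug)]
struct ListTypeError;

#[derive(Debug)]
struct CheckFailure;

impl From<ListTypeError> for CheckFailure {
    fn from(_: ListTypeError) -> Self {
        CheckFailure
    }
}

fn parent_list_len(len: usize) -> Result<usize, ListTypeError> {
    if len > 0 { Ok(len) } else { Err(ListTypeError) }
}

fn check_list(len: usize) -> Result<String, CheckFailure> {
    // Same shape as the new code: convert the inner error explicitly, then use `?`.
    let n = parent_list_len(len).map_err(CheckFailure::from)?;
    Ok(format!("(list {n} ...)"))
}
```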
+use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::TypeSignature; use stacks_common::types::StacksEpochId; use crate::vm::analysis::type_checker::v2_05::{ @@ -22,8 +24,7 @@ use crate::vm::analysis::type_checker::v2_05::{ }; use crate::vm::costs::cost_functions::ClarityCostFunction; use crate::vm::costs::{analysis_typecheck_cost, runtime_cost}; -use crate::vm::representations::{ClarityName, SymbolicExpression}; -use crate::vm::types::TypeSignature; +use crate::vm::representations::SymbolicExpression; pub fn check_special_okay( checker: &mut TypeChecker, diff --git a/clarity/src/vm/analysis/type_checker/v2_05/tests/mod.rs b/clarity/src/vm/analysis/type_checker/v2_05/tests/mod.rs index 68c7bec3b6..47f3ed2389 100644 --- a/clarity/src/vm/analysis/type_checker/v2_05/tests/mod.rs +++ b/clarity/src/vm/analysis/type_checker/v2_05/tests/mod.rs @@ -25,7 +25,8 @@ use crate::vm::types::SequenceSubtype::*; use crate::vm::types::StringSubtype::*; use crate::vm::types::TypeSignature::{BoolType, IntType, PrincipalType, UIntType}; use crate::vm::types::{ - FixedFunction, FunctionType, QualifiedContractIdentifier, TypeSignature, BUFF_32, BUFF_64, + FixedFunction, FunctionType, QualifiedContractIdentifier, TypeSignature, TypeSignatureExt as _, + BUFF_32, BUFF_64, }; use crate::vm::ClarityVersion; @@ -1293,7 +1294,7 @@ fn test_high_order_map() { fn test_function_order_tuples() { let snippet = " (define-read-only (get-score) - (ok + (ok (tuple (score (get-zero)) ) diff --git a/clarity/src/vm/analysis/type_checker/v2_1/contexts.rs b/clarity/src/vm/analysis/type_checker/v2_1/contexts.rs index 8ac9ee8254..6621e9bc38 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/contexts.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/contexts.rs @@ -14,9 +14,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::collections::BTreeMap; - -use hashbrown::{HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; use crate::vm::analysis::errors::{CheckError, CheckErrors, CheckResult}; use crate::vm::analysis::type_checker::is_reserved_word; diff --git a/clarity/src/vm/analysis/type_checker/v2_1/mod.rs b/clarity/src/vm/analysis/type_checker/v2_1/mod.rs index 17ee17f615..21f3dfa98e 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/mod.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/mod.rs @@ -49,7 +49,7 @@ use crate::vm::types::{ parse_name_type_pairs, FixedFunction, FunctionArg, FunctionType, ListData, ListTypeData, OptionalData, PrincipalData, QualifiedContractIdentifier, ResponseData, SequenceData, SequenceSubtype, StringSubtype, TraitIdentifier, TupleData, TupleTypeSignature, TypeSignature, - Value, MAX_TYPE_DEPTH, + TypeSignatureExt as _, Value, MAX_TYPE_DEPTH, }; use crate::vm::variables::NativeVariables; use crate::vm::ClarityVersion; diff --git a/clarity/src/vm/analysis/type_checker/v2_1/natives/conversions.rs b/clarity/src/vm/analysis/type_checker/v2_1/natives/conversions.rs index 95fe6f9bf9..89866b5a91 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/natives/conversions.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/natives/conversions.rs @@ -4,7 +4,7 @@ use super::{TypeChecker, TypeResult}; use crate::vm::analysis::read_only_checker::check_argument_count; use crate::vm::analysis::type_checker::contexts::TypingContext; use crate::vm::analysis::CheckError; -use crate::vm::types::{BufferLength, SequenceSubtype, TypeSignature}; +use crate::vm::types::{BufferLength, SequenceSubtype, TypeSignature, TypeSignatureExt as _}; use crate::vm::SymbolicExpression; /// `to-consensus-buff?` admits exactly one argument: diff --git a/clarity/src/vm/analysis/type_checker/v2_1/natives/options.rs b/clarity/src/vm/analysis/type_checker/v2_1/natives/options.rs index 0e12f802d2..6b9469f970 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/natives/options.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/natives/options.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
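Reviewer note (annotation, not part of the diff): the `TypeSignatureExt as _` imports added above follow the extension-trait idiom: methods that did not move with `TypeSignature` into clarity-serialization are presumably kept on a trait inside `clarity`, and call sites import it anonymously so the methods resolve without binding the trait's name. A hypothetical sketch of the idiom (names invented for illustration):

```rust
mod ext {
    // Stand-in for an extension trait like TypeSignatureExt.
    pub trait DescribeExt {
        fn describe(&self) -> String;
    }

    impl DescribeExt for u32 {
        fn describe(&self) -> String {
            format!("u32 value {self}")
        }
    }
}

// `as _` brings the trait's methods into scope without introducing its name,
// which avoids clashes and signals that only method resolution is wanted.
use self::ext::DescribeExt as _;

fn main() {
    assert_eq!(7u32.describe(), "u32 value 7");
}
```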
+use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::TypeSignature; use stacks_common::types::StacksEpochId; use super::{ @@ -22,8 +24,7 @@ use super::{ use crate::vm::analysis::type_checker::contexts::TypingContext; use crate::vm::costs::cost_functions::ClarityCostFunction; use crate::vm::costs::{analysis_typecheck_cost, runtime_cost, CostErrors, CostTracker}; -use crate::vm::representations::{ClarityName, SymbolicExpression}; -use crate::vm::types::TypeSignature; +use crate::vm::representations::SymbolicExpression; pub fn check_special_okay( checker: &mut TypeChecker, diff --git a/clarity/src/vm/analysis/type_checker/v2_1/tests/contracts.rs b/clarity/src/vm/analysis/type_checker/v2_1/tests/contracts.rs index c4c6da4219..f4955c8ac1 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/tests/contracts.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/tests/contracts.rs @@ -111,8 +111,8 @@ const SIMPLE_TOKENS: &str = "(define-map tokens { account: principal } { balance const SIMPLE_NAMES: &str = "(define-constant burn-address 'SP000000000000000000002Q6VF78) (define-private (price-function (name uint)) (if (< name u100000) u1000 u100)) - - (define-map name-map + + (define-map name-map { name: uint } { owner: principal }) (define-map preorder-map { name-hash: (buff 20) } @@ -121,7 +121,7 @@ const SIMPLE_NAMES: &str = "(define-constant burn-address 'SP0000000000000000000 (define-private (check-balance) (contract-call? .tokens my-get-token-balance tx-sender)) - (define-public (preorder + (define-public (preorder (name-hash (buff 20)) (name-price uint)) (let ((xfer-result (contract-call? .tokens token-transfer @@ -145,13 +145,13 @@ const SIMPLE_NAMES: &str = "(define-constant burn-address 'SP0000000000000000000 ;; preorder entry must exist! (unwrap! (map-get? preorder-map (tuple (name-hash (hash160 (xor name salt))))) (err 2))) - (name-entry + (name-entry (map-get? 
name-map (tuple (name name))))) (if (and ;; name shouldn't *already* exist (is-none name-entry) ;; preorder must have paid enough - (<= (price-function name) + (<= (price-function name) (get paid preorder-entry)) ;; preorder must have been the current principal (is-eq tx-sender @@ -280,7 +280,7 @@ fn test_names_tokens_contracts_interface() { { "name": "tn1", "type": "bool" }, { "name": "tn2", "type": "int128" }, { "name": "tn3", "type": { "buffer": { "length": 1 } }} - ] } } + ] } } }, { "name": "f11", "access": "private", @@ -413,7 +413,7 @@ fn test_names_tokens_contracts_interface() { "name": "n2", "type": "bool" } - ] + ] } }] } @@ -1477,10 +1477,10 @@ fn test_trait_to_subtrait_and_back() { )) (define-private (foo-0 (impl-contract )) (foo-1 impl-contract)) - + (define-private (foo-1 (impl-contract )) (foo-2 impl-contract)) - + (define-private (foo-2 (impl-contract )) true)"; @@ -3484,13 +3484,6 @@ fn clarity_trait_experiments_double_trait_method2_v1_v2( }; } -#[cfg(test)] -impl From for String { - fn from(o: CheckErrors) -> Self { - o.to_string() - } -} - #[apply(test_clarity_versions)] fn clarity_trait_experiments_cross_epochs( #[case] version: ClarityVersion, diff --git a/clarity/src/vm/analysis/type_checker/v2_1/tests/mod.rs b/clarity/src/vm/analysis/type_checker/v2_1/tests/mod.rs index bb3e86a6ec..c3c54b56d6 100644 --- a/clarity/src/vm/analysis/type_checker/v2_1/tests/mod.rs +++ b/clarity/src/vm/analysis/type_checker/v2_1/tests/mod.rs @@ -35,7 +35,7 @@ use crate::vm::types::StringSubtype::*; use crate::vm::types::TypeSignature::{BoolType, IntType, PrincipalType, SequenceType, UIntType}; use crate::vm::types::{ BufferLength, FixedFunction, FunctionType, QualifiedContractIdentifier, TraitIdentifier, - TypeSignature, BUFF_1, BUFF_20, BUFF_21, BUFF_32, BUFF_64, + TypeSignature, TypeSignatureExt as _, BUFF_1, BUFF_20, BUFF_21, BUFF_32, BUFF_64, }; use crate::vm::{execute_v2, ClarityName, ClarityVersion}; @@ -1992,7 +1992,7 @@ fn test_high_order_map() { fn test_function_order_tuples() { let snippet = " (define-read-only (get-score) - (ok + (ok (tuple (score (get-zero)) ) diff --git a/clarity/src/vm/analysis/types.rs b/clarity/src/vm/analysis/types.rs index 5085f2bc46..4e7f2bcb95 100644 --- a/clarity/src/vm/analysis/types.rs +++ b/clarity/src/vm/analysis/types.rs @@ -16,6 +16,8 @@ use std::collections::{BTreeMap, BTreeSet}; +use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::{QualifiedContractIdentifier, TraitIdentifier, TypeSignature}; use stacks_common::types::StacksEpochId; use crate::vm::analysis::analysis_db::AnalysisDatabase; @@ -24,8 +26,8 @@ use crate::vm::analysis::errors::{CheckErrors, CheckResult}; use crate::vm::analysis::type_checker::contexts::TypeMap; use crate::vm::costs::LimitedCostTracker; use crate::vm::types::signatures::FunctionSignature; -use crate::vm::types::{FunctionType, QualifiedContractIdentifier, TraitIdentifier, TypeSignature}; -use crate::vm::{ClarityName, ClarityVersion, SymbolicExpression}; +use crate::vm::types::FunctionType; +use crate::vm::{ClarityVersion, SymbolicExpression}; const DESERIALIZE_FAIL_MESSAGE: &str = "PANIC: Failed to deserialize bad database data in contract analysis."; diff --git a/clarity/src/vm/ast/definition_sorter/mod.rs b/clarity/src/vm/ast/definition_sorter/mod.rs index 2be40271e6..f883d055c6 100644 --- a/clarity/src/vm/ast/definition_sorter/mod.rs +++ b/clarity/src/vm/ast/definition_sorter/mod.rs @@ -14,7 +14,9 @@ // You should have received a copy of the GNU General Public License // along 
with this program. If not, see . -use hashbrown::{HashMap, HashSet}; +use std::collections::{HashMap, HashSet}; + +use clarity_serialization::representations::ClarityName; use crate::vm::ast::errors::{ParseError, ParseErrors, ParseResult}; use crate::vm::ast::types::ContractAST; @@ -22,11 +24,11 @@ use crate::vm::costs::cost_functions::ClarityCostFunction; use crate::vm::costs::{runtime_cost, CostTracker}; use crate::vm::functions::define::DefineFunctions; use crate::vm::functions::NativeFunctions; +use crate::vm::representations::PreSymbolicExpression; use crate::vm::representations::PreSymbolicExpressionType::{ Atom, AtomValue, Comment, FieldIdentifier, List, Placeholder, SugaredContractIdentifier, SugaredFieldIdentifier, TraitReference, Tuple, }; -use crate::vm::representations::{ClarityName, PreSymbolicExpression}; use crate::vm::ClarityVersion; #[cfg(test)] diff --git a/clarity/src/vm/ast/errors.rs b/clarity/src/vm/ast/errors.rs index aa60d75f5f..6d734c475d 100644 --- a/clarity/src/vm/ast/errors.rs +++ b/clarity/src/vm/ast/errors.rs @@ -14,307 +14,4 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::{error, fmt}; - -use crate::vm::ast::parser::v2::lexer::error::LexerError; -use crate::vm::ast::parser::v2::lexer::token::Token; -use crate::vm::costs::{CostErrors, ExecutionCost}; -use crate::vm::diagnostic::{DiagnosableError, Diagnostic, Level}; -use crate::vm::representations::{PreSymbolicExpression, Span}; -use crate::vm::MAX_CALL_STACK_DEPTH; - -pub type ParseResult = Result; - -#[derive(Debug, PartialEq)] -pub enum ParseErrors { - CostOverflow, - CostBalanceExceeded(ExecutionCost, ExecutionCost), - MemoryBalanceExceeded(u64, u64), - TooManyExpressions, - ExpressionStackDepthTooDeep, - VaryExpressionStackDepthTooDeep, - FailedCapturingInput, - SeparatorExpected(String), - SeparatorExpectedAfterColon(String), - ProgramTooLarge, - IllegalVariableName(String), - IllegalContractName(String), - UnknownQuotedValue(String), - FailedParsingIntValue(String), - FailedParsingUIntValue(String), - FailedParsingBuffer(String), - FailedParsingHexValue(String, String), - FailedParsingPrincipal(String), - FailedParsingField(String), - FailedParsingRemainder(String), - ClosingParenthesisUnexpected, - ClosingParenthesisExpected, - ClosingTupleLiteralUnexpected, - ClosingTupleLiteralExpected, - CircularReference(Vec), - TupleColonExpected(usize), - TupleCommaExpected(usize), - TupleItemExpected(usize), - NameAlreadyUsed(String), - TraitReferenceNotAllowed, - ImportTraitBadSignature, - DefineTraitBadSignature, - ImplTraitBadSignature, - TraitReferenceUnknown(String), - CommaSeparatorUnexpected, - ColonSeparatorUnexpected, - InvalidCharactersDetected, - InvalidEscaping, - CostComputationFailed(String), - - // V2 Errors - Lexer(LexerError), - ContractNameTooLong(String), - ExpectedContractIdentifier, - ExpectedTraitIdentifier, - IllegalTraitName(String), - InvalidPrincipalLiteral, - InvalidBuffer, - NameTooLong(String), - UnexpectedToken(Token), - ExpectedClosing(Token), - TupleColonExpectedv2, - TupleCommaExpectedv2, - TupleValueExpected, - IllegalClarityName(String), - IllegalASCIIString(String), - IllegalUtf8String(String), - ExpectedWhitespace, - // Notes - NoteToMatchThis(Token), - - /// Should be an unreachable error - UnexpectedParserFailure, - /// Should be an unreachable failure which invalidates the transaction - InterpreterFailure, - - ExecutionTimeExpired, -} - -#[derive(Debug, PartialEq)] -pub struct ParseError { - pub 
err: ParseErrors, - pub pre_expressions: Option>, - pub diagnostic: Diagnostic, -} - -impl ParseError { - pub fn new(err: ParseErrors) -> ParseError { - let diagnostic = Diagnostic::err(&err); - ParseError { - err, - pre_expressions: None, - diagnostic, - } - } - - pub fn rejectable(&self) -> bool { - matches!(self.err, ParseErrors::InterpreterFailure) - } - - pub fn has_pre_expression(&self) -> bool { - self.pre_expressions.is_some() - } - - pub fn set_pre_expression(&mut self, expr: &PreSymbolicExpression) { - self.diagnostic.spans = vec![expr.span().clone()]; - self.pre_expressions.replace(vec![expr.clone()]); - } - - pub fn set_pre_expressions(&mut self, exprs: Vec) { - self.diagnostic.spans = exprs.iter().map(|e| e.span().clone()).collect(); - self.pre_expressions.replace(exprs.to_vec()); - } -} - -impl fmt::Display for ParseError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.err)?; - - if let Some(ref e) = self.pre_expressions { - write!(f, "\nNear:\n{e:?}")?; - } - - Ok(()) - } -} - -impl error::Error for ParseError { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - None - } -} - -impl From for ParseError { - fn from(err: ParseErrors) -> Self { - ParseError::new(err) - } -} - -impl From for ParseError { - fn from(err: CostErrors) -> Self { - match err { - CostErrors::CostOverflow => ParseError::new(ParseErrors::CostOverflow), - CostErrors::CostBalanceExceeded(a, b) => { - ParseError::new(ParseErrors::CostBalanceExceeded(a, b)) - } - CostErrors::MemoryBalanceExceeded(a, b) => { - ParseError::new(ParseErrors::MemoryBalanceExceeded(a, b)) - } - CostErrors::CostComputationFailed(s) => { - ParseError::new(ParseErrors::CostComputationFailed(s)) - } - CostErrors::CostContractLoadFailure => ParseError::new( - ParseErrors::CostComputationFailed("Failed to load cost contract".into()), - ), - CostErrors::InterpreterFailure | CostErrors::Expect(_) => { - ParseError::new(ParseErrors::InterpreterFailure) - } - CostErrors::ExecutionTimeExpired => ParseError::new(ParseErrors::ExecutionTimeExpired), - } - } -} - -impl DiagnosableError for ParseErrors { - fn message(&self) -> String { - match &self { - ParseErrors::CostOverflow => "Used up cost budget during the parse".into(), - ParseErrors::CostBalanceExceeded(bal, used) => format!( - "Used up cost budget during the parse: {bal} balance, {used} used" - ), - ParseErrors::MemoryBalanceExceeded(bal, used) => format!( - "Used up memory budget during the parse: {bal} balance, {used} used" - ), - ParseErrors::TooManyExpressions => "Too many expressions".into(), - ParseErrors::FailedCapturingInput => "Failed to capture value from input".into(), - ParseErrors::SeparatorExpected(found) => { - format!("Expected whitespace or a close parens. 
Found: '{found}'") - } - ParseErrors::SeparatorExpectedAfterColon(found) => { - format!("Whitespace expected after colon (:), Found: '{found}'") - } - ParseErrors::ProgramTooLarge => "Program too large to parse".into(), - ParseErrors::IllegalContractName(contract_name) => { - format!("Illegal contract name: '{contract_name}'") - } - ParseErrors::IllegalVariableName(var_name) => { - format!("Illegal variable name: '{var_name}'") - } - ParseErrors::UnknownQuotedValue(value) => format!("Unknown 'quoted value '{value}'"), - ParseErrors::FailedParsingIntValue(value) => { - format!("Failed to parse int literal '{value}'") - } - ParseErrors::FailedParsingUIntValue(value) => { - format!("Failed to parse uint literal 'u{value}'") - } - ParseErrors::FailedParsingHexValue(value, x) => { - format!("Invalid hex-string literal {value}: {x}") - } - ParseErrors::FailedParsingPrincipal(value) => { - format!("Invalid principal literal: {value}") - } - ParseErrors::FailedParsingBuffer(value) => format!("Invalid buffer literal: {value}"), - ParseErrors::FailedParsingField(value) => format!("Invalid field literal: {value}"), - ParseErrors::FailedParsingRemainder(remainder) => { - format!("Failed to lex input remainder: '{remainder}'") - } - ParseErrors::ClosingParenthesisUnexpected => { - "Tried to close list which isn't open.".into() - } - ParseErrors::ClosingParenthesisExpected => "List expressions (..) left opened.".into(), - ParseErrors::ClosingTupleLiteralUnexpected => { - "Tried to close tuple literal which isn't open.".into() - } - ParseErrors::ClosingTupleLiteralExpected => "Tuple literal {{..}} left opened.".into(), - ParseErrors::ColonSeparatorUnexpected => "Misplaced colon.".into(), - ParseErrors::CommaSeparatorUnexpected => "Misplaced comma.".into(), - ParseErrors::TupleColonExpected(i) => { - format!("Tuple literal construction expects a colon at index {i}") - } - ParseErrors::TupleCommaExpected(i) => { - format!("Tuple literal construction expects a comma at index {i}") - } - ParseErrors::TupleItemExpected(i) => format!( - "Tuple literal construction expects a key or value at index {i}" - ), - ParseErrors::CircularReference(function_names) => format!( - "detected interdependent functions ({})", - function_names.join(", ") - ), - ParseErrors::NameAlreadyUsed(name) => { - format!("defining '{name}' conflicts with previous value") - } - ParseErrors::ImportTraitBadSignature => { - "(use-trait ...) expects a trait name and a trait identifier".into() - } - ParseErrors::DefineTraitBadSignature => { - "(define-trait ...) expects a trait name and a trait definition".into() - } - ParseErrors::ImplTraitBadSignature => { - "(impl-trait ...) expects a trait identifier".into() - } - ParseErrors::TraitReferenceNotAllowed => "trait references can not be stored".into(), - ParseErrors::TraitReferenceUnknown(trait_name) => { - format!("use of undeclared trait <{trait_name}>") - } - ParseErrors::ExpressionStackDepthTooDeep => format!( - "AST has too deep of an expression nesting. The maximum stack depth is {MAX_CALL_STACK_DEPTH}" - ), - ParseErrors::VaryExpressionStackDepthTooDeep => format!( - "AST has too deep of an expression nesting. 
The maximum stack depth is {MAX_CALL_STACK_DEPTH}" - ), - ParseErrors::InvalidCharactersDetected => "invalid characters detected".into(), - ParseErrors::InvalidEscaping => "invalid escaping detected in string".into(), - ParseErrors::CostComputationFailed(s) => format!("Cost computation failed: {s}"), - - // Parser v2 errors - ParseErrors::Lexer(le) => le.message(), - ParseErrors::ContractNameTooLong(name) => { - format!("contract name '{name}' is too long") - } - ParseErrors::ExpectedContractIdentifier => "expected contract identifier".into(), - ParseErrors::ExpectedTraitIdentifier => "expected trait identifier".into(), - ParseErrors::IllegalTraitName(name) => format!("illegal trait name, '{name}'"), - ParseErrors::InvalidPrincipalLiteral => "invalid principal literal".into(), - ParseErrors::InvalidBuffer => "invalid hex-string literal".into(), - ParseErrors::NameTooLong(name) => format!("illegal name (too long), '{name}'"), - ParseErrors::UnexpectedToken(token) => format!("unexpected '{token}'"), - ParseErrors::ExpectedClosing(token) => format!("expected closing '{token}'"), - ParseErrors::TupleColonExpectedv2 => "expected ':' after key in tuple".into(), - ParseErrors::TupleCommaExpectedv2 => { - "expected ',' separating key-value pairs in tuple".into() - } - ParseErrors::TupleValueExpected => "expected value expression for tuple".into(), - ParseErrors::IllegalClarityName(name) => format!("illegal clarity name, '{name}'"), - ParseErrors::IllegalASCIIString(s) => format!("illegal ascii string \"{s}\""), - ParseErrors::IllegalUtf8String(s) => format!("illegal UTF8 string \"{s}\""), - ParseErrors::ExpectedWhitespace => "expected whitespace before expression".into(), - ParseErrors::NoteToMatchThis(token) => format!("to match this '{token}'"), - ParseErrors::UnexpectedParserFailure => "unexpected failure while parsing".to_string(), - ParseErrors::InterpreterFailure => "unexpected failure while parsing".to_string(), - ParseErrors::ExecutionTimeExpired => "max execution time expired".to_string(), - } - } - - fn suggestion(&self) -> Option { - None - } - - fn level(&self) -> crate::vm::diagnostic::Level { - match self { - ParseErrors::NoteToMatchThis(_) => Level::Note, - ParseErrors::Lexer(lexerError) => lexerError.level(), - _ => Level::Error, - } - } -} - -pub struct PlacedError { - pub e: ParseErrors, - pub span: Span, -} +pub use clarity_serialization::errors::ast::{ParseError, ParseErrors, ParseResult, PlacedError}; diff --git a/clarity/src/vm/ast/mod.rs b/clarity/src/vm/ast/mod.rs index 50b2788147..d269403d77 100644 --- a/clarity/src/vm/ast/mod.rs +++ b/clarity/src/vm/ast/mod.rs @@ -318,7 +318,8 @@ pub fn build_ast( #[cfg(test)] mod test { - use hashbrown::HashMap; + use std::collections::HashMap; + use stacks_common::types::StacksEpochId; use crate::vm::ast::errors::ParseErrors; diff --git a/clarity/src/vm/ast/parser/v2/lexer/error.rs b/clarity/src/vm/ast/parser/v2/lexer/error.rs index a933f34715..11bfb0e4d0 100644 --- a/clarity/src/vm/ast/parser/v2/lexer/error.rs +++ b/clarity/src/vm/ast/parser/v2/lexer/error.rs @@ -1,79 +1,16 @@ -use crate::vm::diagnostic::{DiagnosableError, Level}; -use crate::vm::representations::Span; +// Copyright (C) 2025 Stacks Open Internet Foundation +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . -#[derive(Debug, PartialEq, Clone)] -pub enum LexerError { - InvalidCharInt(char), - InvalidCharUint(char), - InvalidCharBuffer(char), - InvalidCharIdent(char), - InvalidCharTraitIdent(char), - InvalidCharPrincipal(char), - InvalidBufferLength(usize), - UnknownEscapeChar(char), - IllegalCharString(char), - IllegalCharUTF8Encoding(char), - UnterminatedUTF8Encoding, - ExpectedClosing(char), - ExpectedSeparator, - EmptyUTF8Encoding, - InvalidUTF8Encoding, - SingleSemiColon, - UnknownSymbol(char), - NonASCIIChar(char), - NoteToMatchThis(char), - UnsupportedLineEnding, - EditorCRLFMode, -} - -#[derive(Debug)] -pub struct PlacedError { - pub e: LexerError, - pub span: Span, -} - -impl DiagnosableError for LexerError { - fn message(&self) -> String { - use self::LexerError::*; - match self { - InvalidCharInt(c) => format!("invalid character, '{c}', in int literal"), - InvalidCharUint(c) => format!("invalid character, '{c}', in uint literal"), - InvalidCharBuffer(c) => format!("invalid character, '{c}', in buffer"), - InvalidCharIdent(c) => format!("invalid character, '{c}', in identifier"), - InvalidCharTraitIdent(c) => format!("invalid character, '{c}', in trait identifier"), - InvalidCharPrincipal(c) => format!("invalid character, '{c}', in principal literal"), - IllegalCharString(c) => format!("invalid character, '{c}', in string literal"), - IllegalCharUTF8Encoding(c) => format!("invalid character, '{c}', in UTF8 encoding"), - InvalidUTF8Encoding => "invalid UTF8 encoding".to_string(), - EmptyUTF8Encoding => "empty UTF8 encoding".to_string(), - UnterminatedUTF8Encoding => "unterminated UTF8 encoding, missing '}'".to_string(), - InvalidBufferLength(size) => format!("invalid buffer length, {size}"), - UnknownEscapeChar(c) => format!("unknown escape character, '{c}'"), - ExpectedClosing(c) => format!("expected closing '{c}'"), - ExpectedSeparator => "expected separator".to_string(), - SingleSemiColon => "unexpected single ';' (comments begin with \";;\"".to_string(), - UnknownSymbol(c) => format!("unknown symbol, '{c}'"), - NonASCIIChar(c) => format!("illegal non-ASCII character, '{c}'"), - NoteToMatchThis(c) => format!("to match this '{c}'"), - UnsupportedLineEnding => { - "unsupported line-ending '\\r', only '\\n' is supported".to_string() - } - EditorCRLFMode => { - "you may need to change your editor from CRLF mode to LF mode".to_string() - } - } - } - - fn suggestion(&self) -> Option { - None - } - - fn level(&self) -> crate::vm::diagnostic::Level { - use self::LexerError::*; - match self { - NoteToMatchThis(_) => Level::Note, - EditorCRLFMode => Level::Note, - _ => Level::Error, - } - } -} +pub use clarity_serialization::errors::lexer::{LexerError, PlacedError}; diff --git a/clarity/src/vm/ast/parser/v2/lexer/token.rs b/clarity/src/vm/ast/parser/v2/lexer/token.rs index 2c1ccd401f..ce4768798f 100644 --- a/clarity/src/vm/ast/parser/v2/lexer/token.rs +++ b/clarity/src/vm/ast/parser/v2/lexer/token.rs @@ -1,110 +1 @@ -use std::fmt::Display; - -use crate::vm::representations::Span; - -#[derive(Debug, PartialEq, Clone)] -pub enum Token { - Eof, - Whitespace, - Lparen, - Rparen, - Lbrace, - Rbrace, - Colon, - Comma, - Dot, - 
Int(String), - Uint(String), - AsciiString(String), - Utf8String(String), - Bytes(String), - Principal(String), - Ident(String), - TraitIdent(String), - Plus, - Minus, - Multiply, - Divide, - Less, - LessEqual, - Greater, - GreaterEqual, - Comment(String), - Placeholder(String), // used to continue parsing after errors -} - -#[derive(Clone, Debug)] -pub struct PlacedToken { - pub span: Span, - pub token: Token, -} - -impl Display for Token { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use self::Token::*; - match self { - Eof => write!(f, "EOF"), - Whitespace => write!(f, "whitespace"), - Lparen => write!(f, "("), - Rparen => write!(f, ")"), - Lbrace => write!(f, "{{"), - Rbrace => write!(f, "}}"), - Colon => write!(f, ":"), - Comma => write!(f, ","), - Dot => write!(f, "."), - Int(_) => write!(f, "int"), - Uint(_) => write!(f, "uint"), - AsciiString(_) => write!(f, "string-ascii"), - Utf8String(_) => write!(f, "string-utf8"), - Bytes(_) => write!(f, "bytes"), - Principal(_) => write!(f, "principal"), - Ident(_) => write!(f, "identifier"), - TraitIdent(_) => write!(f, "trait-identifier"), - Plus => write!(f, "+"), - Minus => write!(f, "-"), - Multiply => write!(f, "*"), - Divide => write!(f, "/"), - Less => write!(f, "<"), - LessEqual => write!(f, "<="), - Greater => write!(f, ">"), - GreaterEqual => write!(f, ">="), - Comment(_) => write!(f, "comment"), - Placeholder(_) => write!(f, "placeholder"), - } - } -} - -impl Token { - pub fn reproduce(&self) -> String { - use self::Token::*; - match self { - Eof => "".to_string(), - Whitespace => " ".to_string(), - Lparen => "(".to_string(), - Rparen => ")".to_string(), - Lbrace => "{{".to_string(), - Rbrace => "}}".to_string(), - Colon => ":".to_string(), - Comma => ",".to_string(), - Dot => ".".to_string(), - Int(s) => s.to_string(), - Uint(s) => format!("u{s}"), - AsciiString(s) => format!("\"{s}\""), - Utf8String(s) => s.to_string(), - Bytes(s) => format!("0x{s}"), - Principal(s) => format!("'{s}"), - Ident(s) => s.to_string(), - TraitIdent(s) => format!("<{s}>"), - Plus => "+".to_string(), - Minus => "-".to_string(), - Multiply => "*".to_string(), - Divide => "/".to_string(), - Less => "<".to_string(), - LessEqual => "<=".to_string(), - Greater => ">".to_string(), - GreaterEqual => ">=".to_string(), - Comment(c) => format!(";; {c}"), - Placeholder(s) => s.to_string(), - } - } -} +pub use clarity_serialization::token::{PlacedToken, Token}; diff --git a/clarity/src/vm/ast/parser/v2/mod.rs b/clarity/src/vm/ast/parser/v2/mod.rs index ad0caf29d4..03a62f9560 100644 --- a/clarity/src/vm/ast/parser/v2/mod.rs +++ b/clarity/src/vm/ast/parser/v2/mod.rs @@ -1,5 +1,10 @@ pub mod lexer; +use clarity_serialization::representations::{ClarityName, ContractName}; +use clarity_serialization::types::{ + CharType, PrincipalData, QualifiedContractIdentifier, SequenceData, TraitIdentifier, UTF8Data, + Value, +}; use stacks_common::util::hash::hex_bytes; use self::lexer::token::{PlacedToken, Token}; @@ -7,11 +12,7 @@ use self::lexer::Lexer; use crate::vm::ast::errors::{ParseError, ParseErrors, ParseResult, PlacedError}; use crate::vm::ast::stack_depth_checker::AST_CALL_STACK_DEPTH_BUFFER; use crate::vm::diagnostic::{DiagnosableError, Diagnostic, Level}; -use crate::vm::representations::{ClarityName, ContractName, PreSymbolicExpression, Span}; -use crate::vm::types::{ - CharType, PrincipalData, QualifiedContractIdentifier, SequenceData, TraitIdentifier, UTF8Data, - Value, -}; +use crate::vm::representations::{PreSymbolicExpression, Span}; 
use crate::vm::MAX_CALL_STACK_DEPTH; pub struct Parser<'a> { diff --git a/clarity/src/vm/ast/sugar_expander/mod.rs b/clarity/src/vm/ast/sugar_expander/mod.rs index f844f5ec39..6f9cac5bd5 100644 --- a/clarity/src/vm/ast/sugar_expander/mod.rs +++ b/clarity/src/vm/ast/sugar_expander/mod.rs @@ -14,14 +14,16 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use hashbrown::{HashMap, HashSet}; +use std::collections::{HashMap, HashSet}; -use crate::vm::ast::errors::{ParseErrors, ParseResult}; -use crate::vm::ast::types::{BuildASTPass, ContractAST, PreExpressionsDrain}; -use crate::vm::representations::{ClarityName, PreSymbolicExpressionType, SymbolicExpression}; -use crate::vm::types::{ +use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::{ PrincipalData, QualifiedContractIdentifier, StandardPrincipalData, TraitIdentifier, Value, }; + +use crate::vm::ast::errors::{ParseErrors, ParseResult}; +use crate::vm::ast::types::{BuildASTPass, ContractAST, PreExpressionsDrain}; +use crate::vm::representations::{PreSymbolicExpressionType, SymbolicExpression}; use crate::vm::ClarityVersion; pub struct SugarExpander { diff --git a/clarity/src/vm/ast/traits_resolver/mod.rs b/clarity/src/vm/ast/traits_resolver/mod.rs index d84e8cb673..27e451b752 100644 --- a/clarity/src/vm/ast/traits_resolver/mod.rs +++ b/clarity/src/vm/ast/traits_resolver/mod.rs @@ -14,7 +14,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use hashbrown::HashMap; +use std::collections::HashMap; + +use clarity_serialization::representations::ClarityName; +use clarity_serialization::types::{QualifiedContractIdentifier, TraitIdentifier}; use crate::vm::ast::errors::{ParseError, ParseErrors, ParseResult}; use crate::vm::ast::types::{BuildASTPass, ContractAST}; @@ -22,8 +25,7 @@ use crate::vm::functions::define::DefineFunctions; use crate::vm::representations::PreSymbolicExpressionType::{ Atom, FieldIdentifier, List, SugaredFieldIdentifier, TraitReference, Tuple, }; -use crate::vm::representations::{ClarityName, PreSymbolicExpression, TraitDefinition}; -use crate::vm::types::{QualifiedContractIdentifier, TraitIdentifier}; +use crate::vm::representations::{PreSymbolicExpression, TraitDefinition}; use crate::vm::ClarityVersion; pub struct TraitsResolver {} diff --git a/clarity/src/vm/ast/types.rs b/clarity/src/vm/ast/types.rs index d969ed855f..a4406a3c79 100644 --- a/clarity/src/vm/ast/types.rs +++ b/clarity/src/vm/ast/types.rs @@ -14,10 +14,9 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use std::collections::{HashMap, HashSet}; use std::vec::Drain; -use hashbrown::{HashMap, HashSet}; - use crate::vm::ast::errors::ParseResult; use crate::vm::representations::{PreSymbolicExpression, SymbolicExpression, TraitDefinition}; use crate::vm::types::{QualifiedContractIdentifier, TraitIdentifier}; diff --git a/clarity/src/vm/callables.rs b/clarity/src/vm/callables.rs index ccf7b9af28..e7f9aa6a0e 100644 --- a/clarity/src/vm/callables.rs +++ b/clarity/src/vm/callables.rs @@ -15,8 +15,9 @@ // along with this program. If not, see . 
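Reviewer note (annotation, not part of the diff): in the `callables.rs` hunk below, the `FunctionIdentifier` struct definition is deleted and replaced by `pub use clarity_serialization::types::FunctionIdentifier;`, so the old `clarity::vm::callables::FunctionIdentifier` path keeps resolving to the same type. A tiny sketch of that `pub use` pattern with invented module names:

```rust
mod new_home {
    // The type now lives here (stand-in for clarity-serialization).
    #[derive(Debug, PartialEq)]
    pub struct Widget(pub u32);
}

mod old_home {
    // Re-export: `old_home::Widget` stays a valid path for existing callers.
    pub use super::new_home::Widget;
}

fn main() {
    // Both paths name the very same type, so the values compare directly.
    assert_eq!(old_home::Widget(1), new_home::Widget(1));
}
```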
use std::collections::BTreeMap; -use std::fmt; +use clarity_serialization::representations::ClarityName; +pub use clarity_serialization::types::FunctionIdentifier; use stacks_common::types::StacksEpochId; use super::costs::{CostErrors, CostOverflowingMath}; @@ -28,7 +29,7 @@ use crate::vm::contexts::ContractContext; use crate::vm::costs::cost_functions::ClarityCostFunction; use crate::vm::costs::runtime_cost; use crate::vm::errors::{check_argument_count, Error, InterpreterResult as Result}; -use crate::vm::representations::{ClarityName, SymbolicExpression}; +use crate::vm::representations::SymbolicExpression; use crate::vm::types::{ CallableData, ListData, ListTypeData, OptionalData, PrincipalData, ResponseData, SequenceData, SequenceSubtype, TraitIdentifier, TupleData, TypeSignature, @@ -118,17 +119,6 @@ pub fn cost_input_sized_vararg(args: &[Value]) -> Result { .map_err(Error::from) } -#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] -pub struct FunctionIdentifier { - identifier: String, -} - -impl fmt::Display for FunctionIdentifier { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.identifier) - } -} - impl DefinedFunction { pub fn new( arguments: Vec<(ClarityName, TypeSignature)>, @@ -393,18 +383,6 @@ impl CallableType { } } -impl FunctionIdentifier { - fn new_native_function(name: &str) -> FunctionIdentifier { - let identifier = format!("_native_:{name}"); - FunctionIdentifier { identifier } - } - - fn new_user_function(name: &str, context: &str) -> FunctionIdentifier { - let identifier = format!("{context}:{name}"); - FunctionIdentifier { identifier } - } -} - // Implicitly cast principals to traits and traits to other traits as needed, // recursing into compound types. This function does not check for legality of // these casts, as that is done in the type-checker. Note: depth of recursion diff --git a/clarity/src/vm/contexts.rs b/clarity/src/vm/contexts.rs index 1e167826ac..b316fcb187 100644 --- a/clarity/src/vm/contexts.rs +++ b/clarity/src/vm/contexts.rs @@ -14,12 +14,13 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt; use std::mem::replace; use std::time::{Duration, Instant}; -use hashbrown::{HashMap, HashSet}; +pub use clarity_serialization::errors::StackTrace; +use clarity_serialization::representations::ClarityName; use serde::Serialize; use serde_json::json; use stacks_common::types::chainstate::StacksBlockId; @@ -39,7 +40,7 @@ use crate::vm::errors::{ CheckErrors, InterpreterError, InterpreterResult as Result, RuntimeErrorType, }; use crate::vm::events::*; -use crate::vm::representations::{ClarityName, SymbolicExpression}; +use crate::vm::representations::SymbolicExpression; use crate::vm::types::signatures::FunctionSignature; use crate::vm::types::{ AssetIdentifier, BuffData, CallableData, PrincipalData, QualifiedContractIdentifier, @@ -248,8 +249,6 @@ pub struct CallStack { apply_depth: usize, } -pub type StackTrace = Vec; - pub const TRANSIENT_CONTRACT_NAME: &str = "__transient"; impl Default for AssetMap { diff --git a/clarity/src/vm/costs/mod.rs b/clarity/src/vm/costs/mod.rs index 6989690355..54fc089399 100644 --- a/clarity/src/vm/costs/mod.rs +++ b/clarity/src/vm/costs/mod.rs @@ -14,13 +14,15 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
+use std::collections::HashMap; use std::{cmp, fmt}; +pub use clarity_serialization::errors::CostErrors; +pub use clarity_serialization::execution_cost::ExecutionCost; use costs_1::Costs1; use costs_2::Costs2; use costs_2_testnet::Costs2Testnet; use costs_3::Costs3; -use hashbrown::HashMap; use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use stacks_common::types::StacksEpochId; @@ -39,7 +41,6 @@ use crate::vm::types::{ FunctionType, PrincipalData, QualifiedContractIdentifier, TupleData, TypeSignature, }; use crate::vm::{CallStack, ClarityName, Environment, LocalContext, SymbolicExpression, Value}; - pub mod constants; pub mod cost_functions; #[allow(unused_variables)] @@ -405,43 +406,6 @@ impl PartialEq for LimitedCostTracker { } } -#[derive(Debug, PartialEq, Eq)] -pub enum CostErrors { - CostComputationFailed(String), - CostOverflow, - CostBalanceExceeded(ExecutionCost, ExecutionCost), - MemoryBalanceExceeded(u64, u64), - CostContractLoadFailure, - InterpreterFailure, - Expect(String), - ExecutionTimeExpired, -} - -impl fmt::Display for CostErrors { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CostErrors::CostComputationFailed(ref s) => write!(f, "Cost computation failed: {s}"), - CostErrors::CostOverflow => write!(f, "Cost overflow"), - CostErrors::CostBalanceExceeded(ref total, ref limit) => { - write!(f, "Cost balance exceeded: total {total}, limit {limit}") - } - CostErrors::MemoryBalanceExceeded(ref used, ref limit) => { - write!(f, "Memory balance exceeded: used {used}, limit {limit}") - } - CostErrors::CostContractLoadFailure => write!(f, "Failed to load cost contract"), - CostErrors::InterpreterFailure => write!(f, "Interpreter failure"), - CostErrors::Expect(ref s) => write!(f, "Expectation failed: {s}"), - CostErrors::ExecutionTimeExpired => write!(f, "Execution time expired"), - } - } -} - -impl CostErrors { - fn rejectable(&self) -> bool { - matches!(self, CostErrors::InterpreterFailure | CostErrors::Expect(_)) - } -} - fn load_state_summary(mainnet: bool, clarity_db: &mut ClarityDatabase) -> Result { let cost_voting_contract = boot_code_id("cost-voting", mainnet); @@ -1280,22 +1244,6 @@ impl CostTracker for &mut LimitedCostTracker { } } -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq, Hash)] -pub struct ExecutionCost { - pub write_length: u64, - pub write_count: u64, - pub read_length: u64, - pub read_count: u64, - pub runtime: u64, -} - -impl fmt::Display for ExecutionCost { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{{\"runtime\": {}, \"write_len\": {}, \"write_cnt\": {}, \"read_len\": {}, \"read_cnt\": {}}}", - self.runtime, self.write_length, self.write_count, self.read_length, self.read_count) - } -} - pub trait CostOverflowingMath { fn cost_overflow_mul(self, other: T) -> Result; fn cost_overflow_add(self, other: T) -> Result; @@ -1318,140 +1266,6 @@ impl CostOverflowingMath for u64 { } } -impl ExecutionCost { - pub const ZERO: Self = Self { - runtime: 0, - write_length: 0, - read_count: 0, - write_count: 0, - read_length: 0, - }; - - /// Returns the percentage of self consumed in `numerator`'s largest proportion dimension. 
- pub fn proportion_largest_dimension(&self, numerator: &ExecutionCost) -> u64 { - // max() should always return because there are > 0 elements - #[allow(clippy::expect_used)] - *[ - numerator.runtime / cmp::max(1, self.runtime / 100), - numerator.write_length / cmp::max(1, self.write_length / 100), - numerator.write_count / cmp::max(1, self.write_count / 100), - numerator.read_length / cmp::max(1, self.read_length / 100), - numerator.read_count / cmp::max(1, self.read_count / 100), - ] - .iter() - .max() - .expect("BUG: should find maximum") - } - - /// Returns the dot product of this execution cost with `resolution`/block_limit - /// This provides a scalar value representing the cumulative consumption - /// of `self` in the provided block_limit. - pub fn proportion_dot_product(&self, block_limit: &ExecutionCost, resolution: u64) -> u64 { - [ - // each field here is calculating `r * self / limit`, using f64 - // use MAX(1, block_limit) to guard against divide by zero - // use MIN(1, self/block_limit) to guard against self > block_limit - resolution as f64 - * 1_f64.min(self.runtime as f64 / 1_f64.max(block_limit.runtime as f64)), - resolution as f64 - * 1_f64.min(self.read_count as f64 / 1_f64.max(block_limit.read_count as f64)), - resolution as f64 - * 1_f64.min(self.write_count as f64 / 1_f64.max(block_limit.write_count as f64)), - resolution as f64 - * 1_f64.min(self.read_length as f64 / 1_f64.max(block_limit.read_length as f64)), - resolution as f64 - * 1_f64.min(self.write_length as f64 / 1_f64.max(block_limit.write_length as f64)), - ] - .iter() - .fold(0, |acc, dim| acc.saturating_add(cmp::max(*dim as u64, 1))) - } - - pub fn max_value() -> ExecutionCost { - Self { - runtime: u64::MAX, - write_length: u64::MAX, - read_count: u64::MAX, - write_count: u64::MAX, - read_length: u64::MAX, - } - } - - pub fn runtime(runtime: u64) -> ExecutionCost { - Self { - runtime, - write_length: 0, - read_count: 0, - write_count: 0, - read_length: 0, - } - } - - pub fn add_runtime(&mut self, runtime: u64) -> Result<()> { - self.runtime = self.runtime.cost_overflow_add(runtime)?; - Ok(()) - } - - pub fn add(&mut self, other: &ExecutionCost) -> Result<()> { - self.runtime = self.runtime.cost_overflow_add(other.runtime)?; - self.read_count = self.read_count.cost_overflow_add(other.read_count)?; - self.read_length = self.read_length.cost_overflow_add(other.read_length)?; - self.write_length = self.write_length.cost_overflow_add(other.write_length)?; - self.write_count = self.write_count.cost_overflow_add(other.write_count)?; - Ok(()) - } - - pub fn sub(&mut self, other: &ExecutionCost) -> Result<()> { - self.runtime = self.runtime.cost_overflow_sub(other.runtime)?; - self.read_count = self.read_count.cost_overflow_sub(other.read_count)?; - self.read_length = self.read_length.cost_overflow_sub(other.read_length)?; - self.write_length = self.write_length.cost_overflow_sub(other.write_length)?; - self.write_count = self.write_count.cost_overflow_sub(other.write_count)?; - Ok(()) - } - - pub fn multiply(&mut self, times: u64) -> Result<()> { - self.runtime = self.runtime.cost_overflow_mul(times)?; - self.read_count = self.read_count.cost_overflow_mul(times)?; - self.read_length = self.read_length.cost_overflow_mul(times)?; - self.write_length = self.write_length.cost_overflow_mul(times)?; - self.write_count = self.write_count.cost_overflow_mul(times)?; - Ok(()) - } - - pub fn divide(&mut self, divisor: u64) -> Result<()> { - self.runtime = self.runtime.cost_overflow_div(divisor)?; - self.read_count = 
self.read_count.cost_overflow_div(divisor)?; - self.read_length = self.read_length.cost_overflow_div(divisor)?; - self.write_length = self.write_length.cost_overflow_div(divisor)?; - self.write_count = self.write_count.cost_overflow_div(divisor)?; - Ok(()) - } - - /// Returns whether or not this cost exceeds any dimension of the - /// other cost. - pub fn exceeds(&self, other: &ExecutionCost) -> bool { - self.runtime > other.runtime - || self.write_length > other.write_length - || self.write_count > other.write_count - || self.read_count > other.read_count - || self.read_length > other.read_length - } - - pub fn max_cost(first: ExecutionCost, second: ExecutionCost) -> ExecutionCost { - Self { - runtime: first.runtime.max(second.runtime), - write_length: first.write_length.max(second.write_length), - write_count: first.write_count.max(second.write_count), - read_count: first.read_count.max(second.read_count), - read_length: first.read_length.max(second.read_length), - } - } - - pub fn is_zero(&self) -> bool { - *self == Self::ZERO - } -} - // ONLY WORKS IF INPUT IS u64 fn int_log2(input: u64) -> Option { 63_u32.checked_sub(input.leading_zeros()).map(|floor_log| { diff --git a/clarity/src/vm/coverage.rs b/clarity/src/vm/coverage.rs index 7370d78077..15f41b6693 100644 --- a/clarity/src/vm/coverage.rs +++ b/clarity/src/vm/coverage.rs @@ -1,9 +1,7 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::fs::File; use std::io::Write; -use hashbrown::{HashMap, HashSet}; - use super::functions::define::DefineFunctionsParsed; use super::EvalHook; use crate::vm::types::QualifiedContractIdentifier; diff --git a/clarity/src/vm/database/key_value_wrapper.rs b/clarity/src/vm/database/key_value_wrapper.rs index 4d16d2dae6..ba01f501a8 100644 --- a/clarity/src/vm/database/key_value_wrapper.rs +++ b/clarity/src/vm/database/key_value_wrapper.rs @@ -14,9 +14,9 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use std::collections::HashMap; use std::hash::Hash; -use hashbrown::HashMap; use stacks_common::types::chainstate::{StacksBlockId, TrieHash}; use stacks_common::types::StacksEpochId; use stacks_common::util::hash::Sha512Trunc256Sum; diff --git a/clarity/src/vm/database/sqlite.rs b/clarity/src/vm/database/sqlite.rs index 7264a533bb..c1af553041 100644 --- a/clarity/src/vm/database/sqlite.rs +++ b/clarity/src/vm/database/sqlite.rs @@ -14,7 +14,6 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
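// A standalone sketch of the proportion math documented in the `ExecutionCost` helpers
// deleted above, assuming only std. `Cost` is a hypothetical stand-in for `ExecutionCost`;
// the formula mirrors `proportion_largest_dimension`: each dimension of `numerator` is
// divided by one percent of the corresponding dimension of `self` (the limit), and the
// largest of those percentages is returned.

use std::cmp;

struct Cost {
    runtime: u64,
    write_length: u64,
    write_count: u64,
    read_length: u64,
    read_count: u64,
}

impl Cost {
    fn proportion_largest_dimension(&self, numerator: &Cost) -> u64 {
        [
            numerator.runtime / cmp::max(1, self.runtime / 100),
            numerator.write_length / cmp::max(1, self.write_length / 100),
            numerator.write_count / cmp::max(1, self.write_count / 100),
            numerator.read_length / cmp::max(1, self.read_length / 100),
            numerator.read_count / cmp::max(1, self.read_count / 100),
        ]
        .into_iter()
        .max()
        .unwrap_or(0)
    }
}

fn main() {
    let limit = Cost { runtime: 10_000, write_length: 1_000, write_count: 100, read_length: 2_000, read_count: 100 };
    let used = Cost { runtime: 2_500, write_length: 300, write_count: 45, read_length: 200, read_count: 80 };
    // Dimension percentages: runtime 25, write_length 30, write_count 45, read_length 10, read_count 80.
    // read_count is the most-consumed dimension, so the block is ~80% full by this measure.
    assert_eq!(limit.proportion_largest_dimension(&used), 80);
}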
-use rusqlite::types::{FromSql, FromSqlResult, ToSql, ToSqlOutput, ValueRef}; use rusqlite::{params, Connection, OptionalExtension}; use stacks_common::types::chainstate::{BlockHeaderHash, StacksBlockId, TrieHash}; use stacks_common::types::sqlite::NO_PARAMS; @@ -27,7 +26,6 @@ use super::{ NULL_BURN_STATE_DB, NULL_HEADER_DB, }; use crate::vm::analysis::{AnalysisDatabase, CheckErrors}; -use crate::vm::costs::ExecutionCost; use crate::vm::errors::{ IncomparableError, InterpreterError, InterpreterResult as Result, RuntimeErrorType, }; @@ -403,20 +401,3 @@ impl ClarityBackingStore for MemoryBackingStore { sqlite_get_metadata_manual(self, at_height, contract, key) } } - -impl ToSql for ExecutionCost { - fn to_sql(&self) -> rusqlite::Result> { - let val = serde_json::to_string(self) - .map_err(|e| rusqlite::Error::ToSqlConversionFailure(Box::new(e)))?; - Ok(ToSqlOutput::from(val)) - } -} - -impl FromSql for ExecutionCost { - fn column_result(value: ValueRef) -> FromSqlResult { - let str_val = String::column_result(value)?; - let parsed = serde_json::from_str(&str_val) - .map_err(|e| rusqlite::types::FromSqlError::Other(Box::new(e)))?; - Ok(parsed) - } -} diff --git a/clarity/src/vm/diagnostic.rs b/clarity/src/vm/diagnostic.rs index 164875151f..559dd91213 100644 --- a/clarity/src/vm/diagnostic.rs +++ b/clarity/src/vm/diagnostic.rs @@ -14,78 +14,4 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::fmt; - -use crate::vm::representations::Span; - -/// In a near future, we can go further in our static analysis and provide different levels -/// of diagnostics, such as warnings, hints, best practices, etc. -#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] -pub enum Level { - Note, - Warning, - Error, -} - -pub trait DiagnosableError { - fn message(&self) -> String; - fn suggestion(&self) -> Option; - fn level(&self) -> Level { - Level::Error - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct Diagnostic { - pub level: Level, - pub message: String, - pub spans: Vec, - pub suggestion: Option, -} - -impl Diagnostic { - pub fn err(error: &dyn DiagnosableError) -> Diagnostic { - Diagnostic { - spans: vec![], - level: Level::Error, - message: error.message(), - suggestion: error.suggestion(), - } - } - - pub fn add_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { - self.spans.push(Span { - start_line, - start_column, - end_line, - end_column, - }); - } -} - -impl fmt::Display for Diagnostic { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.level)?; - match self.spans.len().cmp(&1) { - std::cmp::Ordering::Equal => write!( - f, - " (line {}, column {})", - self.spans[0].start_line, self.spans[0].start_column - )?, - std::cmp::Ordering::Greater => { - let lines: Vec = self - .spans - .iter() - .map(|s| format!("line: {}", s.start_line)) - .collect(); - write!(f, " ({})", lines.join(", "))?; - } - _ => {} - } - write!(f, ": {}.", &self.message)?; - if let Some(suggestion) = &self.suggestion { - write!(f, "\n{suggestion}")?; - } - writeln!(f) - } -} +pub use clarity_serialization::diagnostic::{DiagnosableError, Diagnostic, Level}; diff --git a/clarity/src/vm/docs/contracts.rs b/clarity/src/vm/docs/contracts.rs index 6d48064069..2cb8b2c403 100644 --- a/clarity/src/vm/docs/contracts.rs +++ b/clarity/src/vm/docs/contracts.rs @@ -1,6 +1,5 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap, HashSet}; -use 
hashbrown::{HashMap, HashSet}; use stacks_common::consts::CHAIN_ID_TESTNET; use stacks_common::types::StacksEpochId; diff --git a/clarity/src/vm/docs/mod.rs b/clarity/src/vm/docs/mod.rs index 9456884e7d..76ec6996a0 100644 --- a/clarity/src/vm/docs/mod.rs +++ b/clarity/src/vm/docs/mod.rs @@ -511,8 +511,7 @@ const SQRTI_API: SimpleFunctionAPI = SimpleFunctionAPI { name: None, snippet: "sqrti ${1:expr-1}", signature: "(sqrti n)", - description: - "Returns the largest integer that is less than or equal to the square root of `n`. + description: "Returns the largest integer that is less than or equal to the square root of `n`. Fails on a negative numbers. ", example: "(sqrti u11) ;; Returns u3 @@ -527,7 +526,7 @@ const LOG2_API: SimpleFunctionAPI = SimpleFunctionAPI { snippet: "log2 ${1:expr-1}", signature: "(log2 n)", description: - "Returns the power to which the number 2 must be raised to obtain the value `n`, rounded + "Returns the power to which the number 2 must be raised to obtain the value `n`, rounded down to the nearest integer. Fails on a negative numbers. ", example: "(log2 u8) ;; Returns u3 @@ -605,7 +604,7 @@ const BITWISE_LEFT_SHIFT_API: SimpleFunctionAPI = SimpleFunctionAPI { name: None, snippet: "bit-shift-left ${1:expr-1} ${2:expr-2}", signature: "(bit-shift-left i1 shamt)", - description: "Shifts all the bits in `i1` to the left by the number of places specified in `shamt` modulo 128 (the bit width of Clarity integers). + description: "Shifts all the bits in `i1` to the left by the number of places specified in `shamt` modulo 128 (the bit width of Clarity integers). Note that there is a deliberate choice made to ignore arithmetic overflow for this operation. In use cases where overflow should be detected, developers should use `*`, `/`, and `pow` instead of the shift operators. @@ -625,7 +624,7 @@ const BITWISE_RIGHT_SHIFT_API: SimpleFunctionAPI = SimpleFunctionAPI { name: None, snippet: "bit-shift-right ${1:expr-1} ${2:expr-2}", signature: "(bit-shift-right i1 shamt)", - description: "Shifts all the bits in `i1` to the right by the number of places specified in `shamt` modulo 128 (the bit width of Clarity integers). + description: "Shifts all the bits in `i1` to the right by the number of places specified in `shamt` modulo 128 (the bit width of Clarity integers). When `i1` is a `uint` (unsigned), new bits are filled with zeros. When `i1` is an `int` (signed), the sign is preserved, meaning that new bits are filled with the value of the previous sign-bit. Note that there is a deliberate choice made to ignore arithmetic overflow for this operation. In use cases where overflow should be detected, developers should use `*`, `/`, and `pow` instead of the shift operators. @@ -647,8 +646,8 @@ const AND_API: SimpleFunctionAPI = SimpleFunctionAPI { name: None, snippet: "and ${1:expr-1} ${2:expr-2}", signature: "(and b1 b2 ...)", - description: "Returns `true` if all boolean inputs are `true`. Importantly, the supplied arguments are -evaluated in-order and lazily. Lazy evaluation means that if one of the arguments returns `false`, the function + description: "Returns `true` if all boolean inputs are `true`. Importantly, the supplied arguments are +evaluated in-order and lazily. Lazy evaluation means that if one of the arguments returns `false`, the function short-circuits, and no subsequent arguments are evaluated. 
", example: "(and true false) ;; Returns false @@ -661,8 +660,8 @@ const OR_API: SimpleFunctionAPI = SimpleFunctionAPI { name: None, snippet: "or ${1:expr-1} ${2:expr-2}", signature: "(or b1 b2 ...)", - description: "Returns `true` if any boolean inputs are `true`. Importantly, the supplied arguments are -evaluated in-order and lazily. Lazy evaluation means that if one of the arguments returns `true`, the function + description: "Returns `true` if any boolean inputs are `true`. Importantly, the supplied arguments are +evaluated in-order and lazily. Lazy evaluation means that if one of the arguments returns `true`, the function short-circuits, and no subsequent arguments are evaluated.", example: "(or true false) ;; Returns true (or (is-eq (+ 1 2) 1) (is-eq 4 4)) ;; Returns true @@ -882,7 +881,7 @@ const EQUALS_API: SpecialAPI = SpecialAPI { snippet: "is-eq ${1:expr-1} ${2:expr-2}", output_type: "bool", signature: "(is-eq v1 v2...)", - description: "Compares the inputted values, returning `true` if they are all equal. Note that + description: "Compares the inputted values, returning `true` if they are all equal. Note that _unlike_ the `(and ...)` function, `(is-eq ...)` will _not_ short-circuit. All values supplied to is-eq _must_ be the same type.", example: "(is-eq 1 1) ;; Returns true @@ -1012,7 +1011,7 @@ The `func` argument must be a literal function name. (fold * (list 2 2 2) 1) ;; Returns 8 (fold * (list 2 2 2) 0) ;; Returns 0 ;; calculates (- 11 (- 7 (- 3 2))) -(fold - (list 3 7 11) 2) ;; Returns 5 +(fold - (list 3 7 11) 2) ;; Returns 5 (define-private (concat-string (a (string-ascii 20)) (b (string-ascii 20))) (unwrap-panic (as-max-len? (concat a b) u20))) (fold concat-string "cdef" "ab") ;; Returns "fedcab" (fold concat-string (list "cd" "ef") "ab") ;; Returns "efcdab" @@ -1725,7 +1724,7 @@ value and type returned are determined by the specified `BlockInfoPropertyName`. not correspond to an existing block prior to the current block, the function returns `none`. The currently available property names are as follows: -- `burnchain-header-hash`: This property returns a `(buff 32)` value containing the header hash of the burnchain (Bitcoin) block that selected the +- `burnchain-header-hash`: This property returns a `(buff 32)` value containing the header hash of the burnchain (Bitcoin) block that selected the Stacks block at the given Stacks chain height. - `id-header-hash`: This property returns a `(buff 32)` value containing the _index block hash_ of a Stacks block. This hash is globally unique, and is derived @@ -1734,7 +1733,7 @@ from the block hash and the history of accepted PoX operations. This is also th - `header-hash`: This property returns a `(buff 32)` value containing the header hash of a Stacks block, given a Stacks chain height. **WARNING* this hash is not guaranteed to be globally unique, since the same Stacks block can be mined in different PoX forks. If you need global uniqueness, you should use `id-header-hash`. -- `miner-address`: This property returns a `principal` value corresponding to the miner of the given block. **WARNING** In Stacks 2.1, this is not guaranteed to +- `miner-address`: This property returns a `principal` value corresponding to the miner of the given block. **WARNING** In Stacks 2.1, this is not guaranteed to be the same `principal` that received the block reward, since Stacks 2.1 supports coinbase transactions that pay the reward to a contract address. This is merely the address of the `principal` that produced the block. 
@@ -1745,9 +1744,9 @@ For blocks mined after epoch 3.0, all Stacks blocks in one tenure will share the - `vrf-seed`: This property returns a `(buff 32)` value of the VRF seed for the corresponding block. -- `block-reward`: This property returns a `uint` value for the total block reward of the indicated Stacks block. This value is only available once the reward for +- `block-reward`: This property returns a `uint` value for the total block reward of the indicated Stacks block. This value is only available once the reward for the block matures. That is, the latest `block-reward` value available is at least 101 Stacks blocks in the past (on mainnet). The reward includes the coinbase, -the anchored block's transaction fees, and the shares of the confirmed and produced microblock transaction fees earned by this block's miner. Note that this value may +the anchored block's transaction fees, and the shares of the confirmed and produced microblock transaction fees earned by this block's miner. Note that this value may be smaller than the Stacks coinbase at this height, because the miner may have been punished with a valid `PoisonMicroblock` transaction in the event that the miner published two or more microblock stream forks. Added in Clarity 2. @@ -1881,14 +1880,14 @@ The latter, a _contract principal_, is encoded as a standard principal concatena a `(string-ascii 40)` *contract name* that identifies the code body. The `principal-construct?` function allows users to create either standard or contract principals, -depending on which form is used. To create a standard principal, +depending on which form is used. To create a standard principal, `principal-construct?` would be called with two arguments: it takes as input a `(buff 1)` which encodes the principal address's `version-byte`, a `(buff 20)` which encodes the principal address's `hash-bytes`. To create a contract principal, `principal-construct?` would be called with three arguments: the `(buff 1)` and `(buff 20)` to represent the standard principal that created the contract, and a `(string-ascii 40)` which encodes the contract's name. -On success, this function returns either a standard principal or contract principal, +On success, this function returns either a standard principal or contract principal, depending on whether or not the third `(string-ascii 40)` argument is given. This function returns a `Response`. On success, the `ok` value is a `Principal`. @@ -1896,7 +1895,7 @@ The `err` value is a value tuple with the form `{ error_code: uint, value: (opti If the single-byte `version-byte` is in the valid range `0x00` to `0x1f`, but is not an appropriate version byte for the current network, then the error will be `u0`, and `value` will contain -`(some principal)`, where the wrapped value is the principal. If the `version-byte` is not in this range, +`(some principal)`, where the wrapped value is the principal. If the `version-byte` is not in this range, however, then the `value` will be `none`. If the `version-byte` is a `buff` of length 0, if the single-byte `version-byte` is a @@ -2175,10 +2174,10 @@ const MINT_TOKEN: SpecialAPI = SpecialAPI { signature: "(ft-mint? token-name amount recipient)", description: "`ft-mint?` is used to increase the token balance for the `recipient` principal for a token type defined using `define-fungible-token`. The increased token balance is _not_ transfered from another principal, but -rather minted. +rather minted. If a non-positive amount is provided to mint, this function returns `(err 1)`. 
Otherwise, on successfully mint, it -returns `(ok true)`. If this call would result in more supplied tokens than defined by the total supply in +returns `(ok true)`. If this call would result in more supplied tokens than defined by the total supply in `define-fungible-token`, then a `SupplyOverflow` runtime error is thrown. ", example: " @@ -2242,7 +2241,7 @@ const TOKEN_TRANSFER: SpecialAPI = SpecialAPI { output_type: "(response bool uint)", signature: "(ft-transfer? token-name amount sender recipient)", description: "`ft-transfer?` is used to increase the token balance for the `recipient` principal for a token -type defined using `define-fungible-token` by debiting the `sender` principal. In contrast to `stx-transfer?`, +type defined using `define-fungible-token` by debiting the `sender` principal. In contrast to `stx-transfer?`, any user can transfer the assets. When used, relevant guards need to be added. This function returns (ok true) if the transfer is successful. In the event of an unsuccessful transfer it returns @@ -2267,7 +2266,7 @@ const ASSET_TRANSFER: SpecialAPI = SpecialAPI { signature: "(nft-transfer? asset-class asset-identifier sender recipient)", description: "`nft-transfer?` is used to change the owner of an asset identified by `asset-identifier` from `sender` to `recipient`. The `asset-class` must have been defined by `define-non-fungible-token` and `asset-identifier` -must be of the type specified in that definition. In contrast to `stx-transfer?`, any user can transfer the asset. +must be of the type specified in that definition. In contrast to `stx-transfer?`, any user can transfer the asset. When used, relevant guards need to be added. This function returns (ok true) if the transfer is successful. In the event of an unsuccessful transfer it returns @@ -2307,7 +2306,7 @@ const BURN_TOKEN: SpecialAPI = SpecialAPI { signature: "(ft-burn? token-name amount sender)", description: "`ft-burn?` is used to decrease the token balance for the `sender` principal for a token type defined using `define-fungible-token`. The decreased token balance is _not_ transfered to another principal, but -rather destroyed, reducing the circulating supply. +rather destroyed, reducing the circulating supply. On a successful burn, it returns `(ok true)`. The burn may fail with error code: @@ -2326,7 +2325,7 @@ const BURN_ASSET: SpecialAPI = SpecialAPI { output_type: "(response bool uint)", signature: "(nft-burn? asset-class asset-identifier sender)", description: "`nft-burn?` is used to burn an asset that the `sender` principal owns. -The asset must have been defined using `define-non-fungible-token`, and the supplied +The asset must have been defined using `define-non-fungible-token`, and the supplied `asset-identifier` must be of the same type specified in that definition. On a successful burn, it returns `(ok true)`. In the event of an unsuccessful burn it @@ -2349,7 +2348,7 @@ const STX_GET_BALANCE: SimpleFunctionAPI = SimpleFunctionAPI { description: "`stx-get-balance` is used to query the STX balance of the `owner` principal. This function returns the (unlocked) STX balance, in microstacks (1 STX = 1,000,000 microstacks), of the -`owner` principal. The result is the same as `(get unlocked (stx-account user))`. +`owner` principal. The result is the same as `(get unlocked (stx-account user))`. In the event that the `owner` principal isn't materialized, it returns 0. ", example: " @@ -2406,7 +2405,7 @@ const STX_TRANSFER_MEMO: SpecialAPI = SpecialAPI { snippet: "stx-transfer-memo? 
${1:amount} ${2:sender} ${3:recipient} ${4:memo}", output_type: "(response bool uint)", signature: "(stx-transfer-memo? amount sender recipient memo)", - description: "`stx-transfer-memo?` is similar to `stx-transfer?`, except that it adds a `memo` field. + description: "`stx-transfer-memo?` is similar to `stx-transfer?`, except that it adds a `memo` field. This function returns (ok true) if the transfer is successful, or, on an error, returns the same codes as `stx-transfer?`. ", @@ -2496,8 +2495,8 @@ const REPLACE_AT: SpecialAPI = SpecialAPI { snippet: "replace-at? ${1:sequence} ${2:index} ${3:element}", signature: "(replace-at? sequence index element)", description: "The `replace-at?` function takes in a sequence, an index, and an element, -and returns a new sequence with the data at the index position replaced with the given element. -The given element's type must match the type of the sequence, and must correspond to a single +and returns a new sequence with the data at the index position replaced with the given element. +The given element's type must match the type of the sequence, and must correspond to a single index of the input sequence. The return type on success is the same type as the input sequence. If the provided index is out of bounds, this functions returns `none`. diff --git a/clarity/src/vm/errors.rs b/clarity/src/vm/errors.rs index d46f7a9ca1..6e820dac84 100644 --- a/clarity/src/vm/errors.rs +++ b/clarity/src/vm/errors.rs @@ -14,227 +14,17 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::{error, fmt}; - -#[cfg(feature = "rusqlite")] -use rusqlite::Error as SqliteError; -use serde_json::Error as SerdeJSONErr; -use stacks_common::types::chainstate::BlockHeaderHash; +pub use clarity_serialization::errors::{ + Error, IncomparableError, InterpreterError, InterpreterResult, RuntimeErrorType, + ShortReturnType, +}; -use super::ast::errors::ParseErrors; pub use crate::vm::analysis::errors::{ check_argument_count, check_arguments_at_least, check_arguments_at_most, CheckErrors, }; -use crate::vm::ast::errors::ParseError; -use crate::vm::contexts::StackTrace; -use crate::vm::costs::CostErrors; -use crate::vm::types::Value; - -#[derive(Debug)] -pub struct IncomparableError { - pub err: T, -} - -#[derive(Debug)] -#[allow(clippy::large_enum_variant)] -pub enum Error { - /// UncheckedErrors are errors that *should* be caught by the - /// TypeChecker and other check passes. Test executions may - /// trigger these errors. - Unchecked(CheckErrors), - Interpreter(InterpreterError), - Runtime(RuntimeErrorType, Option), - ShortReturn(ShortReturnType), -} - -/// InterpreterErrors are errors that *should never* occur. -/// Test executions may trigger these errors. 
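// The deleted error types in this file form a small hierarchy, described by the doc
// comments above: checked errors, interpreter bugs, runtime errors (carrying an optional
// stack trace), and short-returns, all folded into one `Error` enum via `From` impls so
// call sites can use `?`. A minimal std-only sketch of that pattern, with hypothetical
// `MiniCheckError` / `MiniRuntimeError` stand-ins rather than the real Clarity types:

#[derive(Debug, PartialEq)]
enum MiniCheckError {
    TypeMismatch,
}

#[derive(Debug, PartialEq)]
enum MiniRuntimeError {
    ArithmeticOverflow,
}

#[derive(Debug, PartialEq)]
enum MiniError {
    Unchecked(MiniCheckError),
    Runtime(MiniRuntimeError, Option<Vec<String>>),
}

impl From<MiniCheckError> for MiniError {
    fn from(e: MiniCheckError) -> Self {
        MiniError::Unchecked(e)
    }
}

impl From<MiniRuntimeError> for MiniError {
    fn from(e: MiniRuntimeError) -> Self {
        // Mirrors the `Error::Runtime(err, None)` shape: the stack trace is attached later.
        MiniError::Runtime(e, None)
    }
}

fn add(a: u64, b: u64) -> Result<u64, MiniError> {
    // `?` promotes the narrower error into `MiniError` through the `From` impl.
    let sum = a.checked_add(b).ok_or(MiniRuntimeError::ArithmeticOverflow)?;
    Ok(sum)
}

fn main() {
    assert_eq!(add(1, 2), Ok(3));
    assert_eq!(
        add(u64::MAX, 1),
        Err(MiniError::Runtime(MiniRuntimeError::ArithmeticOverflow, None))
    );
    // The same promotion works for the "should have been caught statically" class.
    let e: MiniError = MiniCheckError::TypeMismatch.into();
    assert_eq!(e, MiniError::Unchecked(MiniCheckError::TypeMismatch));
}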
-#[derive(Debug, PartialEq)] -pub enum InterpreterError { - BadSender(Value), - BadSymbolicRepresentation(String), - InterpreterError(String), - UninitializedPersistedVariable, - FailedToConstructAssetTable, - FailedToConstructEventBatch, - #[cfg(feature = "rusqlite")] - SqliteError(IncomparableError), - BadFileName, - FailedToCreateDataDirectory, - MarfFailure(String), - FailureConstructingTupleWithType, - FailureConstructingListWithType, - InsufficientBalance, - CostContractLoadFailure, - DBError(String), - Expect(String), -} - -/// RuntimeErrors are errors that smart contracts are expected -/// to be able to trigger during execution (e.g., arithmetic errors) -#[derive(Debug, PartialEq)] -pub enum RuntimeErrorType { - Arithmetic(String), - ArithmeticOverflow, - ArithmeticUnderflow, - SupplyOverflow(u128, u128), - SupplyUnderflow(u128, u128), - DivisionByZero, - // error in parsing types - ParseError(String), - // error in parsing the AST - ASTError(ParseError), - MaxStackDepthReached, - MaxContextDepthReached, - ListDimensionTooHigh, - BadTypeConstruction, - ValueTooLarge, - BadBlockHeight(String), - TransferNonPositiveAmount, - NoSuchToken, - NotImplemented, - NoCallerInContext, - NoSenderInContext, - NonPositiveTokenSupply, - JSONParseError(IncomparableError), - AttemptToFetchInTransientContext, - BadNameValue(&'static str, String), - UnknownBlockHeaderHash(BlockHeaderHash), - BadBlockHash(Vec), - UnwrapFailure, - DefunctPoxContract, - PoxAlreadyLocked, - MetadataAlreadySet, -} - -#[derive(Debug, PartialEq)] -pub enum ShortReturnType { - ExpectedValue(Value), - AssertionFailed(Value), -} - -pub type InterpreterResult = Result; - -impl PartialEq> for IncomparableError { - fn eq(&self, _other: &IncomparableError) -> bool { - false - } -} - -impl PartialEq for Error { - fn eq(&self, other: &Error) -> bool { - match (self, other) { - (Error::Runtime(x, _), Error::Runtime(y, _)) => x == y, - (Error::Unchecked(x), Error::Unchecked(y)) => x == y, - (Error::ShortReturn(x), Error::ShortReturn(y)) => x == y, - (Error::Interpreter(x), Error::Interpreter(y)) => x == y, - _ => false, - } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Error::Runtime(ref err, ref stack) => { - write!(f, "{err}")?; - if let Some(ref stack_trace) = stack { - writeln!(f, "\n Stack Trace: ")?; - for item in stack_trace.iter() { - writeln!(f, "{item}")?; - } - } - Ok(()) - } - _ => write!(f, "{self:?}"), - } - } -} - -impl fmt::Display for RuntimeErrorType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{self:?}") - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - None - } -} - -impl error::Error for RuntimeErrorType { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - None - } -} - -impl From for Error { - fn from(err: ParseError) -> Self { - match &err.err { - ParseErrors::InterpreterFailure => Error::from(InterpreterError::Expect( - "Unexpected interpreter failure during parsing".into(), - )), - _ => Error::from(RuntimeErrorType::ASTError(err)), - } - } -} - -impl From for Error { - fn from(err: CostErrors) -> Self { - match err { - CostErrors::InterpreterFailure => Error::from(InterpreterError::Expect( - "Interpreter failure during cost calculation".into(), - )), - CostErrors::Expect(s) => Error::from(InterpreterError::Expect(format!( - "Interpreter failure during cost calculation: {s}" - ))), - other_err => Error::from(CheckErrors::from(other_err)), - } 
- } -} - -impl From for Error { - fn from(err: RuntimeErrorType) -> Self { - Error::Runtime(err, None) - } -} - -impl From for Error { - fn from(err: CheckErrors) -> Self { - Error::Unchecked(err) - } -} - -impl From for Error { - fn from(err: ShortReturnType) -> Self { - Error::ShortReturn(err) - } -} - -impl From for Error { - fn from(err: InterpreterError) -> Self { - Error::Interpreter(err) - } -} - -#[cfg(test)] -impl From for () { - fn from(err: Error) -> Self {} -} - -impl From for Value { - fn from(val: ShortReturnType) -> Self { - match val { - ShortReturnType::ExpectedValue(v) => v, - ShortReturnType::AssertionFailed(v) => v, - } - } -} #[cfg(test)] mod test { - use super::*; #[test] #[cfg(feature = "developer-mode")] @@ -247,20 +37,4 @@ _native_:native_div assert_eq!(format!("{}", crate::vm::execute(t).unwrap_err()), expected); } - - #[test] - fn equality() { - assert_eq!( - Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))), - Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))) - ); - assert_eq!( - Error::Interpreter(InterpreterError::InterpreterError("".to_string())), - Error::Interpreter(InterpreterError::InterpreterError("".to_string())) - ); - assert!( - Error::ShortReturn(ShortReturnType::ExpectedValue(Value::Bool(true))) - != Error::Interpreter(InterpreterError::InterpreterError("".to_string())) - ); - } } diff --git a/clarity/src/vm/functions/conversions.rs b/clarity/src/vm/functions/conversions.rs index db4c35fc71..b922f13fc0 100644 --- a/clarity/src/vm/functions/conversions.rs +++ b/clarity/src/vm/functions/conversions.rs @@ -14,17 +14,19 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use clarity_serialization::types::serialization::SerializationError; + use crate::vm::costs::cost_functions::ClarityCostFunction; use crate::vm::costs::runtime_cost; use crate::vm::errors::{ check_argument_count, CheckErrors, InterpreterError, InterpreterResult as Result, }; use crate::vm::representations::SymbolicExpression; -use crate::vm::types::serialization::SerializationError; use crate::vm::types::SequenceSubtype::BufferType; use crate::vm::types::TypeSignature::SequenceType; use crate::vm::types::{ - ASCIIData, BufferLength, CharType, SequenceData, TypeSignature, UTF8Data, Value, + ASCIIData, BufferLength, CharType, SequenceData, TypeSignature, TypeSignatureExt as _, + UTF8Data, Value, }; use crate::vm::{eval, Environment, LocalContext}; diff --git a/clarity/src/vm/functions/define.rs b/clarity/src/vm/functions/define.rs index 1e11ff76e9..35e7981b3d 100644 --- a/clarity/src/vm/functions/define.rs +++ b/clarity/src/vm/functions/define.rs @@ -25,7 +25,9 @@ use crate::vm::eval; use crate::vm::representations::SymbolicExpressionType::Field; use crate::vm::representations::{ClarityName, SymbolicExpression}; use crate::vm::types::signatures::FunctionSignature; -use crate::vm::types::{parse_name_type_pairs, TraitIdentifier, TypeSignature, Value}; +use crate::vm::types::{ + parse_name_type_pairs, TraitIdentifier, TypeSignature, TypeSignatureExt as _, Value, +}; define_named_enum!(DefineFunctions { Constant("define-constant"), diff --git a/clarity/src/vm/mod.rs b/clarity/src/vm/mod.rs index 5ee82d9de8..22c2b04618 100644 --- a/clarity/src/vm/mod.rs +++ b/clarity/src/vm/mod.rs @@ -55,6 +55,7 @@ pub mod clarity; use std::collections::BTreeMap; +pub use clarity_serialization::MAX_CALL_STACK_DEPTH; use costs::CostErrors; use stacks_common::types::StacksEpochId; @@ -86,8 +87,6 @@ pub use 
crate::vm::types::Value; use crate::vm::types::{PrincipalData, TypeSignature}; pub use crate::vm::version::ClarityVersion; -pub const MAX_CALL_STACK_DEPTH: usize = 64; - #[derive(Debug, Clone)] pub struct ParsedContract { pub contract_identifier: String, diff --git a/clarity/src/vm/representations.rs b/clarity/src/vm/representations.rs index 08ddedf069..94d87718c1 100644 --- a/clarity/src/vm/representations.rs +++ b/clarity/src/vm/representations.rs @@ -14,662 +14,11 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::borrow::Borrow; -use std::fmt; -use std::io::{Read, Write}; -use std::ops::Deref; - -use lazy_static::lazy_static; -use regex::Regex; -use stacks_common::codec::{read_next, write_next, Error as codec_error, StacksMessageCodec}; - -use crate::vm::errors::RuntimeErrorType; -use crate::vm::types::{TraitIdentifier, Value}; - -pub const CONTRACT_MIN_NAME_LENGTH: usize = 1; -pub const CONTRACT_MAX_NAME_LENGTH: usize = 40; -pub const MAX_STRING_LEN: u8 = 128; - -lazy_static! { - pub static ref STANDARD_PRINCIPAL_REGEX_STRING: String = - "[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{28,41}".into(); - pub static ref CONTRACT_NAME_REGEX_STRING: String = format!( - r#"([a-zA-Z](([a-zA-Z0-9]|[-_])){{{},{}}})"#, - CONTRACT_MIN_NAME_LENGTH - 1, - // NOTE: this is deliberate. Earlier versions of the node will accept contract principals whose names are up to - // 128 bytes. This behavior must be preserved for backwards-compatibility. - MAX_STRING_LEN - 1 - ); - pub static ref CONTRACT_PRINCIPAL_REGEX_STRING: String = format!( - r#"{}(\.){}"#, - *STANDARD_PRINCIPAL_REGEX_STRING, *CONTRACT_NAME_REGEX_STRING - ); - pub static ref PRINCIPAL_DATA_REGEX_STRING: String = format!( - "({})|({})", - *STANDARD_PRINCIPAL_REGEX_STRING, *CONTRACT_PRINCIPAL_REGEX_STRING - ); - pub static ref CLARITY_NAME_REGEX_STRING: String = - "^[a-zA-Z]([a-zA-Z0-9]|[-_!?+<>=/*])*$|^[-+=/*]$|^[<>]=?$".into(); - pub static ref CLARITY_NAME_REGEX: Regex = - { - #[allow(clippy::unwrap_used)] - Regex::new(CLARITY_NAME_REGEX_STRING.as_str()).unwrap() - }; - pub static ref CONTRACT_NAME_REGEX: Regex = - { - #[allow(clippy::unwrap_used)] - Regex::new(format!("^{}$|^__transient$", CONTRACT_NAME_REGEX_STRING.as_str()).as_str()) - .unwrap() - }; -} - -guarded_string!( - ClarityName, - "ClarityName", - CLARITY_NAME_REGEX, - MAX_STRING_LEN, - RuntimeErrorType, - RuntimeErrorType::BadNameValue -); -guarded_string!( - ContractName, - "ContractName", - CONTRACT_NAME_REGEX, - MAX_STRING_LEN, - RuntimeErrorType, - RuntimeErrorType::BadNameValue -); - -impl StacksMessageCodec for ClarityName { - #[allow(clippy::needless_as_bytes)] // as_bytes isn't necessary, but verbosity is preferable in the codec impls - fn consensus_serialize(&self, fd: &mut W) -> Result<(), codec_error> { - // ClarityName can't be longer than vm::representations::MAX_STRING_LEN, which itself is - // a u8, so we should be good here. 
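// The codec impls below length-prefix the name with a single byte and reject anything
// longer than MAX_STRING_LEN (128) on both the serialize and deserialize paths. A
// std-only sketch of that wire format (one length byte, then the raw UTF-8 bytes),
// without the Clarity-specific name validation that the real impls perform afterwards:

use std::io::{self, Cursor, Read, Write};

const MAX_NAME_LEN: usize = 128; // mirrors MAX_STRING_LEN

fn encode_name<W: Write>(name: &str, fd: &mut W) -> io::Result<()> {
    if name.len() > MAX_NAME_LEN {
        return Err(io::Error::new(io::ErrorKind::InvalidInput, "name too long"));
    }
    fd.write_all(&[name.len() as u8])?;
    fd.write_all(name.as_bytes())
}

fn decode_name<R: Read>(fd: &mut R) -> io::Result<String> {
    let mut len_byte = [0u8; 1];
    fd.read_exact(&mut len_byte)?;
    let len = len_byte[0] as usize;
    if len > MAX_NAME_LEN {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "name too long"));
    }
    let mut bytes = vec![0u8; len];
    fd.read_exact(&mut bytes)?;
    String::from_utf8(bytes).map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "not utf-8"))
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    encode_name("transfer-token!", &mut buf)?;
    assert_eq!(buf[0], 15); // one length byte, then the name bytes
    let decoded = decode_name(&mut Cursor::new(buf))?;
    assert_eq!(decoded, "transfer-token!");
    Ok(())
}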
- if self.as_bytes().len() > MAX_STRING_LEN as usize { - return Err(codec_error::SerializeError( - "Failed to serialize clarity name: too long".to_string(), - )); - } - write_next(fd, &(self.as_bytes().len() as u8))?; - fd.write_all(self.as_bytes()) - .map_err(codec_error::WriteError)?; - Ok(()) - } - - fn consensus_deserialize(fd: &mut R) -> Result { - let len_byte: u8 = read_next(fd)?; - if len_byte > MAX_STRING_LEN { - return Err(codec_error::DeserializeError( - "Failed to deserialize clarity name: too long".to_string(), - )); - } - let mut bytes = vec![0u8; len_byte as usize]; - fd.read_exact(&mut bytes).map_err(codec_error::ReadError)?; - - // must encode a valid string - let s = String::from_utf8(bytes).map_err(|_e| { - codec_error::DeserializeError( - "Failed to parse Clarity name: could not contruct from utf8".to_string(), - ) - })?; - - // must decode to a clarity name - let name = ClarityName::try_from(s).map_err(|e| { - codec_error::DeserializeError(format!("Failed to parse Clarity name: {e:?}")) - })?; - Ok(name) - } -} - -impl StacksMessageCodec for ContractName { - #[allow(clippy::needless_as_bytes)] // as_bytes isn't necessary, but verbosity is preferable in the codec impls - fn consensus_serialize(&self, fd: &mut W) -> Result<(), codec_error> { - if self.as_bytes().len() < CONTRACT_MIN_NAME_LENGTH - || self.as_bytes().len() > CONTRACT_MAX_NAME_LENGTH - { - return Err(codec_error::SerializeError(format!( - "Failed to serialize contract name: too short or too long: {}", - self.as_bytes().len() - ))); - } - write_next(fd, &(self.as_bytes().len() as u8))?; - fd.write_all(self.as_bytes()) - .map_err(codec_error::WriteError)?; - Ok(()) - } - - fn consensus_deserialize(fd: &mut R) -> Result { - let len_byte: u8 = read_next(fd)?; - if (len_byte as usize) < CONTRACT_MIN_NAME_LENGTH - || (len_byte as usize) > CONTRACT_MAX_NAME_LENGTH - { - return Err(codec_error::DeserializeError(format!( - "Failed to deserialize contract name: too short or too long: {len_byte}" - ))); - } - let mut bytes = vec![0u8; len_byte as usize]; - fd.read_exact(&mut bytes).map_err(codec_error::ReadError)?; - - // must encode a valid string - let s = String::from_utf8(bytes).map_err(|_e| { - codec_error::DeserializeError( - "Failed to parse Contract name: could not construct from utf8".to_string(), - ) - })?; - - let name = ContractName::try_from(s).map_err(|e| { - codec_error::DeserializeError(format!("Failed to parse Contract name: {e:?}")) - })?; - Ok(name) - } -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] -pub enum PreSymbolicExpressionType { - AtomValue(Value), - Atom(ClarityName), - List(Vec), - Tuple(Vec), - SugaredContractIdentifier(ContractName), - SugaredFieldIdentifier(ContractName, ClarityName), - FieldIdentifier(TraitIdentifier), - TraitReference(ClarityName), - Comment(String), - Placeholder(String), -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] -pub struct PreSymbolicExpression { - pub pre_expr: PreSymbolicExpressionType, - pub id: u64, - - #[cfg(feature = "developer-mode")] - pub span: Span, -} - -pub trait SymbolicExpressionCommon { - type S: SymbolicExpressionCommon; - fn set_id(&mut self, id: u64); - fn match_list_mut(&mut self) -> Option<&mut [Self::S]>; -} - -impl SymbolicExpressionCommon for PreSymbolicExpression { - type S = PreSymbolicExpression; - fn set_id(&mut self, id: u64) { - self.id = id; - } - fn match_list_mut(&mut self) -> Option<&mut [PreSymbolicExpression]> { - if let PreSymbolicExpressionType::List(ref mut list) = self.pre_expr 
{ - Some(list) - } else { - None - } - } -} - -impl SymbolicExpressionCommon for SymbolicExpression { - type S = SymbolicExpression; - fn set_id(&mut self, id: u64) { - self.id = id; - } - fn match_list_mut(&mut self) -> Option<&mut [SymbolicExpression]> { - if let SymbolicExpressionType::List(ref mut list) = self.expr { - Some(list) - } else { - None - } - } -} - -impl PreSymbolicExpression { - #[cfg(feature = "developer-mode")] - fn cons() -> PreSymbolicExpression { - PreSymbolicExpression { - id: 0, - span: Span::zero(), - pre_expr: PreSymbolicExpressionType::AtomValue(Value::Bool(false)), - } - } - #[cfg(not(feature = "developer-mode"))] - fn cons() -> PreSymbolicExpression { - PreSymbolicExpression { - id: 0, - pre_expr: PreSymbolicExpressionType::AtomValue(Value::Bool(false)), - } - } - - #[cfg(feature = "developer-mode")] - pub fn set_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { - self.span = Span { - start_line, - start_column, - end_line, - end_column, - } - } - - #[cfg(not(feature = "developer-mode"))] - pub fn set_span( - &mut self, - _start_line: u32, - _start_column: u32, - _end_line: u32, - _end_column: u32, - ) { - } - - #[cfg(feature = "developer-mode")] - pub fn copy_span(&mut self, src: &Span) { - self.span = src.clone(); - } - - #[cfg(not(feature = "developer-mode"))] - pub fn copy_span(&mut self, _src: &Span) {} - - #[cfg(feature = "developer-mode")] - pub fn span(&self) -> &Span { - &self.span - } - - #[cfg(not(feature = "developer-mode"))] - pub fn span(&self) -> &Span { - &Span::ZERO - } - - pub fn sugared_contract_identifier(val: ContractName) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::SugaredContractIdentifier(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn sugared_field_identifier( - contract_name: ContractName, - name: ClarityName, - ) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::SugaredFieldIdentifier(contract_name, name), - ..PreSymbolicExpression::cons() - } - } - - pub fn atom_value(val: Value) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::AtomValue(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn atom(val: ClarityName) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::Atom(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn trait_reference(val: ClarityName) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::TraitReference(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn field_identifier(val: TraitIdentifier) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::FieldIdentifier(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn list(val: Vec) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::List(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn tuple(val: Vec) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::Tuple(val), - ..PreSymbolicExpression::cons() - } - } - - pub fn placeholder(s: String) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::Placeholder(s), - ..PreSymbolicExpression::cons() - } - } - - pub fn comment(comment: String) -> PreSymbolicExpression { - PreSymbolicExpression { - pre_expr: PreSymbolicExpressionType::Comment(comment), - ..PreSymbolicExpression::cons() - } 
- } - - pub fn match_trait_reference(&self) -> Option<&ClarityName> { - if let PreSymbolicExpressionType::TraitReference(ref value) = self.pre_expr { - Some(value) - } else { - None - } - } - - pub fn match_atom_value(&self) -> Option<&Value> { - if let PreSymbolicExpressionType::AtomValue(ref value) = self.pre_expr { - Some(value) - } else { - None - } - } - - pub fn match_atom(&self) -> Option<&ClarityName> { - if let PreSymbolicExpressionType::Atom(ref value) = self.pre_expr { - Some(value) - } else { - None - } - } - - pub fn match_list(&self) -> Option<&[PreSymbolicExpression]> { - if let PreSymbolicExpressionType::List(ref list) = self.pre_expr { - Some(list) - } else { - None - } - } - - pub fn match_field_identifier(&self) -> Option<&TraitIdentifier> { - if let PreSymbolicExpressionType::FieldIdentifier(ref value) = self.pre_expr { - Some(value) - } else { - None - } - } - - pub fn match_placeholder(&self) -> Option<&str> { - if let PreSymbolicExpressionType::Placeholder(ref s) = self.pre_expr { - Some(s.as_str()) - } else { - None - } - } - - pub fn match_comment(&self) -> Option<&str> { - if let PreSymbolicExpressionType::Comment(ref s) = self.pre_expr { - Some(s.as_str()) - } else { - None - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] -pub enum SymbolicExpressionType { - AtomValue(Value), - Atom(ClarityName), - List(Vec), - LiteralValue(Value), - Field(TraitIdentifier), - TraitReference(ClarityName, TraitDefinition), -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] -pub enum TraitDefinition { - Defined(TraitIdentifier), - Imported(TraitIdentifier), -} - -pub fn depth_traverse(expr: &SymbolicExpression, mut visit: F) -> Result, E> -where - F: FnMut(&SymbolicExpression) -> Result, -{ - let mut stack = vec![]; - let mut last = None; - stack.push(expr); - while let Some(current) = stack.pop() { - last = Some(visit(current)?); - if let Some(list) = current.match_list() { - for item in list.iter() { - stack.push(item); - } - } - } - - Ok(last) -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] -pub struct SymbolicExpression { - pub expr: SymbolicExpressionType, - // this id field is used by compiler passes to store information in - // maps. - // first pass -> fill out unique ids - // ...typing passes -> store information in hashmap according to id. 
- // - // this is a fairly standard technique in compiler passes - pub id: u64, - - #[cfg(feature = "developer-mode")] - #[serde(default)] - pub span: Span, - - #[cfg(feature = "developer-mode")] - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub pre_comments: Vec<(String, Span)>, - #[cfg(feature = "developer-mode")] - #[serde(default, skip_serializing_if = "Option::is_none")] - pub end_line_comment: Option, - #[cfg(feature = "developer-mode")] - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub post_comments: Vec<(String, Span)>, -} - -impl SymbolicExpression { - #[cfg(feature = "developer-mode")] - fn cons() -> SymbolicExpression { - SymbolicExpression { - id: 0, - expr: SymbolicExpressionType::AtomValue(Value::Bool(false)), - span: Span::zero(), - pre_comments: vec![], - end_line_comment: None, - post_comments: vec![], - } - } - #[cfg(not(feature = "developer-mode"))] - fn cons() -> SymbolicExpression { - SymbolicExpression { - id: 0, - expr: SymbolicExpressionType::AtomValue(Value::Bool(false)), - } - } - - #[cfg(feature = "developer-mode")] - pub fn set_span(&mut self, start_line: u32, start_column: u32, end_line: u32, end_column: u32) { - self.span = Span { - start_line, - start_column, - end_line, - end_column, - } - } - - #[cfg(not(feature = "developer-mode"))] - pub fn set_span( - &mut self, - _start_line: u32, - _start_column: u32, - _end_line: u32, - _end_column: u32, - ) { - } - - #[cfg(feature = "developer-mode")] - pub fn copy_span(&mut self, src: &Span) { - self.span = src.clone(); - } - - #[cfg(not(feature = "developer-mode"))] - pub fn copy_span(&mut self, _src: &Span) {} - - #[cfg(feature = "developer-mode")] - pub fn span(&self) -> &Span { - &self.span - } - - #[cfg(not(feature = "developer-mode"))] - pub fn span(&self) -> &Span { - &Span::ZERO - } - - pub fn atom_value(val: Value) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::AtomValue(val), - ..SymbolicExpression::cons() - } - } - - pub fn atom(val: ClarityName) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::Atom(val), - ..SymbolicExpression::cons() - } - } - - pub fn literal_value(val: Value) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::LiteralValue(val), - ..SymbolicExpression::cons() - } - } - - pub fn list(val: Vec) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::List(val), - ..SymbolicExpression::cons() - } - } - - pub fn trait_reference( - val: ClarityName, - trait_definition: TraitDefinition, - ) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::TraitReference(val, trait_definition), - ..SymbolicExpression::cons() - } - } - - pub fn field(val: TraitIdentifier) -> SymbolicExpression { - SymbolicExpression { - expr: SymbolicExpressionType::Field(val), - ..SymbolicExpression::cons() - } - } - - // These match functions are used to simplify calling code - // areas a lot. There is a frequent code pattern where - // a block _expects_ specific symbolic expressions, leading - // to a lot of very verbose `if let x = {` expressions. 
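// The comments above describe two conventions used throughout these expression types:
// per-node `id`s that later compiler passes use as keys into side tables, and `match_*`
// helpers that return `Option` instead of forcing a verbose `if let` at every call site.
// A std-only sketch of both, using a hypothetical `MiniExpr` rather than the real
// `SymbolicExpression`, and an explicit stack in the style of `depth_traverse`:

use std::collections::HashMap;

struct MiniExpr {
    id: u64,
    kind: MiniExprKind,
}

enum MiniExprKind {
    Atom(String),
    List(Vec<MiniExpr>),
}

impl MiniExpr {
    // Option-returning matcher in the style of `match_list` / `match_atom`.
    fn match_list(&self) -> Option<&[MiniExpr]> {
        if let MiniExprKind::List(ref list) = self.kind {
            Some(list)
        } else {
            None
        }
    }
}

// One "pass": record each node's depth in a side table keyed by its id,
// walking the tree with an explicit stack rather than recursion.
fn depth_pass(root: &MiniExpr) -> HashMap<u64, usize> {
    let mut depths = HashMap::new();
    let mut stack = vec![(root, 0usize)];
    while let Some((node, depth)) = stack.pop() {
        depths.insert(node.id, depth);
        if let Some(children) = node.match_list() {
            for child in children {
                stack.push((child, depth + 1));
            }
        }
    }
    depths
}

fn main() {
    // A two-element list whose nodes carry pre-assigned unique ids 1..=3.
    let expr = MiniExpr {
        id: 1,
        kind: MiniExprKind::List(vec![
            MiniExpr { id: 2, kind: MiniExprKind::Atom("a".into()) },
            MiniExpr { id: 3, kind: MiniExprKind::Atom("b".into()) },
        ]),
    };
    let depths = depth_pass(&expr);
    assert_eq!(depths[&1], 0);
    assert_eq!(depths[&3], 1);
}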
- - pub fn match_list(&self) -> Option<&[SymbolicExpression]> { - if let SymbolicExpressionType::List(ref list) = self.expr { - Some(list) - } else { - None - } - } - - pub fn match_atom(&self) -> Option<&ClarityName> { - if let SymbolicExpressionType::Atom(ref value) = self.expr { - Some(value) - } else { - None - } - } - - pub fn match_atom_value(&self) -> Option<&Value> { - if let SymbolicExpressionType::AtomValue(ref value) = self.expr { - Some(value) - } else { - None - } - } - - pub fn match_literal_value(&self) -> Option<&Value> { - if let SymbolicExpressionType::LiteralValue(ref value) = self.expr { - Some(value) - } else { - None - } - } - - pub fn match_trait_reference(&self) -> Option<&ClarityName> { - if let SymbolicExpressionType::TraitReference(ref value, _) = self.expr { - Some(value) - } else { - None - } - } - - pub fn match_field(&self) -> Option<&TraitIdentifier> { - if let SymbolicExpressionType::Field(ref value) = self.expr { - Some(value) - } else { - None - } - } -} - -impl fmt::Display for SymbolicExpression { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.expr { - SymbolicExpressionType::List(ref list) => { - write!(f, "(")?; - for item in list.iter() { - write!(f, " {item}")?; - } - write!(f, " )")?; - } - SymbolicExpressionType::Atom(ref value) => { - write!(f, "{}", &**value)?; - } - SymbolicExpressionType::AtomValue(ref value) - | SymbolicExpressionType::LiteralValue(ref value) => { - write!(f, "{value}")?; - } - SymbolicExpressionType::TraitReference(ref value, _) => { - write!(f, "<{}>", &**value)?; - } - SymbolicExpressionType::Field(ref value) => { - write!(f, "<{value}>")?; - } - }; - - Ok(()) - } -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] -pub struct Span { - pub start_line: u32, - pub start_column: u32, - pub end_line: u32, - pub end_column: u32, -} - -impl Span { - pub const ZERO: Span = Span { - start_line: 0, - start_column: 0, - end_line: 0, - end_column: 0, - }; - - pub fn zero() -> Self { - Self::default() - } -} +pub use clarity_serialization::representations::{ + depth_traverse, ClarityName, ContractName, PreSymbolicExpression, PreSymbolicExpressionType, + Span, SymbolicExpression, SymbolicExpressionCommon, SymbolicExpressionType, TraitDefinition, + CLARITY_NAME_REGEX, CLARITY_NAME_REGEX_STRING, CONTRACT_MAX_NAME_LENGTH, + CONTRACT_MIN_NAME_LENGTH, CONTRACT_NAME_REGEX, CONTRACT_NAME_REGEX_STRING, + CONTRACT_PRINCIPAL_REGEX_STRING, MAX_STRING_LEN, PRINCIPAL_DATA_REGEX_STRING, + STANDARD_PRINCIPAL_REGEX_STRING, +}; diff --git a/clarity/src/vm/test_util/mod.rs b/clarity/src/vm/test_util/mod.rs index 3fc21b7f21..e852c1bb48 100644 --- a/clarity/src/vm/test_util/mod.rs +++ b/clarity/src/vm/test_util/mod.rs @@ -1,4 +1,3 @@ -use stacks_common::address::{AddressHashMode, C32_ADDRESS_VERSION_TESTNET_SINGLESIG}; use stacks_common::consts::{ BITCOIN_REGTEST_FIRST_BLOCK_HASH, BITCOIN_REGTEST_FIRST_BLOCK_HEIGHT, BITCOIN_REGTEST_FIRST_BLOCK_TIMESTAMP, FIRST_BURNCHAIN_CONSENSUS_HASH, FIRST_STACKS_BLOCK_HASH, @@ -6,7 +5,7 @@ use stacks_common::consts::{ }; use stacks_common::types::chainstate::{ BlockHeaderHash, BurnchainHeaderHash, ConsensusHash, SortitionId, StacksAddress, StacksBlockId, - StacksPrivateKey, StacksPublicKey, VRFSeed, + VRFSeed, }; use stacks_common::types::StacksEpochId; @@ -14,7 +13,7 @@ use crate::vm::ast::ASTRules; use crate::vm::costs::ExecutionCost; use crate::vm::database::{BurnStateDB, HeadersDB}; use crate::vm::representations::SymbolicExpression; -use 
crate::vm::types::{PrincipalData, StandardPrincipalData, TupleData, Value}; +use crate::vm::types::{TupleData, Value}; use crate::vm::{execute as vm_execute, execute_on_network as vm_execute_on_network, StacksEpoch}; pub struct UnitTestBurnStateDB { @@ -100,31 +99,6 @@ pub fn is_err_code_i128(v: &Value, e: i128) -> bool { } } -impl From<&StacksPrivateKey> for StandardPrincipalData { - fn from(o: &StacksPrivateKey) -> StandardPrincipalData { - let stacks_addr = StacksAddress::from_public_keys( - C32_ADDRESS_VERSION_TESTNET_SINGLESIG, - &AddressHashMode::SerializeP2PKH, - 1, - &vec![StacksPublicKey::from_private(o)], - ) - .unwrap(); - StandardPrincipalData::from(stacks_addr) - } -} - -impl From<&StacksPrivateKey> for PrincipalData { - fn from(o: &StacksPrivateKey) -> PrincipalData { - PrincipalData::Standard(StandardPrincipalData::from(o)) - } -} - -impl From<&StacksPrivateKey> for Value { - fn from(o: &StacksPrivateKey) -> Value { - Value::from(StandardPrincipalData::from(o)) - } -} - impl HeadersDB for UnitTestHeaderDB { fn get_burn_header_hash_for_block( &self, diff --git a/clarity/src/vm/tests/contracts.rs b/clarity/src/vm/tests/contracts.rs index 5812830245..229f09c923 100644 --- a/clarity/src/vm/tests/contracts.rs +++ b/clarity/src/vm/tests/contracts.rs @@ -75,7 +75,7 @@ const SIMPLE_TOKENS: &str = "(define-map tokens { account: principal } { balance (token-credit! to amount))))) (define-public (faucet) (let ((original-sender tx-sender)) - (as-contract (print (token-transfer (print original-sender) u1))))) + (as-contract (print (token-transfer (print original-sender) u1))))) (define-public (mint-after (block-to-release uint)) (if (>= block-height block-to-release) (faucet) @@ -480,7 +480,7 @@ fn test_simple_naming_system(epoch: StacksEpochId, mut env_factory: MemoryEnviro \"not enough balance\") (err 1) (err 3))))) - (define-public (register + (define-public (register (recipient-principal principal) (name int) (salt int)) @@ -755,7 +755,7 @@ fn test_aborts(epoch: StacksEpochId, mut env_factory: MemoryEnvironmentGenerator (define-private (get-data (id int)) (default-to 0 - (get value + (get value (map-get? data (tuple (id id)))))) "; @@ -1012,7 +1012,7 @@ fn test_at_unknown_block( fn test_as_max_len(epoch: StacksEpochId, mut tl_env_factory: TopLevelMemoryEnvironmentGenerator) { let mut owned_env = tl_env_factory.get_env(epoch); let contract = "(define-data-var token-ids (list 10 uint) (list)) - (var-set token-ids + (var-set token-ids (unwrap! (as-max-len? 
(append (var-get token-ids) u1) u10) (err 10)))"; owned_env @@ -1027,11 +1027,11 @@ fn test_as_max_len(epoch: StacksEpochId, mut tl_env_factory: TopLevelMemoryEnvir #[test] fn test_ast_stack_depth() { - let program = "(+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + let program = "(+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ 1 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) @@ -1072,15 +1072,15 @@ fn test_cc_stack_depth( mut env_factory: MemoryEnvironmentGenerator, ) { let mut owned_env = env_factory.get_env(epoch); - let contract_one = "(define-public (foo) - (ok (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + let contract_one = "(define-public (foo) + (ok (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ 1 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1)))"; let contract_two = - "(define-private (bar) - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ - (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + "(define-private (bar) + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ + (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (+ (unwrap-panic (contract-call? .c-foo foo ) ) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1) 1)) diff --git a/clarity/src/vm/tests/datamaps.rs b/clarity/src/vm/tests/datamaps.rs index 90d9e2a642..c36454c0a0 100644 --- a/clarity/src/vm/tests/datamaps.rs +++ b/clarity/src/vm/tests/datamaps.rs @@ -13,14 +13,13 @@ // // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
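// A minimal sketch, not part of this diff: the stack-depth tests in
// contracts.rs above spell out their deeply nested `(+ ...)` programs by
// hand; a program of the same shape can also be generated programmatically.
// `nested_add` and `depth` are hypothetical names used only for illustration.
fn nested_add(depth: usize) -> String {
    let mut program = String::from("1");
    for _ in 0..depth {
        // each wrap adds one level of AST depth: (+ (+ ... 1) 1)
        program = format!("(+ {program} 1)");
    }
    program
}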
-use crate::vm::errors::Error; use crate::vm::types::{TupleData, Value}; #[cfg(test)] use crate::vm::{ errors::{CheckErrors, ShortReturnType}, types::{ListData, SequenceData, TupleTypeSignature, TypeSignature}, }; -use crate::vm::{execute, ClarityName}; +use crate::vm::{execute, ClarityName, Error}; fn assert_executes(expected: Result, input: &str) { assert_eq!(expected.unwrap(), execute(input).unwrap().unwrap()); diff --git a/clarity/src/vm/tests/defines.rs b/clarity/src/vm/tests/defines.rs index fea98b7298..7dd8eb7e65 100644 --- a/clarity/src/vm/tests/defines.rs +++ b/clarity/src/vm/tests/defines.rs @@ -30,7 +30,7 @@ use crate::vm::{ errors::{ParseError, ParseErrors}, }, errors::RuntimeErrorType, - types::{QualifiedContractIdentifier, TypeSignature, Value}, + types::{QualifiedContractIdentifier, TypeSignature, TypeSignatureExt as _, Value}, {execute, ClarityVersion}, }; diff --git a/clarity/src/vm/tests/mod.rs b/clarity/src/vm/tests/mod.rs index bf9cc68581..d8fb713d85 100644 --- a/clarity/src/vm/tests/mod.rs +++ b/clarity/src/vm/tests/mod.rs @@ -23,8 +23,6 @@ use super::ClarityVersion; use crate::vm::contexts::OwnedEnvironment; pub use crate::vm::database::BurnStateDB; use crate::vm::database::MemoryBackingStore; -#[cfg(test)] -use crate::{vm::errors::Error, vm::types::Value}; mod assets; mod contracts; @@ -152,13 +150,6 @@ clarity_template! { (Epoch32, Clarity3), } -#[cfg(test)] -impl Value { - pub fn list_from(list_data: Vec) -> Result { - Value::cons_list_unsanitized(list_data) - } -} - #[fixture] pub fn env_factory() -> MemoryEnvironmentGenerator { MemoryEnvironmentGenerator(MemoryBackingStore::new()) diff --git a/clarity/src/vm/tests/sequences.rs b/clarity/src/vm/tests/sequences.rs index 016660df6d..d89a2460b2 100644 --- a/clarity/src/vm/tests/sequences.rs +++ b/clarity/src/vm/tests/sequences.rs @@ -1125,7 +1125,7 @@ fn test_simple_folds_string() { "(define-private (get-slice (x (string-ascii 1)) (acc (tuple (limit uint) (cursor uint) (data (string-ascii 10))))) (if (< (get cursor acc) (get limit acc)) (let ((data (default-to (get data acc) (as-max-len? 
(concat (get data acc) x) u10)))) - (tuple (limit (get limit acc)) (cursor (+ u1 (get cursor acc))) (data data))) + (tuple (limit (get limit acc)) (cursor (+ u1 (get cursor acc))) (data data))) acc)) (get data (fold get-slice \"0123456789\" (tuple (limit u5) (cursor u0) (data \"\"))))"]; @@ -1159,6 +1159,8 @@ fn test_buff_len() { #[apply(test_clarity_versions)] fn test_construct_bad_list(#[case] version: ClarityVersion, #[case] epoch: StacksEpochId) { + use crate::vm::types::TypeSignatureExt as _; + let test1 = "(list 1 2 3 true)"; assert_eq!( execute(test1).unwrap_err(), diff --git a/clarity/src/vm/tests/simple_apply_eval.rs b/clarity/src/vm/tests/simple_apply_eval.rs index 0c241c8f56..c4ca578119 100644 --- a/clarity/src/vm/tests/simple_apply_eval.rs +++ b/clarity/src/vm/tests/simple_apply_eval.rs @@ -332,7 +332,7 @@ fn test_from_consensus_buff_missed_expectations() { ("0x0200000004deadbeef", "(string-ascii 8)"), ("0x03", "uint"), ("0x04", "(optional int)"), - ("0x0700ffffffffffffffffffffffffffffffff", "(response uint int)"), + ("0x0700ffffffffffffffffffffffffffffffff", "(response uint int)"), ("0x0800ffffffffffffffffffffffffffffffff", "(response int uint)"), ("0x09", "(response int int)"), ("0x0b0000000400000000000000000000000000000000010000000000000000000000000000000002000000000000000000000000000000000300fffffffffffffffffffffffffffffffc", @@ -367,7 +367,7 @@ fn test_to_from_consensus_buff_vectors() { ("0x04", "false", "bool"), ("0x050011deadbeef11ababffff11deadbeef11ababffff", "'S08XXBDYXW8TQAZZZW8XXBDYXW8TQAZZZZ88551S", "principal"), ("0x060011deadbeef11ababffff11deadbeef11ababffff0461626364", "'S08XXBDYXW8TQAZZZW8XXBDYXW8TQAZZZZ88551S.abcd", "principal"), - ("0x0700ffffffffffffffffffffffffffffffff", "(ok -1)", "(response int int)"), + ("0x0700ffffffffffffffffffffffffffffffff", "(ok -1)", "(response int int)"), ("0x0800ffffffffffffffffffffffffffffffff", "(err -1)", "(response int int)"), ("0x09", "none", "(optional int)"), ("0x0a00ffffffffffffffffffffffffffffffff", "(some -1)", "(optional int)"), diff --git a/clarity/src/vm/types/mod.rs b/clarity/src/vm/types/mod.rs index 839c45ad10..daa1ebc716 100644 --- a/clarity/src/vm/types/mod.rs +++ b/clarity/src/vm/types/mod.rs @@ -17,758 +17,28 @@ pub mod serialization; pub mod signatures; -use std::collections::btree_map::Entry; -use std::collections::BTreeMap; -use std::{char, fmt, str}; - -use regex::Regex; -use stacks_common::address::c32; -use stacks_common::types::chainstate::StacksAddress; -use stacks_common::types::StacksEpochId; -use stacks_common::util::hash; - -use crate::vm::errors::{ - CheckErrors, InterpreterError, InterpreterResult as Result, RuntimeErrorType, +use std::str; + +pub use clarity_serialization::types::{ + byte_len_of_serialization, ASCIIData, BuffData, CallableData, CharType, ContractIdentifier, + ListData, OptionalData, PrincipalData, QualifiedContractIdentifier, ResponseData, SequenceData, + SequencedValue, StacksAddressExtensions, TraitIdentifier, TupleData, UTF8Data, Value, + BOUND_VALUE_SERIALIZATION_BYTES, BOUND_VALUE_SERIALIZATION_HEX, MAX_TYPE_DEPTH, MAX_VALUE_SIZE, + NONE, WRAPPER_VALUE_SIZE, }; -use crate::vm::representations::{ClarityName, ContractName, SymbolicExpression}; + +pub use self::std_principals::StandardPrincipalData; +use crate::vm::errors::CheckErrors; pub use crate::vm::types::signatures::{ parse_name_type_pairs, AssetIdentifier, BufferLength, FixedFunction, FunctionArg, FunctionSignature, FunctionType, ListTypeData, SequenceSubtype, StringSubtype, - StringUTF8Length, TupleTypeSignature, 
TypeSignature, BUFF_1, BUFF_20, BUFF_21, BUFF_32, - BUFF_33, BUFF_64, BUFF_65, + StringUTF8Length, TupleTypeSignature, TypeSignature, TypeSignatureExt, BUFF_1, BUFF_20, + BUFF_21, BUFF_32, BUFF_33, BUFF_64, BUFF_65, }; use crate::vm::ClarityVersion; -pub const MAX_VALUE_SIZE: u32 = 1024 * 1024; // 1MB -pub const BOUND_VALUE_SERIALIZATION_BYTES: u32 = MAX_VALUE_SIZE * 2; -pub const BOUND_VALUE_SERIALIZATION_HEX: u32 = BOUND_VALUE_SERIALIZATION_BYTES * 2; - -pub const MAX_TYPE_DEPTH: u8 = 32; -// this is the charged size for wrapped values, i.e., response or optionals -pub const WRAPPER_VALUE_SIZE: u32 = 1; - -#[derive(Debug, Clone, Eq, Serialize, Deserialize)] -pub struct TupleData { - // todo: remove type_signature - pub type_signature: TupleTypeSignature, - pub data_map: BTreeMap, -} - -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct BuffData { - pub data: Vec, -} - -#[derive(Debug, Clone, Eq, Serialize, Deserialize)] -pub struct ListData { - pub data: Vec, - // todo: remove type_signature - pub type_signature: ListTypeData, -} - -pub use self::std_principals::StandardPrincipalData; - mod std_principals { - use std::fmt; - - use stacks_common::address::c32; - - use crate::vm::errors::InterpreterError; - - #[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)] - pub struct StandardPrincipalData(u8, pub [u8; 20]); - - impl StandardPrincipalData { - pub fn transient() -> StandardPrincipalData { - Self( - 1, - [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], - ) - } - } - - impl StandardPrincipalData { - pub fn new(version: u8, bytes: [u8; 20]) -> Result { - if version >= 32 { - return Err(InterpreterError::Expect("Unexpected principal data".into())); - } - Ok(Self(version, bytes)) - } - - /// NEVER, EVER use this in ANY production code. - /// `version` must NEVER be greater than 31. - #[cfg(any(test, feature = "testing"))] - pub fn new_unsafe(version: u8, bytes: [u8; 20]) -> Self { - Self(version, bytes) - } - - pub fn null_principal() -> Self { - Self::new(0, [0; 20]).unwrap() - } - - pub fn version(&self) -> u8 { - self.0 - } - - pub fn to_address(&self) -> String { - c32::c32_address(self.0, &self.1[..]).unwrap_or_else(|_| "INVALID_C32_ADD".to_string()) - } - - pub fn destruct(self) -> (u8, [u8; 20]) { - let Self(version, bytes) = self; - (version, bytes) - } - } - - impl fmt::Display for StandardPrincipalData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let c32_str = self.to_address(); - write!(f, "{c32_str}") - } - } - - impl fmt::Debug for StandardPrincipalData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let c32_str = self.to_address(); - write!(f, "StandardPrincipalData({c32_str})") - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)] -pub struct QualifiedContractIdentifier { - pub issuer: StandardPrincipalData, - pub name: ContractName, -} - -impl QualifiedContractIdentifier { - pub fn new(issuer: StandardPrincipalData, name: ContractName) -> QualifiedContractIdentifier { - Self { issuer, name } - } - - pub fn local(name: &str) -> Result { - let name = name.to_string().try_into()?; - Ok(Self::new(StandardPrincipalData::transient(), name)) - } - - #[allow(clippy::unwrap_used)] - pub fn transient() -> QualifiedContractIdentifier { - let name = String::from("__transient").try_into().unwrap(); - Self { - issuer: StandardPrincipalData::transient(), - name, - } - } - - /// Was this contract issued by the null issuer address? 
(i.e., is it a "boot contract") - pub fn is_boot(&self) -> bool { - self.issuer.1 == [0; 20] - } - - pub fn parse(literal: &str) -> Result { - let split: Vec<_> = literal.splitn(2, '.').collect(); - if split.len() != 2 { - return Err(RuntimeErrorType::ParseError( - "Invalid principal literal: expected a `.` in a qualified contract name" - .to_string(), - ) - .into()); - } - let sender = PrincipalData::parse_standard_principal(split[0])?; - let name = split[1].to_string().try_into()?; - Ok(QualifiedContractIdentifier::new(sender, name)) - } -} - -impl fmt::Display for QualifiedContractIdentifier { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}.{}", self.issuer, self.name) - } -} - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] -pub enum PrincipalData { - Standard(StandardPrincipalData), - Contract(QualifiedContractIdentifier), -} - -pub enum ContractIdentifier { - Relative(ContractName), - Qualified(QualifiedContractIdentifier), -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct OptionalData { - pub data: Option>, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct ResponseData { - pub committed: bool, - pub data: Box, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct CallableData { - pub contract_identifier: QualifiedContractIdentifier, - pub trait_identifier: Option, -} - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)] -pub struct TraitIdentifier { - pub name: ClarityName, - pub contract_identifier: QualifiedContractIdentifier, -} - -pub trait StacksAddressExtensions { - fn to_account_principal(&self) -> PrincipalData; -} - -impl StacksAddressExtensions for StacksAddress { - fn to_account_principal(&self) -> PrincipalData { - PrincipalData::Standard( - StandardPrincipalData::new(self.version(), *self.bytes().as_bytes()).unwrap(), - ) - } -} - -impl TraitIdentifier { - pub fn new( - issuer: StandardPrincipalData, - contract_name: ContractName, - name: ClarityName, - ) -> TraitIdentifier { - Self { - name, - contract_identifier: QualifiedContractIdentifier { - issuer, - name: contract_name, - }, - } - } - - pub fn parse_fully_qualified(literal: &str) -> Result { - let (issuer, contract_name, name) = Self::parse(literal)?; - let issuer = issuer.ok_or(RuntimeErrorType::BadTypeConstruction)?; - Ok(TraitIdentifier::new(issuer, contract_name, name)) - } - - pub fn parse_sugared_syntax(literal: &str) -> Result<(ContractName, ClarityName)> { - let (_, contract_name, name) = Self::parse(literal)?; - Ok((contract_name, name)) - } - - pub fn parse( - literal: &str, - ) -> Result<(Option, ContractName, ClarityName)> { - let split: Vec<_> = literal.splitn(3, '.').collect(); - if split.len() != 3 { - return Err(RuntimeErrorType::ParseError( - "Invalid principal literal: expected a `.` in a qualified contract name" - .to_string(), - ) - .into()); - } - - let issuer = match split[0].len() { - 0 => None, - _ => Some(PrincipalData::parse_standard_principal(split[0])?), - }; - let contract_name = split[1].to_string().try_into()?; - let name = split[2].to_string().try_into()?; - - Ok((issuer, contract_name, name)) - } -} - -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub enum Value { - Int(i128), - UInt(u128), - Bool(bool), - Sequence(SequenceData), - Principal(PrincipalData), - Tuple(TupleData), - Optional(OptionalData), - Response(ResponseData), - CallableContract(CallableData), - // NOTE: any new value variants 
which may contain _other values_ (i.e., - // compound values like `Optional`, `Tuple`, `Response`, or `Sequence(List)`) - // must be handled in the value sanitization routine! -} - -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub enum SequenceData { - Buffer(BuffData), - List(ListData), - String(CharType), -} - -impl SequenceData { - pub fn atom_values(&mut self) -> Result> { - match self { - SequenceData::Buffer(ref mut data) => data.atom_values(), - SequenceData::List(ref mut data) => data.atom_values(), - SequenceData::String(CharType::ASCII(ref mut data)) => data.atom_values(), - SequenceData::String(CharType::UTF8(ref mut data)) => data.atom_values(), - } - } - - pub fn element_size(&self) -> Result { - let out = match self { - SequenceData::Buffer(..) => TypeSignature::min_buffer()?.size(), - SequenceData::List(ref data) => data.type_signature.get_list_item_type().size(), - SequenceData::String(CharType::ASCII(..)) => TypeSignature::min_string_ascii()?.size(), - SequenceData::String(CharType::UTF8(..)) => TypeSignature::min_string_utf8()?.size(), - }?; - Ok(out) - } - - pub fn len(&self) -> usize { - match &self { - SequenceData::Buffer(data) => data.items().len(), - SequenceData::List(data) => data.items().len(), - SequenceData::String(CharType::ASCII(data)) => data.items().len(), - SequenceData::String(CharType::UTF8(data)) => data.items().len(), - } - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - pub fn element_at(self, index: usize) -> Result> { - if self.len() <= index { - return Ok(None); - } - let result = match self { - SequenceData::Buffer(data) => Value::buff_from_byte(data.data[index]), - SequenceData::List(mut data) => data.data.remove(index), - SequenceData::String(CharType::ASCII(data)) => { - Value::string_ascii_from_bytes(vec![data.data[index]]).map_err(|_| { - InterpreterError::Expect( - "BUG: failed to initialize single-byte ASCII buffer".into(), - ) - })? - } - SequenceData::String(CharType::UTF8(mut data)) => { - Value::Sequence(SequenceData::String(CharType::UTF8(UTF8Data { - data: vec![data.data.remove(index)], - }))) - } - }; - - Ok(Some(result)) - } - - pub fn replace_at(self, epoch: &StacksEpochId, index: usize, element: Value) -> Result { - let seq_length = self.len(); - - // Check that the length of the provided element is 1. In the case that SequenceData - // is a list, we check that the provided element is the right type below. - if !self.is_list() { - if let Value::Sequence(data) = &element { - let elem_length = data.len(); - if elem_length != 1 { - return Err(RuntimeErrorType::BadTypeConstruction.into()); - } - } else { - return Err(RuntimeErrorType::BadTypeConstruction.into()); - } - } - if index >= seq_length { - return Err(CheckErrors::ValueOutOfBounds.into()); - } - - let new_seq_data = match (self, element) { - (SequenceData::Buffer(mut data), Value::Sequence(SequenceData::Buffer(elem))) => { - data.data[index] = elem.data[0]; - SequenceData::Buffer(data) - } - (SequenceData::List(mut data), elem) => { - let entry_type = data.type_signature.get_list_item_type(); - if !entry_type.admits(epoch, &elem)? 
{ - return Err(CheckErrors::ListTypesMustMatch.into()); - } - data.data[index] = elem; - SequenceData::List(data) - } - ( - SequenceData::String(CharType::ASCII(mut data)), - Value::Sequence(SequenceData::String(CharType::ASCII(elem))), - ) => { - data.data[index] = elem.data[0]; - SequenceData::String(CharType::ASCII(data)) - } - ( - SequenceData::String(CharType::UTF8(mut data)), - Value::Sequence(SequenceData::String(CharType::UTF8(mut elem))), - ) => { - data.data[index] = elem.data.swap_remove(0); - SequenceData::String(CharType::UTF8(data)) - } - _ => return Err(CheckErrors::ListTypesMustMatch.into()), - }; - - Value::some(Value::Sequence(new_seq_data)) - } - - pub fn contains(&self, to_find: Value) -> Result> { - match self { - SequenceData::Buffer(ref data) => { - if let Value::Sequence(SequenceData::Buffer(to_find_vec)) = to_find { - if to_find_vec.data.len() != 1 { - Ok(None) - } else { - for (index, entry) in data.data.iter().enumerate() { - if entry == &to_find_vec.data[0] { - return Ok(Some(index)); - } - } - Ok(None) - } - } else { - Err(CheckErrors::TypeValueError(TypeSignature::min_buffer()?, to_find).into()) - } - } - SequenceData::List(ref data) => { - for (index, entry) in data.data.iter().enumerate() { - if entry == &to_find { - return Ok(Some(index)); - } - } - Ok(None) - } - SequenceData::String(CharType::ASCII(ref data)) => { - if let Value::Sequence(SequenceData::String(CharType::ASCII(to_find_vec))) = to_find - { - if to_find_vec.data.len() != 1 { - Ok(None) - } else { - for (index, entry) in data.data.iter().enumerate() { - if entry == &to_find_vec.data[0] { - return Ok(Some(index)); - } - } - Ok(None) - } - } else { - Err( - CheckErrors::TypeValueError(TypeSignature::min_string_ascii()?, to_find) - .into(), - ) - } - } - SequenceData::String(CharType::UTF8(ref data)) => { - if let Value::Sequence(SequenceData::String(CharType::UTF8(to_find_vec))) = to_find - { - if to_find_vec.data.len() != 1 { - Ok(None) - } else { - for (index, entry) in data.data.iter().enumerate() { - if entry == &to_find_vec.data[0] { - return Ok(Some(index)); - } - } - Ok(None) - } - } else { - Err( - CheckErrors::TypeValueError(TypeSignature::min_string_utf8()?, to_find) - .into(), - ) - } - } - } - } - - pub fn filter(&mut self, filter: &mut F) -> Result<()> - where - F: FnMut(SymbolicExpression) -> Result, - { - // Note: this macro can probably get removed once - // ```Vec::drain_filter(&mut self, filter: F) -> DrainFilter``` - // is available in rust stable channel (experimental at this point). - macro_rules! 
drain_filter { - ($data:expr, $seq_type:ident) => { - let mut i = 0; - while i != $data.data.len() { - let atom_value = - SymbolicExpression::atom_value($seq_type::to_value(&$data.data[i])?); - match filter(atom_value) { - Ok(res) if res == false => { - $data.data.remove(i); - } - Ok(_) => { - i += 1; - } - Err(err) => return Err(err), - } - } - }; - } - - match self { - SequenceData::Buffer(ref mut data) => { - drain_filter!(data, BuffData); - } - SequenceData::List(ref mut data) => { - drain_filter!(data, ListData); - } - SequenceData::String(CharType::ASCII(ref mut data)) => { - drain_filter!(data, ASCIIData); - } - SequenceData::String(CharType::UTF8(ref mut data)) => { - drain_filter!(data, UTF8Data); - } - } - Ok(()) - } - - pub fn concat(&mut self, epoch: &StacksEpochId, other_seq: SequenceData) -> Result<()> { - match (self, other_seq) { - (SequenceData::List(ref mut inner_data), SequenceData::List(other_inner_data)) => { - inner_data.append(epoch, other_inner_data) - } - ( - SequenceData::Buffer(ref mut inner_data), - SequenceData::Buffer(ref mut other_inner_data), - ) => inner_data.append(other_inner_data), - ( - SequenceData::String(CharType::ASCII(ref mut inner_data)), - SequenceData::String(CharType::ASCII(ref mut other_inner_data)), - ) => inner_data.append(other_inner_data), - ( - SequenceData::String(CharType::UTF8(ref mut inner_data)), - SequenceData::String(CharType::UTF8(ref mut other_inner_data)), - ) => inner_data.append(other_inner_data), - _ => Err(RuntimeErrorType::BadTypeConstruction.into()), - }?; - Ok(()) - } - - pub fn slice( - self, - epoch: &StacksEpochId, - left_position: usize, - right_position: usize, - ) -> Result { - let empty_seq = left_position == right_position; - - let result = match self { - SequenceData::Buffer(data) => { - let data = if empty_seq { - vec![] - } else { - data.data[left_position..right_position].to_vec() - }; - Value::buff_from(data) - } - SequenceData::List(data) => { - let data = if empty_seq { - vec![] - } else { - data.data[left_position..right_position].to_vec() - }; - Value::cons_list(data, epoch) - } - SequenceData::String(CharType::ASCII(data)) => { - let data = if empty_seq { - vec![] - } else { - data.data[left_position..right_position].to_vec() - }; - Value::string_ascii_from_bytes(data) - } - SequenceData::String(CharType::UTF8(data)) => { - let data = if empty_seq { - vec![] - } else { - data.data[left_position..right_position].to_vec() - }; - Ok(Value::Sequence(SequenceData::String(CharType::UTF8( - UTF8Data { data }, - )))) - } - }?; - - Ok(result) - } - - pub fn is_list(&self) -> bool { - matches!(self, SequenceData::List(..)) - } -} - -#[derive(Clone, Eq, PartialEq, Serialize, Deserialize)] -pub enum CharType { - UTF8(UTF8Data), - ASCII(ASCIIData), -} - -impl fmt::Display for CharType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CharType::ASCII(string) => write!(f, "{string}"), - CharType::UTF8(string) => write!(f, "{string}"), - } - } -} - -impl fmt::Debug for CharType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{self}") - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct ASCIIData { - pub data: Vec, -} - -impl fmt::Display for ASCIIData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut escaped_str = String::new(); - for c in self.data.iter() { - let escaped_char = format!("{}", std::ascii::escape_default(*c)); - escaped_str.push_str(&escaped_char); - } - write!(f, "\"{escaped_str}\"") - } -} - 
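// Hedged illustration (not part of this change): the Display impl above
// escapes every byte with `std::ascii::escape_default` and wraps the result
// in double quotes, so non-printable characters render as escape sequences.
// `ascii_display_example` is a hypothetical helper.
fn ascii_display_example() -> String {
    let s = ASCIIData { data: b"a\tb".to_vec() };
    // renders as the literal text "a\tb" (quotes included, tab escaped)
    format!("{s}")
}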
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct UTF8Data { - pub data: Vec>, -} - -impl fmt::Display for UTF8Data { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut result = String::new(); - for c in self.data.iter() { - if c.len() > 1 { - // We escape extended charset - result.push_str(&format!("\\u{{{}}}", hash::to_hex(&c[..]))); - } else { - // We render an ASCII char, escaped - let escaped_char = format!("{}", std::ascii::escape_default(c[0])); - result.push_str(&escaped_char); - } - } - write!(f, "u\"{result}\"") - } -} - -pub trait SequencedValue { - fn type_signature(&self) -> std::result::Result; - - fn items(&self) -> &Vec; - - fn drained_items(&mut self) -> Vec; - - fn to_value(v: &T) -> Result; - - fn atom_values(&mut self) -> Result> { - self.drained_items() - .iter() - .map(|item| Ok(SymbolicExpression::atom_value(Self::to_value(item)?))) - .collect() - } -} - -impl SequencedValue for ListData { - fn items(&self) -> &Vec { - &self.data - } - - fn drained_items(&mut self) -> Vec { - self.data.drain(..).collect() - } - - fn type_signature(&self) -> std::result::Result { - Ok(TypeSignature::SequenceType(SequenceSubtype::ListType( - self.type_signature.clone(), - ))) - } - - fn to_value(v: &Value) -> Result { - Ok(v.clone()) - } -} - -impl SequencedValue for BuffData { - fn items(&self) -> &Vec { - &self.data - } - - fn drained_items(&mut self) -> Vec { - self.data.drain(..).collect() - } - - fn type_signature(&self) -> std::result::Result { - let buff_length = BufferLength::try_from(self.data.len()).map_err(|_| { - CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) - })?; - Ok(TypeSignature::SequenceType(SequenceSubtype::BufferType( - buff_length, - ))) - } - - fn to_value(v: &u8) -> Result { - Ok(Value::buff_from_byte(*v)) - } -} - -impl SequencedValue for ASCIIData { - fn items(&self) -> &Vec { - &self.data - } - - fn drained_items(&mut self) -> Vec { - self.data.drain(..).collect() - } - - fn type_signature(&self) -> std::result::Result { - let buff_length = BufferLength::try_from(self.data.len()).map_err(|_| { - CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) - })?; - Ok(TypeSignature::SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(buff_length), - ))) - } - - fn to_value(v: &u8) -> Result { - Value::string_ascii_from_bytes(vec![*v]).map_err(|_| { - InterpreterError::Expect("ERROR: Invalid ASCII string successfully constructed".into()) - .into() - }) - } -} - -impl SequencedValue> for UTF8Data { - fn items(&self) -> &Vec> { - &self.data - } - - fn drained_items(&mut self) -> Vec> { - self.data.drain(..).collect() - } - - fn type_signature(&self) -> std::result::Result { - let str_len = StringUTF8Length::try_from(self.data.len()).map_err(|_| { - CheckErrors::Expects("ERROR: Too large of a buffer successfully constructed.".into()) - })?; - Ok(TypeSignature::SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(str_len), - ))) - } - - fn to_value(v: &Vec) -> Result { - Value::string_utf8_from_bytes(v.clone()).map_err(|_| { - InterpreterError::Expect("ERROR: Invalid UTF8 string successfully constructed".into()) - .into() - }) - } + pub use clarity_serialization::types::StandardPrincipalData; } // Properties for "get-block-info". 
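// A minimal sketch, assuming this module's items are in scope (not part of
// this change): `SequencedValue::atom_values` drains a sequence and re-wraps
// each element as an atom-value expression, so for a buffer every byte
// becomes a one-byte buff value. `buff_atom_values_example` is hypothetical;
// `Result` is this module's interpreter result alias.
fn buff_atom_values_example() -> Result<Vec<SymbolicExpression>> {
    let mut buff = BuffData { data: vec![1, 2, 3] };
    // yields three expressions wrapping 0x01, 0x02 and 0x03 respectively
    buff.atom_values()
}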
@@ -806,36 +76,6 @@ define_named_enum!(TenureInfoProperty { BlockReward("block-reward"), }); -impl OptionalData { - pub fn type_signature(&self) -> std::result::Result { - let type_result = match self.data { - Some(ref v) => TypeSignature::new_option(TypeSignature::type_of(v)?), - None => TypeSignature::new_option(TypeSignature::NoType), - }; - type_result.map_err(|_| { - CheckErrors::Expects("Should not have constructed too large of a type.".into()) - }) - } -} - -impl ResponseData { - pub fn type_signature(&self) -> std::result::Result { - let type_result = match self.committed { - true => TypeSignature::new_response( - TypeSignature::type_of(&self.data)?, - TypeSignature::NoType, - ), - false => TypeSignature::new_response( - TypeSignature::NoType, - TypeSignature::type_of(&self.data)?, - ), - }; - type_result.map_err(|_| { - CheckErrors::Expects("Should not have constructed too large of a type.".into()) - }) - } -} - impl BlockInfoProperty { pub fn type_result(&self) -> TypeSignature { use self::BlockInfoProperty::*; @@ -900,967 +140,3 @@ impl TenureInfoProperty { } } } - -impl PartialEq for ListData { - fn eq(&self, other: &ListData) -> bool { - self.data == other.data - } -} - -impl PartialEq for TupleData { - fn eq(&self, other: &TupleData) -> bool { - self.data_map == other.data_map - } -} - -pub const NONE: Value = Value::Optional(OptionalData { data: None }); - -impl Value { - pub fn some(data: Value) -> Result { - if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge.into()) - } else if data.depth()? + 1 > MAX_TYPE_DEPTH { - Err(CheckErrors::TypeSignatureTooDeep.into()) - } else { - Ok(Value::Optional(OptionalData { - data: Some(Box::new(data)), - })) - } - } - - pub fn none() -> Value { - NONE.clone() - } - - pub fn okay_true() -> Value { - Value::Response(ResponseData { - committed: true, - data: Box::new(Value::Bool(true)), - }) - } - - pub fn err_uint(ecode: u128) -> Value { - Value::Response(ResponseData { - committed: false, - data: Box::new(Value::UInt(ecode)), - }) - } - - pub fn err_none() -> Value { - Value::Response(ResponseData { - committed: false, - data: Box::new(NONE.clone()), - }) - } - - pub fn okay(data: Value) -> Result { - if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge.into()) - } else if data.depth()? + 1 > MAX_TYPE_DEPTH { - Err(CheckErrors::TypeSignatureTooDeep.into()) - } else { - Ok(Value::Response(ResponseData { - committed: true, - data: Box::new(data), - })) - } - } - - pub fn error(data: Value) -> Result { - if data.size()? + WRAPPER_VALUE_SIZE > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge.into()) - } else if data.depth()? + 1 > MAX_TYPE_DEPTH { - Err(CheckErrors::TypeSignatureTooDeep.into()) - } else { - Ok(Value::Response(ResponseData { - committed: false, - data: Box::new(data), - })) - } - } - - pub fn size(&self) -> Result { - Ok(TypeSignature::type_of(self)?.size()?) - } - - pub fn depth(&self) -> Result { - Ok(TypeSignature::type_of(self)?.depth()) - } - - /// Invariant: the supplied Values have already been "checked", i.e., it's a valid Value object - /// this invariant is enforced through the Value constructors, each of which checks to ensure - /// that any typing data is correct. 
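// Hedged sketch, not part of this diff: the wrapper constructors above
// charge WRAPPER_VALUE_SIZE on top of the wrapped value's size, so wrapping
// an already maximal buffer fails with ValueTooLarge; the test module at the
// end of this file exercises the same case. `wrap_checks_example` is a
// hypothetical helper.
fn wrap_checks_example() -> Result<Value> {
    let max_buff = Value::buff_from(vec![0; MAX_VALUE_SIZE as usize])?;
    // MAX_VALUE_SIZE + WRAPPER_VALUE_SIZE exceeds the limit -> ValueTooLarge
    Value::okay(max_buff)
}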
- pub fn list_with_type( - epoch: &StacksEpochId, - list_data: Vec, - expected_type: ListTypeData, - ) -> Result { - // Constructors for TypeSignature ensure that the size of the Value cannot - // be greater than MAX_VALUE_SIZE (they error on such constructions) - // so we do not need to perform that check here. - if (expected_type.get_max_len() as usize) < list_data.len() { - return Err(InterpreterError::FailureConstructingListWithType.into()); - } - - { - let expected_item_type = expected_type.get_list_item_type(); - - for item in &list_data { - if !expected_item_type.admits(epoch, item)? { - return Err(InterpreterError::FailureConstructingListWithType.into()); - } - } - } - - Ok(Value::Sequence(SequenceData::List(ListData { - data: list_data, - type_signature: expected_type, - }))) - } - - pub fn cons_list_unsanitized(list_data: Vec) -> Result { - let type_sig = TypeSignature::construct_parent_list_type(&list_data)?; - Ok(Value::Sequence(SequenceData::List(ListData { - data: list_data, - type_signature: type_sig, - }))) - } - - pub fn cons_list(list_data: Vec, epoch: &StacksEpochId) -> Result { - // Constructors for TypeSignature ensure that the size of the Value cannot - // be greater than MAX_VALUE_SIZE (they error on such constructions) - // Aaron: at this point, we've _already_ allocated memory for this type. - // (e.g., from a (map...) call, or a (list...) call. - // this is a problem _if_ the static analyzer cannot already prevent - // this case. This applies to all the constructor size checks. - let type_sig = TypeSignature::construct_parent_list_type(&list_data)?; - let list_data_opt: Option<_> = list_data - .into_iter() - .map(|item| { - Value::sanitize_value(epoch, type_sig.get_list_item_type(), item) - .map(|(value, _did_sanitize)| value) - }) - .collect(); - let list_data = list_data_opt.ok_or_else(|| CheckErrors::ListTypesMustMatch)?; - Ok(Value::Sequence(SequenceData::List(ListData { - data: list_data, - type_signature: type_sig, - }))) - } - - /// # Errors - /// - CheckErrors::ValueTooLarge if `buff_data` is too large. 
- pub fn buff_from(buff_data: Vec) -> Result { - // check the buffer size - BufferLength::try_from(buff_data.len())?; - // construct the buffer - Ok(Value::Sequence(SequenceData::Buffer(BuffData { - data: buff_data, - }))) - } - - pub fn buff_from_byte(byte: u8) -> Value { - Value::Sequence(SequenceData::Buffer(BuffData { data: vec![byte] })) - } - - pub fn string_ascii_from_bytes(bytes: Vec) -> Result { - // check the string size - BufferLength::try_from(bytes.len())?; - - for b in bytes.iter() { - if !b.is_ascii_alphanumeric() && !b.is_ascii_punctuation() && !b.is_ascii_whitespace() { - return Err(CheckErrors::InvalidCharactersDetected.into()); - } - } - // construct the string - Ok(Value::Sequence(SequenceData::String(CharType::ASCII( - ASCIIData { data: bytes }, - )))) - } - - pub fn string_utf8_from_string_utf8_literal(tokenized_str: String) -> Result { - let wrapped_codepoints_matcher = Regex::new("^\\\\u\\{(?P[[:xdigit:]]+)\\}") - .map_err(|_| InterpreterError::Expect("Bad regex".into()))?; - let mut window = tokenized_str.as_str(); - let mut cursor = 0; - let mut data: Vec> = vec![]; - while !window.is_empty() { - if let Some(captures) = wrapped_codepoints_matcher.captures(window) { - let matched = captures - .name("value") - .ok_or_else(|| InterpreterError::Expect("Expected capture".into()))?; - let scalar_value = window[matched.start()..matched.end()].to_string(); - let unicode_char = { - let u = u32::from_str_radix(&scalar_value, 16) - .map_err(|_| CheckErrors::InvalidUTF8Encoding)?; - let c = char::from_u32(u).ok_or_else(|| CheckErrors::InvalidUTF8Encoding)?; - let mut encoded_char: Vec = vec![0; c.len_utf8()]; - c.encode_utf8(&mut encoded_char[..]); - encoded_char - }; - - data.push(unicode_char); - cursor += scalar_value.len() + 4; - } else { - let ascii_char = window[0..1].to_string().into_bytes(); - data.push(ascii_char); - cursor += 1; - } - // check the string size - StringUTF8Length::try_from(data.len())?; - - window = &tokenized_str[cursor..]; - } - // construct the string - Ok(Value::Sequence(SequenceData::String(CharType::UTF8( - UTF8Data { data }, - )))) - } - - pub fn string_utf8_from_bytes(bytes: Vec) -> Result { - let validated_utf8_str = match str::from_utf8(&bytes) { - Ok(string) => string, - _ => return Err(CheckErrors::InvalidCharactersDetected.into()), - }; - let data = validated_utf8_str - .chars() - .map(|char| { - let mut encoded_char = vec![0u8; char.len_utf8()]; - char.encode_utf8(&mut encoded_char); - encoded_char - }) - .collect::>(); - // check the string size - StringUTF8Length::try_from(data.len())?; - - Ok(Value::Sequence(SequenceData::String(CharType::UTF8( - UTF8Data { data }, - )))) - } - - pub fn expect_ascii(self) -> Result { - if let Value::Sequence(SequenceData::String(CharType::ASCII(ASCIIData { data }))) = self { - Ok(String::from_utf8(data) - .map_err(|_| InterpreterError::Expect("Non UTF-8 data in string".into()))?) 
- } else { - error!("Value '{self:?}' is not an ASCII string"); - Err(InterpreterError::Expect("Expected ASCII string".into()).into()) - } - } - - pub fn expect_u128(self) -> Result { - if let Value::UInt(inner) = self { - Ok(inner) - } else { - error!("Value '{self:?}' is not a u128"); - Err(InterpreterError::Expect("Expected u128".into()).into()) - } - } - - pub fn expect_i128(self) -> Result { - if let Value::Int(inner) = self { - Ok(inner) - } else { - error!("Value '{self:?}' is not an i128"); - Err(InterpreterError::Expect("Expected i128".into()).into()) - } - } - - pub fn expect_buff(self, sz: usize) -> Result> { - if let Value::Sequence(SequenceData::Buffer(buffdata)) = self { - if buffdata.data.len() <= sz { - Ok(buffdata.data) - } else { - error!( - "Value buffer has len {}, expected {sz}", - buffdata.data.len() - ); - Err(InterpreterError::Expect("Unexpected buff length".into()).into()) - } - } else { - error!("Value '{self:?}' is not a buff"); - Err(InterpreterError::Expect("Expected buff".into()).into()) - } - } - - pub fn expect_list(self) -> Result> { - if let Value::Sequence(SequenceData::List(listdata)) = self { - Ok(listdata.data) - } else { - error!("Value '{self:?}' is not a list"); - Err(InterpreterError::Expect("Expected list".into()).into()) - } - } - - pub fn expect_buff_padded(self, sz: usize, pad: u8) -> Result> { - let mut data = self.expect_buff(sz)?; - if sz > data.len() { - for _ in data.len()..sz { - data.push(pad) - } - } - Ok(data) - } - - pub fn expect_bool(self) -> Result { - if let Value::Bool(b) = self { - Ok(b) - } else { - error!("Value '{self:?}' is not a bool"); - Err(InterpreterError::Expect("Expected bool".into()).into()) - } - } - - pub fn expect_tuple(self) -> Result { - if let Value::Tuple(data) = self { - Ok(data) - } else { - error!("Value '{self:?}' is not a tuple"); - Err(InterpreterError::Expect("Expected tuple".into()).into()) - } - } - - pub fn expect_optional(self) -> Result> { - if let Value::Optional(opt) = self { - match opt.data { - Some(boxed_value) => Ok(Some(*boxed_value)), - None => Ok(None), - } - } else { - error!("Value '{self:?}' is not an optional"); - Err(InterpreterError::Expect("Expected optional".into()).into()) - } - } - - pub fn expect_principal(self) -> Result { - if let Value::Principal(p) = self { - Ok(p) - } else { - error!("Value '{self:?}' is not a principal"); - Err(InterpreterError::Expect("Expected principal".into()).into()) - } - } - - pub fn expect_callable(self) -> Result { - if let Value::CallableContract(t) = self { - Ok(t) - } else { - error!("Value '{self:?}' is not a callable contract"); - Err(InterpreterError::Expect("Expected callable".into()).into()) - } - } - - pub fn expect_result(self) -> Result> { - if let Value::Response(res_data) = self { - if res_data.committed { - Ok(Ok(*res_data.data)) - } else { - Ok(Err(*res_data.data)) - } - } else { - error!("Value '{self:?}' is not a response"); - Err(InterpreterError::Expect("Expected response".into()).into()) - } - } - - pub fn expect_result_ok(self) -> Result { - if let Value::Response(res_data) = self { - if res_data.committed { - Ok(*res_data.data) - } else { - error!("Value is not a (ok ..)"); - Err(InterpreterError::Expect("Expected ok response".into()).into()) - } - } else { - error!("Value '{self:?}' is not a response"); - Err(InterpreterError::Expect("Expected response".into()).into()) - } - } - - pub fn expect_result_err(self) -> Result { - if let Value::Response(res_data) = self { - if !res_data.committed { - Ok(*res_data.data) - } else { 
- error!("Value is not a (err ..)"); - Err(InterpreterError::Expect("Expected err response".into()).into()) - } - } else { - error!("Value '{self:?}' is not a response"); - Err(InterpreterError::Expect("Expected response".into()).into()) - } - } -} - -impl BuffData { - pub fn len(&self) -> Result { - self.data - .len() - .try_into() - .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) - } - - pub fn as_slice(&self) -> &[u8] { - self.data.as_slice() - } - - fn append(&mut self, other_seq: &mut BuffData) -> Result<()> { - self.data.append(&mut other_seq.data); - Ok(()) - } - - pub fn empty() -> Self { - Self { data: Vec::new() } - } -} - -impl ListData { - pub fn len(&self) -> Result { - self.data - .len() - .try_into() - .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) - } - - pub fn is_empty(&self) -> bool { - self.data.is_empty() - } - - fn append(&mut self, epoch: &StacksEpochId, other_seq: ListData) -> Result<()> { - let entry_type_a = self.type_signature.get_list_item_type(); - let entry_type_b = other_seq.type_signature.get_list_item_type(); - let entry_type = TypeSignature::factor_out_no_type(epoch, entry_type_a, entry_type_b)?; - let max_len = self.type_signature.get_max_len() + other_seq.type_signature.get_max_len(); - for item in other_seq.data.into_iter() { - let (item, _) = Value::sanitize_value(epoch, &entry_type, item) - .ok_or_else(|| CheckErrors::ListTypesMustMatch)?; - self.data.push(item); - } - - self.type_signature = ListTypeData::new_list(entry_type, max_len)?; - Ok(()) - } -} - -impl ASCIIData { - fn append(&mut self, other_seq: &mut ASCIIData) -> Result<()> { - self.data.append(&mut other_seq.data); - Ok(()) - } - - pub fn len(&self) -> Result { - self.data - .len() - .try_into() - .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) - } -} - -impl UTF8Data { - fn append(&mut self, other_seq: &mut UTF8Data) -> Result<()> { - self.data.append(&mut other_seq.data); - Ok(()) - } - - pub fn len(&self) -> Result { - self.data - .len() - .try_into() - .map_err(|_| InterpreterError::Expect("Data length should be valid".into()).into()) - } -} - -impl fmt::Display for OptionalData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.data { - Some(ref x) => write!(f, "(some {x})"), - None => write!(f, "none"), - } - } -} - -impl fmt::Display for ResponseData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.committed { - true => write!(f, "(ok {})", self.data), - false => write!(f, "(err {})", self.data), - } - } -} - -impl fmt::Display for BuffData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", hash::to_hex(&self.data)) - } -} - -impl fmt::Debug for BuffData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{self}") - } -} - -impl fmt::Display for Value { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Value::Int(int) => write!(f, "{int}"), - Value::UInt(int) => write!(f, "u{int}"), - Value::Bool(boolean) => write!(f, "{boolean}"), - Value::Tuple(data) => write!(f, "{data}"), - Value::Principal(principal_data) => write!(f, "{principal_data}"), - Value::Optional(opt_data) => write!(f, "{opt_data}"), - Value::Response(res_data) => write!(f, "{res_data}"), - Value::Sequence(SequenceData::Buffer(vec_bytes)) => write!(f, "0x{vec_bytes}"), - Value::Sequence(SequenceData::String(string)) => write!(f, "{string}"), - Value::Sequence(SequenceData::List(list_data)) => { - 
write!(f, "(")?; - for (ix, v) in list_data.data.iter().enumerate() { - if ix > 0 { - write!(f, " ")?; - } - write!(f, "{v}")?; - } - write!(f, ")") - } - Value::CallableContract(callable_data) => write!(f, "{callable_data}"), - } - } -} - -impl PrincipalData { - pub fn version(&self) -> u8 { - match self { - PrincipalData::Standard(ref p) => p.version(), - PrincipalData::Contract(QualifiedContractIdentifier { issuer, name: _ }) => { - issuer.version() - } - } - } - - /// A version is only valid if it fits into 5 bits. - /// This is enforced by the constructor, but it was historically possible to assemble invalid - /// addresses. This function is used to validate historic addresses. - pub fn has_valid_version(&self) -> bool { - self.version() < 32 - } - - pub fn parse(literal: &str) -> Result { - // be permissive about leading single-quote - let literal = literal.strip_prefix('\'').unwrap_or(literal); - - if literal.contains('.') { - PrincipalData::parse_qualified_contract_principal(literal) - } else { - PrincipalData::parse_standard_principal(literal).map(PrincipalData::from) - } - } - - pub fn parse_qualified_contract_principal(literal: &str) -> Result { - let contract_id = QualifiedContractIdentifier::parse(literal)?; - Ok(PrincipalData::Contract(contract_id)) - } - - pub fn parse_standard_principal(literal: &str) -> Result { - let (version, data) = c32::c32_address_decode(literal) - .map_err(|x| RuntimeErrorType::ParseError(format!("Invalid principal literal: {x}")))?; - if data.len() != 20 { - return Err(RuntimeErrorType::ParseError( - "Invalid principal literal: Expected 20 data bytes.".to_string(), - ) - .into()); - } - let mut fixed_data = [0; 20]; - fixed_data.copy_from_slice(&data[..20]); - Ok(StandardPrincipalData::new(version, fixed_data)?) 
- } -} - -impl fmt::Display for PrincipalData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - PrincipalData::Standard(sender) => write!(f, "{sender}"), - PrincipalData::Contract(contract_identifier) => write!( - f, - "{}.{}", - contract_identifier.issuer, contract_identifier.name - ), - } - } -} - -impl fmt::Display for CallableData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if let Some(trait_identifier) = &self.trait_identifier { - write!(f, "({} as <{trait_identifier}>)", self.contract_identifier) - } else { - write!(f, "{}", self.contract_identifier,) - } - } -} - -impl fmt::Display for TraitIdentifier { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}.{}", self.contract_identifier, self.name) - } -} - -impl From for StandardPrincipalData { - fn from(addr: StacksAddress) -> Self { - let (version, bytes) = addr.destruct(); - - // should be infallible because it's impossible to construct a StacksAddress with an - // unsupported version byte - Self::new(version, bytes.0) - .expect("FATAL: could not convert StacksAddress to StandardPrincipalData") - } -} - -impl From for PrincipalData { - fn from(addr: StacksAddress) -> Self { - PrincipalData::from(StandardPrincipalData::from(addr)) - } -} - -impl From for StacksAddress { - fn from(o: StandardPrincipalData) -> StacksAddress { - // should be infallible because it's impossible to construct a StandardPrincipalData with - // an unsupported version byte - StacksAddress::new(o.version(), hash::Hash160(o.1)) - .expect("FATAL: could not convert a StandardPrincipalData to StacksAddress") - } -} - -impl From for Value { - fn from(principal: StandardPrincipalData) -> Self { - Value::Principal(PrincipalData::from(principal)) - } -} - -impl From for Value { - fn from(principal: QualifiedContractIdentifier) -> Self { - Value::Principal(PrincipalData::Contract(principal)) - } -} - -impl From for Value { - fn from(p: PrincipalData) -> Self { - Value::Principal(p) - } -} - -impl From for PrincipalData { - fn from(p: StandardPrincipalData) -> Self { - PrincipalData::Standard(p) - } -} - -impl From for PrincipalData { - fn from(principal: QualifiedContractIdentifier) -> Self { - PrincipalData::Contract(principal) - } -} - -impl From for Value { - fn from(t: TupleData) -> Self { - Value::Tuple(t) - } -} - -impl From for Value { - fn from(ascii: ASCIIData) -> Self { - Value::Sequence(SequenceData::String(CharType::ASCII(ascii))) - } -} -impl From for ASCIIData { - fn from(name: ContractName) -> Self { - // ContractName is guaranteed to be between 5 and 40 bytes and contains only printable - // ASCII already, so this conversion should not fail. 
- ASCIIData { - data: name.as_str().as_bytes().to_vec(), - } - } -} - -impl TupleData { - fn new( - type_signature: TupleTypeSignature, - data_map: BTreeMap, - ) -> Result { - let t = TupleData { - type_signature, - data_map, - }; - Ok(t) - } - - /// Return the number of fields in this tuple value - pub fn len(&self) -> u64 { - self.data_map.len() as u64 - } - - /// Checks whether the tuple value is empty - pub fn is_empty(&self) -> bool { - self.data_map.is_empty() - } - - // TODO: add tests from mutation testing results #4833 - #[cfg_attr(test, mutants::skip)] - pub fn from_data(data: Vec<(ClarityName, Value)>) -> Result { - let mut type_map = BTreeMap::new(); - let mut data_map = BTreeMap::new(); - for (name, value) in data.into_iter() { - let type_info = TypeSignature::type_of(&value)?; - let entry = type_map.entry(name.clone()); - match entry { - Entry::Vacant(e) => e.insert(type_info), - Entry::Occupied(_) => return Err(CheckErrors::NameAlreadyUsed(name.into()).into()), - }; - data_map.insert(name, value); - } - - Self::new(TupleTypeSignature::try_from(type_map)?, data_map) - } - - // TODO: add tests from mutation testing results #4834 - #[cfg_attr(test, mutants::skip)] - pub fn from_data_typed( - epoch: &StacksEpochId, - data: Vec<(ClarityName, Value)>, - expected: &TupleTypeSignature, - ) -> Result { - let mut data_map = BTreeMap::new(); - for (name, value) in data.into_iter() { - let expected_type = expected - .field_type(&name) - .ok_or(InterpreterError::FailureConstructingTupleWithType)?; - if !expected_type.admits(epoch, &value)? { - return Err(InterpreterError::FailureConstructingTupleWithType.into()); - } - data_map.insert(name, value); - } - Self::new(expected.clone(), data_map) - } - - pub fn get(&self, name: &str) -> Result<&Value> { - self.data_map.get(name).ok_or_else(|| { - CheckErrors::NoSuchTupleField(name.to_string(), self.type_signature.clone()).into() - }) - } - - pub fn get_owned(mut self, name: &str) -> Result { - self.data_map.remove(name).ok_or_else(|| { - CheckErrors::NoSuchTupleField(name.to_string(), self.type_signature.clone()).into() - }) - } - - pub fn shallow_merge(mut base: TupleData, updates: TupleData) -> Result { - let TupleData { - data_map, - mut type_signature, - } = updates; - for (name, value) in data_map.into_iter() { - base.data_map.insert(name, value); - } - base.type_signature.shallow_merge(&mut type_signature); - Ok(base) - } -} - -impl fmt::Display for TupleData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "(tuple")?; - for (name, value) in self.data_map.iter() { - write!(f, " ")?; - write!(f, "({} {value})", &**name)?; - } - write!(f, ")") - } -} - -/// Given the serialized string representation of a Clarity value, -/// return the size of the same byte representation. 
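// Hedged illustration, not part of this change: the helper below simply
// halves the hex-string length. Assuming the consensus encoding referenced
// by the TypePrefix table in serialization.rs below (a one-byte prefix, 0x01
// for UInt, followed by the 16-byte big-endian integer), `u1` serializes to
// 34 hex characters, i.e. 17 bytes on the wire. The function name here is
// hypothetical.
fn serialized_u1_byte_len() -> u64 {
    // "01" prefix + 32 hex chars of payload -> 34 / 2 = 17
    byte_len_of_serialization("0100000000000000000000000000000001")
}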
-pub fn byte_len_of_serialization(serialized: &str) -> u64 { - serialized.len() as u64 / 2 -} - -#[cfg(test)] -mod test { - use super::*; - #[test] - fn test_constructors() { - assert_eq!( - Value::list_with_type( - &StacksEpochId::latest(), - vec![Value::Int(5), Value::Int(2)], - ListTypeData::new_list(TypeSignature::BoolType, 3).unwrap() - ), - Err(InterpreterError::FailureConstructingListWithType.into()) - ); - assert_eq!( - ListTypeData::new_list(TypeSignature::IntType, MAX_VALUE_SIZE), - Err(CheckErrors::ValueTooLarge) - ); - - assert_eq!( - Value::buff_from(vec![0; (MAX_VALUE_SIZE + 1) as usize]), - Err(CheckErrors::ValueTooLarge.into()) - ); - - // Test that wrappers (okay, error, some) - // correctly error when _they_ cause the value size - // to exceed the max value size (note, the buffer constructor - // isn't causing the error). - assert_eq!( - Value::okay(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CheckErrors::ValueTooLarge.into()) - ); - - assert_eq!( - Value::error(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CheckErrors::ValueTooLarge.into()) - ); - - assert_eq!( - Value::some(Value::buff_from(vec![0; (MAX_VALUE_SIZE) as usize]).unwrap()), - Err(CheckErrors::ValueTooLarge.into()) - ); - - // Test that the depth limit is correctly enforced: - // for tuples, lists, somes, okays, errors. - - let cons = || { - Value::some(Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::some(Value::some( - Value::some(Value::some(Value::Int(1))?)?, - )?)?)?)?, - )?)?)?)?)?, - )?)?)?)?)?, - )?)?)?)?)?, - )?)?)?)?)?, - )?)?)?)?) - }; - let inner_value = cons().unwrap(); - assert_eq!( - TupleData::from_data(vec![("a".into(), inner_value.clone())]), - Err(CheckErrors::TypeSignatureTooDeep.into()) - ); - - assert_eq!( - Value::list_from(vec![inner_value.clone()]), - Err(CheckErrors::TypeSignatureTooDeep.into()) - ); - assert_eq!( - Value::okay(inner_value.clone()), - Err(CheckErrors::TypeSignatureTooDeep.into()) - ); - assert_eq!( - Value::error(inner_value.clone()), - Err(CheckErrors::TypeSignatureTooDeep.into()) - ); - assert_eq!( - Value::some(inner_value), - Err(CheckErrors::TypeSignatureTooDeep.into()) - ); - - if std::env::var("CIRCLE_TESTING") == Ok("1".to_string()) { - println!("Skipping allocation test on Circle"); - return; - } - - // on 32-bit archs, this error cannot even happen, so don't test (and cause an overflow panic) - if (u32::MAX as usize) < usize::MAX { - assert_eq!( - Value::buff_from(vec![0; (u32::MAX as usize) + 10]), - Err(CheckErrors::ValueTooLarge.into()) - ); - } - } - - #[test] - fn simple_size_test() { - assert_eq!(Value::Int(10).size().unwrap(), 16); - } - - #[test] - fn simple_tuple_get_test() { - let t = TupleData::from_data(vec![("abc".into(), Value::Int(0))]).unwrap(); - assert_eq!(t.get("abc"), Ok(&Value::Int(0))); - // should error! 
- t.get("abcd").unwrap_err(); - } - - #[test] - fn test_some_displays() { - assert_eq!( - &format!( - "{}", - Value::list_from(vec![Value::Int(10), Value::Int(5)]).unwrap() - ), - "(10 5)" - ); - assert_eq!( - &format!("{}", Value::some(Value::Int(10)).unwrap()), - "(some 10)" - ); - assert_eq!( - &format!("{}", Value::okay(Value::Int(10)).unwrap()), - "(ok 10)" - ); - assert_eq!( - &format!("{}", Value::error(Value::Int(10)).unwrap()), - "(err 10)" - ); - assert_eq!(&format!("{}", Value::none()), "none"); - assert_eq!( - &format!( - "{}", - Value::from( - PrincipalData::parse_standard_principal( - "SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G" - ) - .unwrap() - ) - ), - "SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G" - ); - - assert_eq!( - &format!( - "{}", - Value::from(TupleData::from_data(vec![("a".into(), Value::Int(2))]).unwrap()) - ), - "(tuple (a 2))" - ); - } - - #[test] - fn expect_buff() { - let buff = Value::Sequence(SequenceData::Buffer(BuffData { - data: vec![1, 2, 3, 4, 5], - })); - assert_eq!(buff.clone().expect_buff(5).unwrap(), vec![1, 2, 3, 4, 5]); - assert_eq!(buff.clone().expect_buff(6).unwrap(), vec![1, 2, 3, 4, 5]); - assert_eq!( - buff.clone().expect_buff_padded(6, 0).unwrap(), - vec![1, 2, 3, 4, 5, 0] - ); - assert_eq!(buff.clone().expect_buff(10).unwrap(), vec![1, 2, 3, 4, 5]); - assert_eq!( - buff.expect_buff_padded(10, 1).unwrap(), - vec![1, 2, 3, 4, 5, 1, 1, 1, 1, 1] - ); - } - - #[test] - #[should_panic] - fn expect_buff_too_small() { - let buff = Value::Sequence(SequenceData::Buffer(BuffData { - data: vec![1, 2, 3, 4, 5], - })); - let _ = buff.expect_buff(4).unwrap(); - } -} diff --git a/clarity/src/vm/types/serialization.rs b/clarity/src/vm/types/serialization.rs index 95d3208065..5989dca9df 100644 --- a/clarity/src/vm/types/serialization.rs +++ b/clarity/src/vm/types/serialization.rs @@ -14,1314 +14,15 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::io::{Read, Write}; -use std::{cmp, error, str}; +use std::str; -use lazy_static::lazy_static; -use stacks_common::codec::{Error as codec_error, StacksMessageCodec}; -use stacks_common::types::StacksEpochId; +pub use clarity_serialization::types::serialization::{ + SerializationError, TypePrefix, NONE_SERIALIZATION_LEN, +}; use stacks_common::util::hash::{hex_bytes, to_hex}; -use stacks_common::util::retry::BoundReader; -use super::{ListTypeData, TupleTypeSignature}; use crate::vm::database::{ClarityDeserializable, ClaritySerializable}; -use crate::vm::errors::{CheckErrors, Error as ClarityError, IncomparableError, InterpreterError}; -use crate::vm::representations::{ClarityName, ContractName, MAX_STRING_LEN}; -use crate::vm::types::{ - BufferLength, CallableData, CharType, OptionalData, PrincipalData, QualifiedContractIdentifier, - SequenceData, SequenceSubtype, StandardPrincipalData, StringSubtype, TupleData, TypeSignature, - Value, BOUND_VALUE_SERIALIZATION_BYTES, MAX_TYPE_DEPTH, MAX_VALUE_SIZE, -}; - -/// Errors that may occur in serialization or deserialization -/// If deserialization failed because the described type is a bad type and -/// a CheckError is thrown, it gets wrapped in BadTypeError. 
-/// Any IOErrrors from the supplied buffer will manifest as IOError variants, -/// except for EOF -- if the deserialization code experiences an EOF, it is caught -/// and rethrown as DeserializationError -#[derive(Debug, PartialEq)] -pub enum SerializationError { - IOError(IncomparableError), - BadTypeError(CheckErrors), - DeserializationError(String), - DeserializeExpected(TypeSignature), - LeftoverBytesInDeserialization, - SerializationError(String), - UnexpectedSerialization, -} - -lazy_static! { - pub static ref NONE_SERIALIZATION_LEN: u64 = { - #[allow(clippy::unwrap_used)] - u64::try_from(Value::none().serialize_to_vec().unwrap().len()).unwrap() - }; -} - -/// Deserialization uses a specific epoch for passing to the type signature checks -/// The reason this is pinned to Epoch21 is so that values stored before epoch-2.4 -/// can still be read from the database. -const DESERIALIZATION_TYPE_CHECK_EPOCH: StacksEpochId = StacksEpochId::Epoch21; - -/// Pre-sanitization values could end up being larger than the deserializer originally -/// supported, so we increase the bound to a higher level limit imposed by the cost checker. -const SANITIZATION_READ_BOUND: u64 = 15_000_000; - -/// Before epoch-2.4, this is the deserialization depth limit. -/// After epoch-2.4, with type sanitization support, the full -/// clarity depth limit is supported. -const UNSANITIZED_DEPTH_CHECK: usize = 16; - -impl std::fmt::Display for SerializationError { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - SerializationError::IOError(e) => { - write!(f, "Serialization error caused by IO: {}", e.err) - } - SerializationError::BadTypeError(e) => { - write!(f, "Deserialization error, bad type, caused by: {e}") - } - SerializationError::DeserializationError(e) => { - write!(f, "Deserialization error: {e}") - } - SerializationError::SerializationError(e) => { - write!(f, "Serialization error: {e}") - } - SerializationError::DeserializeExpected(e) => write!( - f, - "Deserialization expected the type of the input to be: {e}" - ), - SerializationError::UnexpectedSerialization => { - write!(f, "The serializer handled an input in an unexpected way") - } - SerializationError::LeftoverBytesInDeserialization => { - write!(f, "Deserialization error: bytes left over in buffer") - } - } - } -} - -impl error::Error for SerializationError { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match self { - SerializationError::IOError(e) => Some(&e.err), - SerializationError::BadTypeError(e) => Some(e), - _ => None, - } - } -} - -// Note: a byte stream that describes a longer type than -// there are available bytes to read will result in an IOError(UnexpectedEOF) -impl From for SerializationError { - fn from(err: std::io::Error) -> Self { - SerializationError::IOError(IncomparableError { err }) - } -} - -impl From<&str> for SerializationError { - fn from(e: &str) -> Self { - SerializationError::DeserializationError(e.into()) - } -} - -impl From for SerializationError { - fn from(e: CheckErrors) -> Self { - SerializationError::BadTypeError(e) - } -} - -define_u8_enum!(TypePrefix { - Int = 0, - UInt = 1, - Buffer = 2, - BoolTrue = 3, - BoolFalse = 4, - PrincipalStandard = 5, - PrincipalContract = 6, - ResponseOk = 7, - ResponseErr = 8, - OptionalNone = 9, - OptionalSome = 10, - List = 11, - Tuple = 12, - StringASCII = 13, - StringUTF8 = 14 -}); - -impl From<&PrincipalData> for TypePrefix { - fn from(v: &PrincipalData) -> TypePrefix { - use super::PrincipalData::*; - match v { - 
Standard(_) => TypePrefix::PrincipalStandard, - Contract(_) => TypePrefix::PrincipalContract, - } - } -} - -impl From<&Value> for TypePrefix { - fn from(v: &Value) -> TypePrefix { - use super::CharType; - use super::SequenceData::*; - use super::Value::*; - - match v { - Int(_) => TypePrefix::Int, - UInt(_) => TypePrefix::UInt, - Bool(value) => { - if *value { - TypePrefix::BoolTrue - } else { - TypePrefix::BoolFalse - } - } - Principal(p) => TypePrefix::from(p), - Response(response) => { - if response.committed { - TypePrefix::ResponseOk - } else { - TypePrefix::ResponseErr - } - } - Optional(OptionalData { data: None }) => TypePrefix::OptionalNone, - Optional(OptionalData { data: Some(_) }) => TypePrefix::OptionalSome, - Tuple(_) => TypePrefix::Tuple, - Sequence(Buffer(_)) => TypePrefix::Buffer, - Sequence(List(_)) => TypePrefix::List, - Sequence(String(CharType::ASCII(_))) => TypePrefix::StringASCII, - Sequence(String(CharType::UTF8(_))) => TypePrefix::StringUTF8, - &CallableContract(_) => TypePrefix::PrincipalContract, - } - } -} - -/// Not a public trait, -/// this is just used to simplify serializing some types that -/// are repeatedly serialized or deserialized. -trait ClarityValueSerializable { - fn serialize_write(&self, w: &mut W) -> std::io::Result<()>; - fn deserialize_read(r: &mut R) -> Result; -} - -impl ClarityValueSerializable for StandardPrincipalData { - fn serialize_write(&self, w: &mut W) -> std::io::Result<()> { - w.write_all(&[self.version()])?; - w.write_all(&self.1) - } - - fn deserialize_read(r: &mut R) -> Result { - let mut version = [0; 1]; - let mut data = [0; 20]; - r.read_exact(&mut version)?; - r.read_exact(&mut data)?; - StandardPrincipalData::new(version[0], data) - .map_err(|_| SerializationError::UnexpectedSerialization) - } -} - -macro_rules! 
serialize_guarded_string { - ($Name:ident) => { - impl ClarityValueSerializable<$Name> for $Name { - fn serialize_write(&self, w: &mut W) -> std::io::Result<()> { - w.write_all(&self.len().to_be_bytes())?; - // self.as_bytes() is always len bytes, because this is only used for GuardedStrings - // which are a subset of ASCII - w.write_all(self.as_str().as_bytes()) - } - - fn deserialize_read(r: &mut R) -> Result { - let mut len = [0; 1]; - r.read_exact(&mut len)?; - let len = u8::from_be_bytes(len); - if len > MAX_STRING_LEN { - return Err(SerializationError::DeserializationError( - "String too long".to_string(), - )); - } - - let mut data = vec![0; len as usize]; - r.read_exact(&mut data)?; - - String::from_utf8(data) - .map_err(|_| "Non-UTF8 string data".into()) - .and_then(|x| $Name::try_from(x).map_err(|_| "Illegal Clarity string".into())) - } - } - }; -} - -serialize_guarded_string!(ClarityName); -serialize_guarded_string!(ContractName); - -impl PrincipalData { - fn inner_consensus_serialize(&self, w: &mut W) -> std::io::Result<()> { - w.write_all(&[TypePrefix::from(self) as u8])?; - match self { - PrincipalData::Standard(p) => p.serialize_write(w), - PrincipalData::Contract(contract_identifier) => { - contract_identifier.issuer.serialize_write(w)?; - contract_identifier.name.serialize_write(w) - } - } - } - - fn inner_consensus_deserialize( - r: &mut R, - ) -> Result { - let mut header = [0]; - r.read_exact(&mut header)?; - - let prefix = TypePrefix::from_u8(header[0]).ok_or("Bad principal prefix")?; - - match prefix { - TypePrefix::PrincipalStandard => { - StandardPrincipalData::deserialize_read(r).map(PrincipalData::from) - } - TypePrefix::PrincipalContract => { - let issuer = StandardPrincipalData::deserialize_read(r)?; - let name = ContractName::deserialize_read(r)?; - Ok(PrincipalData::from(QualifiedContractIdentifier { - issuer, - name, - })) - } - _ => Err("Bad principal prefix".into()), - } - } -} - -impl StacksMessageCodec for PrincipalData { - fn consensus_serialize(&self, fd: &mut W) -> Result<(), codec_error> { - self.inner_consensus_serialize(fd) - .map_err(codec_error::WriteError) - } - - fn consensus_deserialize(fd: &mut R) -> Result { - PrincipalData::inner_consensus_deserialize(fd) - .map_err(|e| codec_error::DeserializeError(e.to_string())) - } -} - -macro_rules! check_match { - ($item:expr, $Pattern:pat) => { - match $item { - None => Ok(()), - Some($Pattern) => Ok(()), - Some(x) => Err(SerializationError::DeserializeExpected(x.clone())), - } - }; -} - -/// `DeserializeStackItem` objects are used by the deserializer to indicate -/// how the deserialization loop's current object is to be handled once it is -/// deserialized: i.e., is the object the top-level object for the serialization -/// or is it an entry in a composite type (e.g., a list or tuple)? -enum DeserializeStackItem { - List { - items: Vec, - expected_len: u32, - expected_type: Option, - }, - Tuple { - items: Vec<(ClarityName, Value)>, - expected_len: u64, - processed_entries: u64, - expected_type: Option, - next_name: ClarityName, - next_sanitize: bool, - }, - OptionSome { - inner_expected_type: Option, - }, - ResponseOk { - inner_expected_type: Option, - }, - ResponseErr { - inner_expected_type: Option, - }, - TopLevel { - expected_type: Option, - }, -} - -impl DeserializeStackItem { - /// What is the expected type for the child of this deserialization stack item? 
- /// - /// Returns `None` if this stack item either doesn't have an expected type, or the - /// next child is going to be sanitized/elided. - fn next_expected_type(&self) -> Result, SerializationError> { - match self { - DeserializeStackItem::List { expected_type, .. } => Ok(expected_type - .as_ref() - .map(|lt| lt.get_list_item_type()) - .cloned()), - DeserializeStackItem::Tuple { - expected_type, - next_name, - next_sanitize, - .. - } => match expected_type { - None => Ok(None), - Some(some_tuple) => { - // if we're sanitizing this tuple, and the `next_name` field is to be - // removed, don't return an expected type. - if *next_sanitize { - return Ok(None); - } - let field_type = some_tuple.field_type(next_name).ok_or_else(|| { - SerializationError::DeserializeExpected(TypeSignature::TupleType( - some_tuple.clone(), - )) - })?; - Ok(Some(field_type.clone())) - } - }, - DeserializeStackItem::OptionSome { - inner_expected_type, - } => Ok(inner_expected_type.clone()), - DeserializeStackItem::ResponseOk { - inner_expected_type, - } => Ok(inner_expected_type.clone()), - DeserializeStackItem::ResponseErr { - inner_expected_type, - } => Ok(inner_expected_type.clone()), - DeserializeStackItem::TopLevel { expected_type } => Ok(expected_type.clone()), - } - } -} - -impl TypeSignature { - /// Return the maximum length of the consensus serialization of a - /// Clarity value of this type. The returned length *may* not fit - /// in a Clarity buffer! For example, the maximum serialized - /// size of a `(buff 1024*1024)` is `1+1024*1024` because of the - /// type prefix byte. However, that is 1 byte larger than the maximum - /// buffer size in Clarity. - pub fn max_serialized_size(&self) -> Result { - let type_prefix_size = 1; - - let max_output_size = match self { - TypeSignature::NoType => { - // A `NoType` should *never* actually be evaluated - // (`NoType` corresponds to the Some branch of a - // `none` that is never matched with a corresponding - // `some` or similar with `result` types). So, when - // serializing an object with a `NoType`, the other - // branch should always be used. - return Err(CheckErrors::CouldNotDetermineSerializationType); - } - TypeSignature::IntType => 16, - TypeSignature::UIntType => 16, - TypeSignature::BoolType => 0, - TypeSignature::SequenceType(SequenceSubtype::ListType(list_type)) => { - // u32 length as big-endian bytes - let list_length_encode = 4; - list_type - .get_max_len() - .checked_mul(list_type.get_list_item_type().max_serialized_size()?) - .and_then(|x| x.checked_add(list_length_encode)) - .ok_or_else(|| CheckErrors::ValueTooLarge)? - } - TypeSignature::SequenceType(SequenceSubtype::BufferType(buff_length)) => { - // u32 length as big-endian bytes - let buff_length_encode = 4; - u32::from(buff_length) - .checked_add(buff_length_encode) - .ok_or_else(|| CheckErrors::ValueTooLarge)? - } - TypeSignature::SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII( - length, - ))) => { - // u32 length as big-endian bytes - let str_length_encode = 4; - // ascii is 1-byte per character - u32::from(length) - .checked_add(str_length_encode) - .ok_or_else(|| CheckErrors::ValueTooLarge)? - } - TypeSignature::SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8( - length, - ))) => { - // u32 length as big-endian bytes - let str_length_encode = 4; - // utf-8 is maximum 4 bytes per codepoint (which is the length) - u32::from(length) - .checked_mul(4) - .and_then(|x| x.checked_add(str_length_encode)) - .ok_or_else(|| CheckErrors::ValueTooLarge)? 
- } - TypeSignature::PrincipalType - | TypeSignature::CallableType(_) - | TypeSignature::TraitReferenceType(_) => { - // version byte + 20 byte hash160 - let maximum_issuer_size = 21; - let contract_name_length_encode = 1; - // contract name maximum length is `MAX_STRING_LEN` (128), and ASCII - let maximum_contract_name = MAX_STRING_LEN as u32; - maximum_contract_name + maximum_issuer_size + contract_name_length_encode - } - TypeSignature::TupleType(tuple_type) => { - let type_map = tuple_type.get_type_map(); - // u32 length as big-endian bytes - let tuple_length_encode: u32 = 4; - let mut total_size = tuple_length_encode; - for (key, value) in type_map.iter() { - let value_size = value.max_serialized_size()?; - total_size = total_size - .checked_add(1) // length of key-name - .and_then(|x| x.checked_add(key.len() as u32)) // ClarityName is ascii-only, so 1 byte per length - .and_then(|x| x.checked_add(value_size)) - .ok_or_else(|| CheckErrors::ValueTooLarge)?; - } - total_size - } - TypeSignature::OptionalType(ref some_type) => { - match some_type.max_serialized_size() { - Ok(size) => size, - // if NoType, then this is just serializing a none - // value, which is only the type prefix - Err(CheckErrors::CouldNotDetermineSerializationType) => 0, - Err(e) => return Err(e), - } - } - TypeSignature::ResponseType(ref response_types) => { - let (ok_type, err_type) = response_types.as_ref(); - let (ok_type_max_size, no_ok_type) = match ok_type.max_serialized_size() { - Ok(size) => (size, false), - Err(CheckErrors::CouldNotDetermineSerializationType) => (0, true), - Err(e) => return Err(e), - }; - let err_type_max_size = match err_type.max_serialized_size() { - Ok(size) => size, - Err(CheckErrors::CouldNotDetermineSerializationType) => { - if no_ok_type { - // if both the ok type and the error type are NoType, - // throw a CheckError. This should not be possible, but the check - // is done out of caution. - return Err(CheckErrors::CouldNotDetermineSerializationType); - } else { - 0 - } - } - Err(e) => return Err(e), - }; - cmp::max(ok_type_max_size, err_type_max_size) - } - TypeSignature::ListUnionType(_) => { - return Err(CheckErrors::CouldNotDetermineSerializationType) - } - }; - - max_output_size - .checked_add(type_prefix_size) - .ok_or_else(|| CheckErrors::ValueTooLarge) - } -} - -impl Value { - pub fn deserialize_read( - r: &mut R, - expected_type: Option<&TypeSignature>, - sanitize: bool, - ) -> Result { - Self::deserialize_read_count(r, expected_type, sanitize).map(|(value, _)| value) - } - - /// Deserialize just like `deserialize_read` but also - /// return the bytes read. - /// If `sanitize` argument is set to true and `expected_type` is supplied, - /// this method will remove any extraneous tuple fields which may have been - /// allowed by `least_super_type`. 
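For reference, the read-count and sanitization plumbing below is normally reached through the higher-level helpers serialize_to_hex / try_deserialize_hex defined later in this file. A round-trip sketch, assuming those helpers stay exported from clarity::vm::types after the move; the expected hex matches the Int(1) case in test_vectors.

use clarity::vm::types::{TypeSignature, Value};

fn main() {
    let v = Value::Int(1);
    // Consensus serialization is hex-encoded: a one-byte type prefix
    // (0x00 for Int) followed by the 16-byte big-endian integer.
    let hex = v.serialize_to_hex().unwrap();
    assert_eq!(hex, "0000000000000000000000000000000001");

    // Typed deserialization enforces the expected type. With sanitize = false,
    // extraneous tuple fields are rejected instead of being dropped.
    let back = Value::try_deserialize_hex(&hex, &TypeSignature::IntType, false).unwrap();
    assert_eq!(back, v);
}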
- pub fn deserialize_read_count( - r: &mut R, - expected_type: Option<&TypeSignature>, - sanitize: bool, - ) -> Result<(Value, u64), SerializationError> { - let bound_value_serialization_bytes = if sanitize && expected_type.is_some() { - SANITIZATION_READ_BOUND - } else { - BOUND_VALUE_SERIALIZATION_BYTES as u64 - }; - let mut bound_reader = BoundReader::from_reader(r, bound_value_serialization_bytes); - let value = Value::inner_deserialize_read(&mut bound_reader, expected_type, sanitize)?; - let bytes_read = bound_reader.num_read(); - if let Some(expected_type) = expected_type { - let expect_size = match expected_type.max_serialized_size() { - Ok(x) => x, - Err(e) => { - debug!( - "Failed to determine max serialized size when checking expected_type argument"; - "err" => ?e - ); - return Ok((value, bytes_read)); - } - }; - - if bytes_read > expect_size as u64 { - // this can happen due to sanitization, so its no longer indicative of a *problem* with the node. - debug!( - "Deserialized more bytes than expected size during deserialization. Expected size = {expect_size}, bytes read = {bytes_read}, type = {expected_type}" - ); - } - } - - Ok((value, bytes_read)) - } - - fn inner_deserialize_read( - r: &mut R, - top_expected_type: Option<&TypeSignature>, - sanitize: bool, - ) -> Result { - use super::Value::*; - - let mut stack = vec![DeserializeStackItem::TopLevel { - expected_type: top_expected_type.cloned(), - }]; - - while !stack.is_empty() { - let depth_check = if sanitize { - MAX_TYPE_DEPTH as usize - } else { - UNSANITIZED_DEPTH_CHECK - }; - if stack.len() > depth_check { - return Err(CheckErrors::TypeSignatureTooDeep.into()); - } - - #[allow(clippy::expect_used)] - let expected_type = stack - .last() - .expect("FATAL: stack.last() should always be some() because of loop condition") - .next_expected_type()?; - - let mut header = [0]; - r.read_exact(&mut header)?; - let prefix = TypePrefix::from_u8(header[0]).ok_or("Bad type prefix")?; - - let item = match prefix { - TypePrefix::Int => { - check_match!(expected_type, TypeSignature::IntType)?; - let mut buffer = [0; 16]; - r.read_exact(&mut buffer)?; - Ok(Int(i128::from_be_bytes(buffer))) - } - TypePrefix::UInt => { - check_match!(expected_type, TypeSignature::UIntType)?; - let mut buffer = [0; 16]; - r.read_exact(&mut buffer)?; - Ok(UInt(u128::from_be_bytes(buffer))) - } - TypePrefix::Buffer => { - let mut buffer_len = [0; 4]; - r.read_exact(&mut buffer_len)?; - let buffer_len = BufferLength::try_from(u32::from_be_bytes(buffer_len))?; - - if let Some(x) = &expected_type { - let passed_test = match x { - TypeSignature::SequenceType(SequenceSubtype::BufferType( - expected_len, - )) => u32::from(&buffer_len) <= u32::from(expected_len), - _ => false, - }; - if !passed_test { - return Err(SerializationError::DeserializeExpected(x.clone())); - } - } - - let mut data = vec![0; u32::from(buffer_len) as usize]; - - r.read_exact(&mut data[..])?; - - Value::buff_from(data).map_err(|_| "Bad buffer".into()) - } - TypePrefix::BoolTrue => { - check_match!(expected_type, TypeSignature::BoolType)?; - Ok(Bool(true)) - } - TypePrefix::BoolFalse => { - check_match!(expected_type, TypeSignature::BoolType)?; - Ok(Bool(false)) - } - TypePrefix::PrincipalStandard => { - check_match!(expected_type, TypeSignature::PrincipalType)?; - StandardPrincipalData::deserialize_read(r).map(Value::from) - } - TypePrefix::PrincipalContract => { - check_match!(expected_type, TypeSignature::PrincipalType)?; - let issuer = StandardPrincipalData::deserialize_read(r)?; - let name = 
ContractName::deserialize_read(r)?; - Ok(Value::from(QualifiedContractIdentifier { issuer, name })) - } - TypePrefix::ResponseOk | TypePrefix::ResponseErr => { - let committed = prefix == TypePrefix::ResponseOk; - - let expect_contained_type = match &expected_type { - None => None, - Some(x) => { - let contained_type = match (committed, x) { - (true, TypeSignature::ResponseType(types)) => Ok(&types.0), - (false, TypeSignature::ResponseType(types)) => Ok(&types.1), - _ => Err(SerializationError::DeserializeExpected(x.clone())), - }?; - Some(contained_type) - } - }; - - let stack_item = if committed { - DeserializeStackItem::ResponseOk { - inner_expected_type: expect_contained_type.cloned(), - } - } else { - DeserializeStackItem::ResponseErr { - inner_expected_type: expect_contained_type.cloned(), - } - }; - - stack.push(stack_item); - continue; - } - TypePrefix::OptionalNone => { - check_match!(expected_type, TypeSignature::OptionalType(_))?; - Ok(Value::none()) - } - TypePrefix::OptionalSome => { - let expect_contained_type = match &expected_type { - None => None, - Some(x) => { - let contained_type = match x { - TypeSignature::OptionalType(some_type) => Ok(some_type.as_ref()), - _ => Err(SerializationError::DeserializeExpected(x.clone())), - }?; - Some(contained_type) - } - }; - - let stack_item = DeserializeStackItem::OptionSome { - inner_expected_type: expect_contained_type.cloned(), - }; - - stack.push(stack_item); - continue; - } - TypePrefix::List => { - let mut len = [0; 4]; - r.read_exact(&mut len)?; - let len = u32::from_be_bytes(len); - - if len > MAX_VALUE_SIZE { - return Err("Illegal list type".into()); - } - - let (list_type, _entry_type) = match expected_type.as_ref() { - None => (None, None), - Some(TypeSignature::SequenceType(SequenceSubtype::ListType(list_type))) => { - if len > list_type.get_max_len() { - // unwrap is safe because of the match condition - #[allow(clippy::unwrap_used)] - return Err(SerializationError::DeserializeExpected( - expected_type.unwrap(), - )); - } - (Some(list_type), Some(list_type.get_list_item_type())) - } - Some(x) => return Err(SerializationError::DeserializeExpected(x.clone())), - }; - - if len > 0 { - let items = Vec::with_capacity(len as usize); - let stack_item = DeserializeStackItem::List { - items, - expected_len: len, - expected_type: list_type.cloned(), - }; - - stack.push(stack_item); - continue; - } else { - let finished_list = if let Some(list_type) = list_type { - Value::list_with_type( - &DESERIALIZATION_TYPE_CHECK_EPOCH, - vec![], - list_type.clone(), - ) - .map_err(|_| "Illegal list type")? - } else { - Value::cons_list_unsanitized(vec![]).map_err(|_| "Illegal list type")? 
- }; - - Ok(finished_list) - } - } - TypePrefix::Tuple => { - let mut len = [0; 4]; - r.read_exact(&mut len)?; - let len = u32::from_be_bytes(len); - let expected_len = u64::from(len); - - if len > MAX_VALUE_SIZE { - return Err(SerializationError::DeserializationError( - "Illegal tuple type".to_string(), - )); - } - - let tuple_type = match expected_type.as_ref() { - None => None, - Some(TypeSignature::TupleType(tuple_type)) => { - if sanitize { - if u64::from(len) < tuple_type.len() { - // unwrap is safe because of the match condition - #[allow(clippy::unwrap_used)] - return Err(SerializationError::DeserializeExpected( - expected_type.unwrap(), - )); - } - } else if u64::from(len) != tuple_type.len() { - // unwrap is safe because of the match condition - #[allow(clippy::unwrap_used)] - return Err(SerializationError::DeserializeExpected( - expected_type.unwrap(), - )); - } - Some(tuple_type) - } - Some(x) => return Err(SerializationError::DeserializeExpected(x.clone())), - }; - - if len > 0 { - let items = Vec::with_capacity(expected_len as usize); - let first_key = ClarityName::deserialize_read(r)?; - // figure out if the next (key, value) pair for this - // tuple will be elided (or sanitized) from the tuple. - // the logic here is that the next pair should be elided if: - // * `sanitize` parameter is true - // * `tuple_type` is some (i.e., there is an expected type for the - // tuple) - // * `tuple_type` does not contain an entry for `key` - let next_sanitize = sanitize - && tuple_type - .map(|tt| tt.field_type(&first_key).is_none()) - .unwrap_or(false); - let stack_item = DeserializeStackItem::Tuple { - items, - expected_len, - processed_entries: 0, - expected_type: tuple_type.cloned(), - next_name: first_key, - next_sanitize, - }; - - stack.push(stack_item); - continue; - } else { - let finished_tuple = if let Some(tuple_type) = tuple_type { - TupleData::from_data_typed( - &DESERIALIZATION_TYPE_CHECK_EPOCH, - vec![], - tuple_type, - ) - .map_err(|_| "Illegal tuple type") - .map(Value::from)? - } else { - TupleData::from_data(vec![]) - .map_err(|_| "Illegal tuple type") - .map(Value::from)? 
- }; - Ok(finished_tuple) - } - } - TypePrefix::StringASCII => { - let mut buffer_len = [0; 4]; - r.read_exact(&mut buffer_len)?; - let buffer_len = BufferLength::try_from(u32::from_be_bytes(buffer_len))?; - - if let Some(x) = &expected_type { - let passed_test = match x { - TypeSignature::SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(expected_len), - )) => u32::from(&buffer_len) <= u32::from(expected_len), - _ => false, - }; - if !passed_test { - return Err(SerializationError::DeserializeExpected(x.clone())); - } - } - - let mut data = vec![0; u32::from(buffer_len) as usize]; - - r.read_exact(&mut data[..])?; - - Value::string_ascii_from_bytes(data).map_err(|_| "Bad string".into()) - } - TypePrefix::StringUTF8 => { - let mut total_len = [0; 4]; - r.read_exact(&mut total_len)?; - let total_len = BufferLength::try_from(u32::from_be_bytes(total_len))?; - - let mut data: Vec = vec![0; u32::from(total_len) as usize]; - - r.read_exact(&mut data[..])?; - - let value = Value::string_utf8_from_bytes(data) - .map_err(|_| "Illegal string_utf8 type".into()); - - if let Some(x) = &expected_type { - let passed_test = match (x, &value) { - ( - TypeSignature::SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(expected_len), - )), - Ok(Value::Sequence(SequenceData::String(CharType::UTF8(utf8)))), - ) => utf8.data.len() as u32 <= u32::from(expected_len), - _ => false, - }; - if !passed_test { - return Err(SerializationError::DeserializeExpected(x.clone())); - } - } - - value - } - }?; - - let mut finished_item = Some(item); - while let Some(item) = finished_item.take() { - let stack_bottom = if let Some(stack_item) = stack.pop() { - stack_item - } else { - // this should be unreachable! - warn!( - "Deserializer reached unexpected path: item processed, but deserializer stack does not expect another value"; - "item" => %item, - ); - return Err("Deserializer processed item, but deserializer stack does not expect another value".into()); - }; - match stack_bottom { - DeserializeStackItem::TopLevel { .. } => return Ok(item), - DeserializeStackItem::List { - mut items, - expected_len, - expected_type, - } => { - items.push(item); - if expected_len as usize <= items.len() { - // list is finished! - let finished_list = if let Some(list_type) = expected_type { - Value::list_with_type( - &DESERIALIZATION_TYPE_CHECK_EPOCH, - items, - list_type.clone(), - ) - .map_err(|_| "Illegal list type")? - } else { - Value::cons_list_unsanitized(items) - .map_err(|_| "Illegal list type")? - }; - - finished_item.replace(finished_list); - } else { - // list is not finished, reinsert on stack - stack.push(DeserializeStackItem::List { - items, - expected_len, - expected_type, - }); - } - } - DeserializeStackItem::Tuple { - mut items, - expected_len, - expected_type, - next_name, - next_sanitize, - mut processed_entries, - } => { - let push_entry = if sanitize { - if expected_type.is_some() { - // if performing tuple sanitization, don't include a field - // if it was sanitized - !next_sanitize - } else { - // always push the entry if there's no type expectation - true - } - } else { - true - }; - let tuple_entry = (next_name, item); - if push_entry { - items.push(tuple_entry); - } - processed_entries += 1; - if expected_len <= processed_entries { - // tuple is finished! 
- let finished_tuple = if let Some(tuple_type) = expected_type { - if items.len() != tuple_type.len() as usize { - return Err(SerializationError::DeserializeExpected( - TypeSignature::TupleType(tuple_type), - )); - } - TupleData::from_data_typed( - &DESERIALIZATION_TYPE_CHECK_EPOCH, - items, - &tuple_type, - ) - .map_err(|_| "Illegal tuple type") - .map(Value::from)? - } else { - TupleData::from_data(items) - .map_err(|_| "Illegal tuple type") - .map(Value::from)? - }; - - finished_item.replace(finished_tuple); - } else { - // tuple is not finished, read the next key name and reinsert on stack - let key = ClarityName::deserialize_read(r)?; - // figure out if the next (key, value) pair for this - // tuple will be elided (or sanitized) from the tuple. - // the logic here is that the next pair should be elided if: - // * `sanitize` parameter is true - // * `tuple_type` is some (i.e., there is an expected type for the - // tuple) - // * `tuple_type` does not contain an entry for `key` - let next_sanitize = sanitize - && expected_type - .as_ref() - .map(|tt| tt.field_type(&key).is_none()) - .unwrap_or(false); - stack.push(DeserializeStackItem::Tuple { - items, - expected_type, - expected_len, - next_name: key, - next_sanitize, - processed_entries, - }); - } - } - DeserializeStackItem::OptionSome { .. } => { - let finished_some = Value::some(item).map_err(|_x| "Value too large")?; - finished_item.replace(finished_some); - } - DeserializeStackItem::ResponseOk { .. } => { - let finished_some = Value::okay(item).map_err(|_x| "Value too large")?; - finished_item.replace(finished_some); - } - DeserializeStackItem::ResponseErr { .. } => { - let finished_some = Value::error(item).map_err(|_x| "Value too large")?; - finished_item.replace(finished_some); - } - }; - } - } - - Err(SerializationError::DeserializationError( - "Invalid data: stack ran out before finishing parsing".into(), - )) - } - - pub fn serialize_write(&self, w: &mut W) -> Result<(), SerializationError> { - use super::CharType::*; - use super::PrincipalData::*; - use super::SequenceData::{self, *}; - use super::Value::*; - - w.write_all(&[TypePrefix::from(self) as u8])?; - match self { - Int(value) => w.write_all(&value.to_be_bytes())?, - UInt(value) => w.write_all(&value.to_be_bytes())?, - Principal(Standard(data)) => data.serialize_write(w)?, - Principal(Contract(contract_identifier)) - | CallableContract(CallableData { - contract_identifier, - trait_identifier: _, - }) => { - contract_identifier.issuer.serialize_write(w)?; - contract_identifier.name.serialize_write(w)?; - } - Response(response) => response.data.serialize_write(w)?, - // Bool types don't need any more data. - Bool(_) => {} - // None types don't need any more data. - Optional(OptionalData { data: None }) => {} - Optional(OptionalData { data: Some(value) }) => { - value.serialize_write(w)?; - } - Sequence(List(data)) => { - let len_bytes = data - .len() - .map_err(|e| SerializationError::SerializationError(e.to_string()))? - .to_be_bytes(); - w.write_all(&len_bytes)?; - for item in data.data.iter() { - item.serialize_write(w)?; - } - } - Sequence(Buffer(value)) => { - let len_bytes = u32::from( - value - .len() - .map_err(|e| SerializationError::SerializationError(e.to_string()))?, - ) - .to_be_bytes(); - w.write_all(&len_bytes)?; - w.write_all(&value.data)? 
- } - Sequence(SequenceData::String(UTF8(value))) => { - let total_len: u32 = value.data.iter().fold(0u32, |len, c| len + c.len() as u32); - w.write_all(&(total_len.to_be_bytes()))?; - for bytes in value.data.iter() { - w.write_all(bytes)? - } - } - Sequence(SequenceData::String(ASCII(value))) => { - let len_bytes = u32::from( - value - .len() - .map_err(|e| SerializationError::SerializationError(e.to_string()))?, - ) - .to_be_bytes(); - w.write_all(&len_bytes)?; - w.write_all(&value.data)? - } - Tuple(data) => { - let len_bytes = u32::try_from(data.data_map.len()) - .map_err(|e| SerializationError::SerializationError(e.to_string()))? - .to_be_bytes(); - w.write_all(&len_bytes)?; - for (key, value) in data.data_map.iter() { - key.serialize_write(w)?; - value.serialize_write(w)?; - } - } - }; - - Ok(()) - } - - /// This function attempts to deserialize a byte buffer into a Clarity Value. - /// The `expected_type` parameter tells the deserializer to expect (and enforce) - /// a particular type. `ClarityDB` uses this to ensure that lists, tuples, etc. loaded from the database - /// have their max-length and other type information set by the type declarations in the contract. - pub fn try_deserialize_bytes( - bytes: &Vec, - expected: &TypeSignature, - sanitize: bool, - ) -> Result { - Value::deserialize_read(&mut bytes.as_slice(), Some(expected), sanitize) - } - - /// This function attempts to deserialize a hex string into a Clarity Value. - /// The `expected_type` parameter tells the deserializer to expect (and enforce) - /// a particular type. `ClarityDB` uses this to ensure that lists, tuples, etc. loaded from the database - /// have their max-length and other type information set by the type declarations in the contract. - pub fn try_deserialize_hex( - hex: &str, - expected: &TypeSignature, - sanitize: bool, - ) -> Result { - let data = hex_bytes(hex).map_err(|_| "Bad hex string")?; - Value::try_deserialize_bytes(&data, expected, sanitize) - } - - /// This function attempts to deserialize a byte buffer into a - /// Clarity Value, while ensuring that the whole byte buffer is - /// consumed by the deserialization, erroring if it is not. The - /// `expected_type` parameter tells the deserializer to expect - /// (and enforce) a particular type. `ClarityDB` uses this to - /// ensure that lists, tuples, etc. loaded from the database have - /// their max-length and other type information set by the type - /// declarations in the contract. - pub fn try_deserialize_bytes_exact( - bytes: &Vec, - expected: &TypeSignature, - sanitize: bool, - ) -> Result { - let input_length = bytes.len(); - let (value, read_count) = - Value::deserialize_read_count(&mut bytes.as_slice(), Some(expected), sanitize)?; - if read_count != (input_length as u64) { - Err(SerializationError::LeftoverBytesInDeserialization) - } else { - Ok(value) - } - } - - /// Try to deserialize a value without type information. This *does not* perform sanitization - /// so it should not be used when decoding clarity database values. - fn try_deserialize_bytes_untyped(bytes: &Vec) -> Result { - Value::deserialize_read(&mut bytes.as_slice(), None, false) - } - - /// Try to deserialize a value from a hex string without type information. This *does not* - /// perform sanitization. 
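For reference, the untyped entry point documented above infers the value's type from the type-prefix bytes alone and accepts an optional 0x prefix on the hex string, as the test_vectors cases below exercise. A minimal sketch, assuming the clarity::vm::types::Value re-export remains available.

use clarity::vm::types::Value;

fn main() {
    // 0x03 is the BoolTrue type prefix; no payload bytes follow it.
    let t = Value::try_deserialize_hex_untyped("03").unwrap();
    assert_eq!(t, Value::Bool(true));
    // A leading "0x" is stripped before hex decoding.
    assert_eq!(Value::try_deserialize_hex_untyped("0x03").unwrap(), t);
}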
- pub fn try_deserialize_hex_untyped(hex: &str) -> Result { - let hex = hex.strip_prefix("0x").unwrap_or(hex); - let data = hex_bytes(hex).map_err(|_| "Bad hex string")?; - Value::try_deserialize_bytes_untyped(&data) - } - - pub fn serialized_size(&self) -> Result { - let mut counter = WriteCounter { count: 0 }; - self.serialize_write(&mut counter).map_err(|_| { - SerializationError::DeserializationError( - "Error: Failed to count serialization length of Clarity value".into(), - ) - })?; - Ok(counter.count) - } -} - -/// A writer that just counts the bytes written -struct WriteCounter { - count: u32, -} - -impl Write for WriteCounter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - let input: u32 = buf - .len() - .try_into() - .map_err(|_e| std::io::Error::other("Serialization size would overflow u32"))?; - self.count = self - .count - .checked_add(input) - .ok_or_else(|| std::io::Error::other("Serialization size would overflow u32"))?; - Ok(input as usize) - } - - fn flush(&mut self) -> std::io::Result<()> { - Ok(()) - } -} - -impl Value { - pub fn serialize_to_vec(&self) -> Result, InterpreterError> { - let mut byte_serialization = Vec::new(); - self.serialize_write(&mut byte_serialization) - .map_err(|_| InterpreterError::Expect("IOError filling byte buffer.".into()))?; - Ok(byte_serialization) - } - - /// This does *not* perform any data sanitization - pub fn serialize_to_hex(&self) -> Result { - let byte_serialization = self.serialize_to_vec()?; - Ok(to_hex(byte_serialization.as_slice())) - } - - /// Sanitize `value` against pre-2.4 serialization - /// - /// Returns Some if the sanitization is successful, or was not necessary. - /// Returns None if the sanitization failed. - /// - /// Returns the sanitized value _and_ whether or not sanitization was required. - pub fn sanitize_value( - epoch: &StacksEpochId, - expected: &TypeSignature, - value: Value, - ) -> Option<(Value, bool)> { - // in epochs before 2.4, perform no sanitization - if !epoch.value_sanitizing() { - return Some((value, false)); - } - let (output, did_sanitize) = match value { - Value::Sequence(SequenceData::List(l)) => { - let lt = match expected { - TypeSignature::SequenceType(SequenceSubtype::ListType(lt)) => lt, - _ => return None, - }; - // if cannot compute l.len(), sanitization fails, so use ? operator can short return - if l.len().ok()? 
> lt.get_max_len() { - return None; - } - let mut sanitized_items = Vec::with_capacity(l.data.len()); - let mut did_sanitize_children = false; - for item in l.data.into_iter() { - let (sanitized_item, did_sanitize) = - Self::sanitize_value(epoch, lt.get_list_item_type(), item)?; - sanitized_items.push(sanitized_item); - did_sanitize_children = did_sanitize_children || did_sanitize; - } - // do not sanitize list before construction here, because we're already sanitizing - let output_list = Value::cons_list_unsanitized(sanitized_items).ok()?; - (output_list, did_sanitize_children) - } - Value::Tuple(tuple_data) => { - let tt = match expected { - TypeSignature::TupleType(tt) => tt, - _ => return None, - }; - let type_map = tt.get_type_map(); - let mut sanitized_tuple_entries = Vec::with_capacity(type_map.len()); - let original_tuple_len = tuple_data.len(); - let mut tuple_data_map = tuple_data.data_map; - let mut did_sanitize_children = false; - for (key, expect_key_type) in type_map.iter() { - let field_data = tuple_data_map.remove(key)?; - let (sanitized_field, did_sanitize) = - Self::sanitize_value(epoch, expect_key_type, field_data)?; - sanitized_tuple_entries.push((key.clone(), sanitized_field)); - did_sanitize_children = did_sanitize_children || did_sanitize; - } - if sanitized_tuple_entries.len() as u64 != tt.len() { - // this code should be unreachable, because I think any case that - // could trigger this would have returned None earlier - warn!("Sanitizer handled path that should have errored earlier, skipping sanitization"); - return None; - } - let did_sanitize_tuple = did_sanitize_children || (tt.len() != original_tuple_len); - ( - Value::Tuple(TupleData::from_data(sanitized_tuple_entries).ok()?), - did_sanitize_tuple, - ) - } - Value::Optional(opt_data) => { - let inner_type = match expected { - TypeSignature::OptionalType(inner_type) => inner_type, - _ => return None, - }; - let some_data = match opt_data.data { - Some(data) => *data, - None => return Some((Value::none(), false)), - }; - let (sanitized_data, did_sanitize_child) = - Self::sanitize_value(epoch, inner_type, some_data)?; - (Value::some(sanitized_data).ok()?, did_sanitize_child) - } - Value::Response(response) => { - let rt = match expected { - TypeSignature::ResponseType(rt) => rt, - _ => return None, - }; - - let response_ok = response.committed; - let response_data = *response.data; - let inner_type = if response_ok { &rt.0 } else { &rt.1 }; - let (sanitized_inner, did_sanitize_child) = - Self::sanitize_value(epoch, inner_type, response_data)?; - let sanitized_resp = if response_ok { - Value::okay(sanitized_inner) - } else { - Value::error(sanitized_inner) - }; - (sanitized_resp.ok()?, did_sanitize_child) - } - value => { - if expected.admits(epoch, &value).ok()? { - return Some((value, false)); - } else { - return None; - } - } - }; - - if expected.admits(epoch, &output).ok()? { - Some((output, did_sanitize)) - } else { - None - } - } -} +use crate::vm::errors::{Error as ClarityError, InterpreterError}; impl ClaritySerializable for u32 { fn serialize(&self) -> String { @@ -1341,24 +42,6 @@ impl ClarityDeserializable for u32 { } } -/// Note: the StacksMessageCodec implementation for Clarity values *does not* -/// sanitize its serialization or deserialization. 
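For reference, the note above continues to apply to the relocated impl: the StacksMessageCodec path takes no expected type and performs no sanitization. A round-trip sketch, assuming the trait impl for Value remains reachable after the move to clarity-serialization; the 0x0a prefix matches the OptionalSome case in test_vectors.

use clarity::vm::types::Value;
use stacks_common::codec::StacksMessageCodec;

fn main() {
    let v = Value::some(Value::Int(-1)).unwrap();

    // consensus_serialize emits the same byte layout as serialize_write.
    let mut bytes = Vec::new();
    v.consensus_serialize(&mut bytes).unwrap();
    assert_eq!(bytes[0], 0x0a); // TypePrefix::OptionalSome

    // consensus_deserialize reads untyped and applies no sanitization.
    let back = Value::consensus_deserialize(&mut bytes.as_slice()).unwrap();
    assert_eq!(back, v);
}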
-impl StacksMessageCodec for Value { - fn consensus_serialize(&self, fd: &mut W) -> Result<(), codec_error> { - self.serialize_write(fd).map_err(|e| match e { - SerializationError::IOError(io_e) => codec_error::WriteError(io_e.err), - other => codec_error::SerializeError(other.to_string()), - }) - } - - fn consensus_deserialize(fd: &mut R) -> Result { - Value::deserialize_read(fd, None, false).map_err(|e| match e { - SerializationError::IOError(e) => codec_error::ReadError(e.err), - _ => codec_error::DeserializeError(format!("Failed to decode clarity value: {e:?}")), - }) - } -} - #[cfg(test)] pub mod tests { use std::io::Write; @@ -1373,10 +56,6 @@ pub mod tests { use crate::vm::tests::test_clarity_versions; use crate::vm::ClarityVersion; - fn buff_type(size: u32) -> TypeSignature { - TypeSignature::SequenceType(SequenceSubtype::BufferType(size.try_into().unwrap())) - } - fn test_deser_ser(v: Value) { assert_eq!( &v, @@ -1521,36 +200,6 @@ pub mod tests { } } - #[test] - fn test_bools() { - test_deser_ser(Value::Bool(false)); - test_deser_ser(Value::Bool(true)); - - test_bad_expectation(Value::Bool(false), TypeSignature::IntType); - test_bad_expectation(Value::Bool(true), TypeSignature::IntType); - } - - #[test] - fn test_ints() { - test_deser_ser(Value::Int(0)); - test_deser_ser(Value::Int(1)); - test_deser_ser(Value::Int(-1)); - test_deser_ser(Value::Int(i128::MAX)); - test_deser_ser(Value::Int(i128::MIN)); - - test_bad_expectation(Value::Int(1), TypeSignature::UIntType); - } - - #[test] - fn test_uints() { - test_deser_ser(Value::UInt(0)); - test_deser_ser(Value::UInt(1)); - test_deser_ser(Value::UInt(u128::MAX)); - test_deser_ser(Value::UInt(u128::MIN)); - - test_bad_expectation(Value::UInt(1), TypeSignature::IntType); - } - #[apply(test_clarity_versions)] fn test_opts(#[case] version: ClarityVersion, #[case] epoch: StacksEpochId) { test_deser_ser(Value::none()); @@ -1630,95 +279,6 @@ pub mod tests { ); } - #[test] - fn test_tuples() { - let t_1 = Value::from( - TupleData::from_data(vec![ - ("a".into(), Value::Int(1)), - ("b".into(), Value::Int(1)), - ]) - .unwrap(), - ); - let t_0 = Value::from( - TupleData::from_data(vec![ - ("b".into(), Value::Int(1)), - ("a".into(), Value::Int(1)), - ]) - .unwrap(), - ); - let t_2 = Value::from( - TupleData::from_data(vec![ - ("a".into(), Value::Int(1)), - ("b".into(), Value::Bool(true)), - ]) - .unwrap(), - ); - let t_3 = Value::from(TupleData::from_data(vec![("a".into(), Value::Int(1))]).unwrap()); - let t_4 = Value::from( - TupleData::from_data(vec![ - ("a".into(), Value::Int(1)), - ("c".into(), Value::Bool(true)), - ]) - .unwrap(), - ); - - test_deser_ser(t_0.clone()); - test_deser_ser(t_1.clone()); - test_deser_ser(t_2.clone()); - test_deser_ser(t_3.clone()); - - test_bad_expectation(t_0.clone(), TypeSignature::BoolType); - - // t_0 and t_1 are actually the same - assert_eq!( - Value::try_deserialize_hex( - &t_1.serialize_to_hex().unwrap(), - &TypeSignature::type_of(&t_0).unwrap(), - false - ) - .unwrap(), - Value::try_deserialize_hex( - &t_0.serialize_to_hex().unwrap(), - &TypeSignature::type_of(&t_0).unwrap(), - false - ) - .unwrap() - ); - - // field number not equal to expectations - assert!(matches!( - Value::try_deserialize_hex( - &t_3.serialize_to_hex().unwrap(), - &TypeSignature::type_of(&t_1).unwrap(), - false - ) - .unwrap_err(), - SerializationError::DeserializeExpected(_) - )); - - // field type mismatch - assert!(matches!( - Value::try_deserialize_hex( - &t_2.serialize_to_hex().unwrap(), - &TypeSignature::type_of(&t_1).unwrap(), - 
false - ) - .unwrap_err(), - SerializationError::DeserializeExpected(_) - )); - - // field not-present in expected - assert!(matches!( - Value::try_deserialize_hex( - &t_1.serialize_to_hex().unwrap(), - &TypeSignature::type_of(&t_4).unwrap(), - false - ) - .unwrap_err(), - SerializationError::DeserializeExpected(_) - )); - } - #[apply(test_clarity_versions)] fn test_sanitization(#[case] version: ClarityVersion, #[case] epoch: StacksEpochId) { let v_1 = Value::list_from(vec![ @@ -2091,112 +651,4 @@ pub mod tests { } } } - - #[test] - fn test_vectors() { - let tests = [ - ("1010", Err("Bad type prefix".into())), - ("0000000000000000000000000000000001", Ok(Value::Int(1))), - ("00ffffffffffffffffffffffffffffffff", Ok(Value::Int(-1))), - ("0100000000000000000000000000000001", Ok(Value::UInt(1))), - ("0200000004deadbeef", Ok(Value::buff_from(vec![0xde, 0xad, 0xbe, 0xef]) - .unwrap())), - ("03", Ok(Value::Bool(true))), - ("04", Ok(Value::Bool(false))), - ("050011deadbeef11ababffff11deadbeef11ababffff", Ok( - StandardPrincipalData::new( - 0x00, - [0x11, 0xde, 0xad, 0xbe, 0xef, 0x11, 0xab, 0xab, 0xff, 0xff, - 0x11, 0xde, 0xad, 0xbe, 0xef, 0x11, 0xab, 0xab, 0xff, 0xff]).unwrap().into())), - ("060011deadbeef11ababffff11deadbeef11ababffff0461626364", Ok( - QualifiedContractIdentifier::new( - StandardPrincipalData::new( - 0x00, - [0x11, 0xde, 0xad, 0xbe, 0xef, 0x11, 0xab, 0xab, 0xff, 0xff, - 0x11, 0xde, 0xad, 0xbe, 0xef, 0x11, 0xab, 0xab, 0xff, 0xff]).unwrap(), - "abcd".into()).into())), - ("0700ffffffffffffffffffffffffffffffff", Ok(Value::okay(Value::Int(-1)).unwrap())), - ("0800ffffffffffffffffffffffffffffffff", Ok(Value::error(Value::Int(-1)).unwrap())), - ("09", Ok(Value::none())), - ("0a00ffffffffffffffffffffffffffffffff", Ok(Value::some(Value::Int(-1)).unwrap())), - ("0b0000000400000000000000000000000000000000010000000000000000000000000000000002000000000000000000000000000000000300fffffffffffffffffffffffffffffffc", - Ok(Value::list_from(vec![ - Value::Int(1), Value::Int(2), Value::Int(3), Value::Int(-4)]).unwrap())), - ("0c000000020362617a0906666f6f62617203", - Ok(Value::from(TupleData::from_data(vec![ - ("baz".into(), Value::none()), ("foobar".into(), Value::Bool(true))]).unwrap()))) - ]; - - for (test, expected) in tests.iter() { - if let Ok(x) = expected { - assert_eq!(test, &x.serialize_to_hex().unwrap()); - } - assert_eq!(expected, &Value::try_deserialize_hex_untyped(test)); - assert_eq!( - expected, - &Value::try_deserialize_hex_untyped(&format!("0x{test}")) - ); - } - - // test the serialized_size implementation - for (test, expected) in tests.iter() { - if let Ok(value) = expected { - assert_eq!( - value.serialized_size().unwrap(), - test.len() as u32 / 2, - "serialized_size() should return the byte length of the serialization (half the length of the hex encoding)", - ); - } - } - } - - #[test] - fn try_deser_large_list() { - let buff = vec![ - 11, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - ]; - - assert_eq!( - Value::try_deserialize_bytes_untyped(&buff).unwrap_err(), - SerializationError::DeserializationError("Illegal list type".to_string()) - ); - } - - #[test] - fn try_deser_large_tuple() { - let buff = vec![ - 12, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - ]; - - assert_eq!( - Value::try_deserialize_bytes_untyped(&buff).unwrap_err(), - SerializationError::DeserializationError("Illegal tuple type".to_string()) - ); - } - - #[test] - fn try_overflow_stack() { - let input = 
"08080808080808080808070707080807080808080808080708080808080708080707080707080807080808080808080708080808080708080707080708070807080808080808080708080808080708080708080808080808080807070807080808080808070808070707080807070808070808080808070808070708070807080808080808080707080708070807080708080808080808070808080808070808070808080808080808080707080708080808080807080807070708080707080807080808080807080807070807080708080808080808070708070808080808080708080707070808070708080807080807070708"; - assert_eq!( - Err(CheckErrors::TypeSignatureTooDeep.into()), - Value::try_deserialize_hex_untyped(input) - ); - } - - #[test] - fn test_principals() { - let issuer = - PrincipalData::parse_standard_principal("SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G") - .unwrap(); - let standard_p = Value::from(issuer.clone()); - - let contract_identifier = QualifiedContractIdentifier::new(issuer, "foo".into()); - let contract_p2 = Value::from(PrincipalData::Contract(contract_identifier)); - - test_deser_ser(contract_p2.clone()); - test_deser_ser(standard_p.clone()); - - test_bad_expectation(contract_p2, TypeSignature::BoolType); - test_bad_expectation(standard_p, TypeSignature::BoolType); - } } diff --git a/clarity/src/vm/types/signatures.rs b/clarity/src/vm/types/signatures.rs index a7fa8434f0..2a98a464de 100644 --- a/clarity/src/vm/types/signatures.rs +++ b/clarity/src/vm/types/signatures.rs @@ -14,244 +14,26 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::collections::btree_map::Entry; use std::collections::BTreeMap; -use std::hash::Hash; -use std::sync::Arc; -use std::{cmp, fmt}; +use std::fmt; -// TypeSignatures -use hashbrown::HashSet; -use lazy_static::lazy_static; +pub use clarity_serialization::types::signatures::{ + AssetIdentifier, BufferLength, CallableSubtype, ListTypeData, SequenceSubtype, StringSubtype, + StringUTF8Length, TupleTypeSignature, TypeSignature, ASCII_40, BUFF_1, BUFF_16, BUFF_20, + BUFF_21, BUFF_32, BUFF_33, BUFF_64, BUFF_65, UTF8_40, +}; +pub use clarity_serialization::types::Value; use stacks_common::types::StacksEpochId; use crate::vm::costs::{runtime_cost, CostOverflowingMath}; use crate::vm::errors::CheckErrors; use crate::vm::representations::{ - ClarityName, ContractName, SymbolicExpression, SymbolicExpressionType, TraitDefinition, - CONTRACT_MAX_NAME_LENGTH, -}; -use crate::vm::types::{ - CharType, PrincipalData, QualifiedContractIdentifier, SequenceData, SequencedValue, - StandardPrincipalData, TraitIdentifier, Value, MAX_TYPE_DEPTH, MAX_VALUE_SIZE, - WRAPPER_VALUE_SIZE, + ClarityName, SymbolicExpression, SymbolicExpressionType, TraitDefinition, }; type Result = std::result::Result; -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Serialize, Deserialize, Hash)] -pub struct AssetIdentifier { - pub contract_identifier: QualifiedContractIdentifier, - pub asset_name: ClarityName, -} - -impl AssetIdentifier { - #[allow(clippy::unwrap_used)] - pub fn STX() -> AssetIdentifier { - AssetIdentifier { - contract_identifier: QualifiedContractIdentifier::new( - StandardPrincipalData::null_principal(), - ContractName::try_from("STX".to_string()).unwrap(), - ), - asset_name: ClarityName::try_from("STX".to_string()).unwrap(), - } - } - - #[allow(clippy::unwrap_used)] - pub fn STX_burned() -> AssetIdentifier { - AssetIdentifier { - contract_identifier: QualifiedContractIdentifier::new( - StandardPrincipalData::null_principal(), - ContractName::try_from("BURNED".to_string()).unwrap(), - ), - asset_name: 
ClarityName::try_from("BURNED".to_string()).unwrap(), - } - } - - pub fn sugared(&self) -> String { - format!(".{}.{}", self.contract_identifier.name, self.asset_name) - } -} - -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct TupleTypeSignature { - #[serde(with = "tuple_type_map_serde")] - type_map: Arc>, -} - -mod tuple_type_map_serde { - use std::collections::BTreeMap; - use std::ops::Deref; - use std::sync::Arc; - - use serde::{Deserializer, Serializer}; - - use super::TypeSignature; - use crate::vm::ClarityName; - - pub fn serialize( - map: &Arc>, - ser: S, - ) -> Result { - serde::Serialize::serialize(map.deref(), ser) - } - - pub fn deserialize<'de, D>( - deser: D, - ) -> Result>, D::Error> - where - D: Deserializer<'de>, - { - let map = serde::Deserialize::deserialize(deser)?; - Ok(Arc::new(map)) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub struct BufferLength(u32); - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct StringUTF8Length(u32); - -// INVARIANTS enforced by the Type Signatures. -// 1. A TypeSignature constructor will always fail rather than construct a -// type signature for a too large or invalid type. This is why any variable length -// type signature has a guarded constructor. -// 2. The only methods which may be called on TypeSignatures that are too large -// (i.e., the only function that can be called by the constructor before -// it fails) is the `.size()` method, which may be used to check the size. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum TypeSignature { - NoType, - IntType, - UIntType, - BoolType, - SequenceType(SequenceSubtype), - PrincipalType, - TupleType(TupleTypeSignature), - OptionalType(Box), - ResponseType(Box<(TypeSignature, TypeSignature)>), - CallableType(CallableSubtype), - // Suppose we have a list of contract principal literals, e.g. - // `(list .foo .bar)`. This list could be used as a list of `principal` - // types, or it could be passed into a function where it is used a list of - // some trait type, which every contract in the list implements, e.g. - // `(list 4 )`. There could also be a trait value, `t`, in that - // list. In that case, the list could no longer be coerced to a list of - // principals, but it could be coerced to a list of traits, either the type - // of `t`, or a compatible sub-trait of that type. `ListUnionType` is a - // data structure to maintain the set of types in the list, so that when - // we reach the place where the coercion needs to happen, we can perform - // the check -- see `concretize` method. - ListUnionType(HashSet), - // This is used only below epoch 2.1. It has been replaced by CallableType. 
- TraitReferenceType(TraitIdentifier), -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum SequenceSubtype { - BufferType(BufferLength), - ListType(ListTypeData), - StringType(StringSubtype), -} - -impl SequenceSubtype { - pub fn unit_type(&self) -> Result { - match &self { - SequenceSubtype::ListType(ref list_data) => Ok(list_data.clone().destruct().0), - SequenceSubtype::BufferType(_) => TypeSignature::min_buffer(), - SequenceSubtype::StringType(StringSubtype::ASCII(_)) => { - TypeSignature::min_string_ascii() - } - SequenceSubtype::StringType(StringSubtype::UTF8(_)) => TypeSignature::min_string_utf8(), - } - } - - pub fn is_list_type(&self) -> bool { - matches!(self, SequenceSubtype::ListType(_)) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum StringSubtype { - ASCII(BufferLength), - UTF8(StringUTF8Length), -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] -pub enum CallableSubtype { - Principal(QualifiedContractIdentifier), - Trait(TraitIdentifier), -} - -use self::TypeSignature::{ - BoolType, CallableType, IntType, ListUnionType, NoType, OptionalType, PrincipalType, - ResponseType, SequenceType, TraitReferenceType, TupleType, UIntType, -}; - -lazy_static! { - pub static ref BUFF_64: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(64u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_65: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(65u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_32: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(32u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_33: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(33u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_20: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(20u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_21: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(21u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_1: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(1u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; - pub static ref BUFF_16: TypeSignature = { - #[allow(clippy::expect_used)] - SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(16u32).expect("BUG: Legal Clarity buffer length marked invalid"), - )) - }; -} - -pub const ASCII_40: TypeSignature = SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(BufferLength(40)), -)); -pub const UTF8_40: TypeSignature = SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8( - StringUTF8Length(40), -))); - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct ListTypeData { - max_len: u32, - entry_type: Box, -} +use self::TypeSignature::SequenceType; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct 
FunctionSignature { @@ -304,1171 +86,152 @@ impl FunctionArgSignature { .collect(); FunctionArgSignature::Union(arg_types) } - FunctionArgSignature::Single(arg_type) => { - let arg_type = arg_type.canonicalize(epoch); - FunctionArgSignature::Single(arg_type) - } - } - } -} - -impl FunctionReturnsSignature { - pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionReturnsSignature { - match self { - FunctionReturnsSignature::TypeOfArgAtPosition(_) => self.clone(), - FunctionReturnsSignature::Fixed(return_type) => { - let return_type = return_type.canonicalize(epoch); - FunctionReturnsSignature::Fixed(return_type) - } - } - } -} - -impl FunctionType { - pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionType { - match self { - FunctionType::Variadic(arg_type, return_type) => { - let arg_type = arg_type.canonicalize(epoch); - let return_type = return_type.canonicalize(epoch); - FunctionType::Variadic(arg_type, return_type) - } - FunctionType::Fixed(fixed_function) => { - let args = fixed_function - .args - .iter() - .map(|arg| FunctionArg { - signature: arg.signature.canonicalize(epoch), - name: arg.name.clone(), - }) - .collect(); - let returns = fixed_function.returns.canonicalize(epoch); - FunctionType::Fixed(FixedFunction { args, returns }) - } - FunctionType::UnionArgs(arg_types, return_type) => { - let arg_types = arg_types - .iter() - .map(|arg_type| arg_type.canonicalize(epoch)) - .collect(); - let return_type = return_type.canonicalize(epoch); - FunctionType::UnionArgs(arg_types, return_type) - } - FunctionType::ArithmeticVariadic => FunctionType::ArithmeticVariadic, - FunctionType::ArithmeticUnary => FunctionType::ArithmeticUnary, - FunctionType::ArithmeticBinary => FunctionType::ArithmeticBinary, - FunctionType::ArithmeticComparison => FunctionType::ArithmeticComparison, - FunctionType::Binary(arg1, arg2, return_type) => { - let arg1 = arg1.canonicalize(epoch); - let arg2 = arg2.canonicalize(epoch); - let return_type = return_type.canonicalize(epoch); - FunctionType::Binary(arg1, arg2, return_type) - } - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct FunctionArg { - pub signature: TypeSignature, - pub name: ClarityName, -} - -impl From for FunctionSignature { - fn from(data: FixedFunction) -> FunctionSignature { - let FixedFunction { args, returns } = data; - let args = args.into_iter().map(|x| x.signature).collect(); - FunctionSignature { args, returns } - } -} - -impl From for TypeSignature { - fn from(data: ListTypeData) -> Self { - SequenceType(SequenceSubtype::ListType(data)) - } -} - -impl From for TypeSignature { - fn from(data: TupleTypeSignature) -> Self { - TupleType(data) - } -} - -impl From<&BufferLength> for u32 { - fn from(v: &BufferLength) -> u32 { - v.0 - } -} - -impl From for u32 { - fn from(v: BufferLength) -> u32 { - v.0 - } -} - -impl TryFrom for BufferLength { - type Error = CheckErrors; - fn try_from(data: u32) -> Result { - if data > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(BufferLength(data)) - } - } -} - -impl TryFrom for BufferLength { - type Error = CheckErrors; - fn try_from(data: usize) -> Result { - if data > (MAX_VALUE_SIZE as usize) { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(BufferLength(data as u32)) - } - } -} - -impl TryFrom for BufferLength { - type Error = CheckErrors; - fn try_from(data: i128) -> Result { - if data > (MAX_VALUE_SIZE as i128) { - Err(CheckErrors::ValueTooLarge) - } else if data < 0 { - Err(CheckErrors::ValueOutOfBounds) - } else { - 
Ok(BufferLength(data as u32)) - } - } -} - -impl From<&StringUTF8Length> for u32 { - fn from(v: &StringUTF8Length) -> u32 { - v.0 - } -} - -impl From for u32 { - fn from(v: StringUTF8Length) -> u32 { - v.0 - } -} - -impl TryFrom for StringUTF8Length { - type Error = CheckErrors; - fn try_from(data: u32) -> Result { - let len = data - .checked_mul(4) - .ok_or_else(|| CheckErrors::ValueTooLarge)?; - if len > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(StringUTF8Length(data)) - } - } -} - -impl TryFrom for StringUTF8Length { - type Error = CheckErrors; - fn try_from(data: usize) -> Result { - let len = data - .checked_mul(4) - .ok_or_else(|| CheckErrors::ValueTooLarge)?; - if len > (MAX_VALUE_SIZE as usize) { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(StringUTF8Length(data as u32)) - } - } -} - -impl TryFrom for StringUTF8Length { - type Error = CheckErrors; - fn try_from(data: i128) -> Result { - let len = data - .checked_mul(4) - .ok_or_else(|| CheckErrors::ValueTooLarge)?; - if len > (MAX_VALUE_SIZE as i128) { - Err(CheckErrors::ValueTooLarge) - } else if data < 0 { - Err(CheckErrors::ValueOutOfBounds) - } else { - Ok(StringUTF8Length(data as u32)) - } - } -} - -impl ListTypeData { - pub fn new_list(entry_type: TypeSignature, max_len: u32) -> Result { - let would_be_depth = 1 + entry_type.depth(); - if would_be_depth > MAX_TYPE_DEPTH { - return Err(CheckErrors::TypeSignatureTooDeep); - } - - let list_data = ListTypeData { - entry_type: Box::new(entry_type), - max_len, - }; - let would_be_size = list_data - .inner_size()? - .ok_or_else(|| CheckErrors::ValueTooLarge)?; - if would_be_size > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(list_data) - } - } - - pub fn destruct(self) -> (TypeSignature, u32) { - (*self.entry_type, self.max_len) - } - - // if checks like as-max-len pass, they may _reduce_ - // but should not increase the type signatures max length - pub fn reduce_max_len(&mut self, new_max_len: u32) { - if new_max_len <= self.max_len { - self.max_len = new_max_len; - } - } - - pub fn get_max_len(&self) -> u32 { - self.max_len - } - - pub fn get_list_item_type(&self) -> &TypeSignature { - &self.entry_type - } -} - -impl TypeSignature { - pub fn new_option(inner_type: TypeSignature) -> Result { - let new_size = WRAPPER_VALUE_SIZE + inner_type.size()?; - let new_depth = 1 + inner_type.depth(); - if new_size > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else if new_depth > MAX_TYPE_DEPTH { - Err(CheckErrors::TypeSignatureTooDeep) - } else { - Ok(OptionalType(Box::new(inner_type))) - } - } - - pub fn new_response(ok_type: TypeSignature, err_type: TypeSignature) -> Result { - let new_size = WRAPPER_VALUE_SIZE + cmp::max(ok_type.size()?, err_type.size()?); - let new_depth = 1 + cmp::max(ok_type.depth(), err_type.depth()); - - if new_size > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else if new_depth > MAX_TYPE_DEPTH { - Err(CheckErrors::TypeSignatureTooDeep) - } else { - Ok(ResponseType(Box::new((ok_type, err_type)))) - } - } - - pub fn is_response_type(&self) -> bool { - matches!(self, TypeSignature::ResponseType(_)) - } - - pub fn is_no_type(&self) -> bool { - &TypeSignature::NoType == self - } - - pub fn admits(&self, epoch: &StacksEpochId, x: &Value) -> Result { - let x_type = TypeSignature::type_of(x)?; - self.admits_type(epoch, &x_type) - } - - pub fn admits_type(&self, epoch: &StacksEpochId, other: &TypeSignature) -> Result { - match epoch { - StacksEpochId::Epoch20 | StacksEpochId::Epoch2_05 => 
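These are the guarded constructors referred to by the invariants note above: rather than building a type whose value size would exceed MAX_VALUE_SIZE or whose nesting would exceed MAX_TYPE_DEPTH, they return an error. A minimal sketch of how that surfaces to callers, assuming the moved definitions keep this behavior:

    // Sketch: guarded constructors refuse to build over-large types, so any
    // constructed TypeSignature has a computable, bounded size.
    fn guarded_constructor_examples() {
        // A buffer length above MAX_VALUE_SIZE is not constructible.
        assert!(matches!(
            BufferLength::try_from(MAX_VALUE_SIZE + 1),
            Err(CheckErrors::ValueTooLarge)
        ));

        // A list whose worst-case value size would exceed MAX_VALUE_SIZE is rejected.
        assert!(ListTypeData::new_list(TypeSignature::max_buffer().unwrap(), 2).is_err());

        // Within bounds, construction succeeds and `.size()` cannot overflow.
        let ok = TypeSignature::list_of(TypeSignature::IntType, 10).unwrap();
        assert!(ok.size().unwrap() <= MAX_VALUE_SIZE);
    }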
self.admits_type_v2_0(other), - StacksEpochId::Epoch21 - | StacksEpochId::Epoch22 - | StacksEpochId::Epoch23 - | StacksEpochId::Epoch24 - | StacksEpochId::Epoch25 - | StacksEpochId::Epoch30 - | StacksEpochId::Epoch31 - | StacksEpochId::Epoch32 => self.admits_type_v2_1(other), - StacksEpochId::Epoch10 => Err(CheckErrors::Expects("epoch 1.0 not supported".into())), - } - } - - pub fn admits_type_v2_0(&self, other: &TypeSignature) -> Result { - match self { - SequenceType(SequenceSubtype::ListType(ref my_list_type)) => { - if let SequenceType(SequenceSubtype::ListType(other_list_type)) = other { - if other_list_type.max_len == 0 { - // if other is an empty list, a list type should always admit. - Ok(true) - } else if my_list_type.max_len >= other_list_type.max_len { - my_list_type - .entry_type - .admits_type_v2_0(&other_list_type.entry_type) - } else { - Ok(false) - } - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::BufferType(ref my_len)) => { - if let SequenceType(SequenceSubtype::BufferType(ref other_len)) = other { - Ok(my_len.0 >= other_len.0) - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(len))) => { - if let SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(other_len))) = - other - { - Ok(len.0 >= other_len.0) - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(len))) => { - if let SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(other_len))) = - other - { - Ok(len.0 >= other_len.0) - } else { - Ok(false) - } - } - OptionalType(ref my_inner_type) => { - if let OptionalType(other_inner_type) = other { - // Option types will always admit a "NoType" OptionalType -- which - // can only be a None - if other_inner_type.is_no_type() { - Ok(true) - } else { - my_inner_type.admits_type_v2_0(other_inner_type) - } - } else { - Ok(false) - } - } - ResponseType(ref my_inner_type) => { - if let ResponseType(other_inner_type) = other { - // ResponseTypes admit according to the following rule: - // if other.ErrType is NoType, and other.OkType admits => admit - // if other.OkType is NoType, and other.ErrType admits => admit - // if both OkType and ErrType admit => admit - // otherwise fail. - if other_inner_type.0.is_no_type() { - my_inner_type.1.admits_type_v2_0(&other_inner_type.1) - } else if other_inner_type.1.is_no_type() { - my_inner_type.0.admits_type_v2_0(&other_inner_type.0) - } else { - Ok(my_inner_type.1.admits_type_v2_0(&other_inner_type.1)? - && my_inner_type.0.admits_type_v2_0(&other_inner_type.0)?) - } - } else { - Ok(false) - } - } - TupleType(ref tuple_sig) => { - if let TupleType(ref other_tuple_sig) = other { - tuple_sig.admits(&StacksEpochId::Epoch2_05, other_tuple_sig) - } else { - Ok(false) - } - } - NoType => Err(CheckErrors::CouldNotDetermineType), - CallableType(_) => Err(CheckErrors::Expects( - "CallableType should not be used in epoch v2.0".into(), - )), - ListUnionType(_) => Err(CheckErrors::Expects( - "ListUnionType should not be used in epoch v2.0".into(), - )), - _ => Ok(other == self), - } - } - - fn admits_type_v2_1(&self, other: &TypeSignature) -> Result { - let other = match other.concretize() { - Ok(other) => other, - Err(_) => { - return Ok(false); - } - }; - - match self { - SequenceType(SequenceSubtype::ListType(ref my_list_type)) => { - if let SequenceType(SequenceSubtype::ListType(other_list_type)) = &other { - if other_list_type.max_len == 0 { - // if other is an empty list, a list type should always admit. 
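As the comment notes, a zero-length list (the type of the empty list literal) is admitted by every list type, and list admission is otherwise monotone in `max_len` and covariant in the entry type. A short sketch of those rules, assuming the behavior shown above:

    fn admits_examples() -> Result<(), CheckErrors> {
        let epoch = StacksEpochId::Epoch21;
        let list_5_int = TypeSignature::list_of(TypeSignature::IntType, 5)?;
        let list_3_int = TypeSignature::list_of(TypeSignature::IntType, 3)?;
        let empty_list = TypeSignature::from(TypeSignature::empty_list());

        assert!(list_5_int.admits_type(&epoch, &list_3_int)?); // shorter list admitted
        assert!(!list_3_int.admits_type(&epoch, &list_5_int)?); // longer list rejected
        assert!(list_3_int.admits_type(&epoch, &empty_list)?); // empty list always admitted
        Ok(())
    }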
- Ok(true) - } else if my_list_type.max_len >= other_list_type.max_len { - my_list_type - .entry_type - .admits_type_v2_1(&other_list_type.entry_type) - } else { - Ok(false) - } - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::BufferType(ref my_len)) => { - if let SequenceType(SequenceSubtype::BufferType(ref other_len)) = &other { - Ok(my_len.0 >= other_len.0) - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(len))) => { - if let SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(other_len))) = - &other - { - Ok(len.0 >= other_len.0) - } else { - Ok(false) - } - } - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(len))) => { - if let SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(other_len))) = - &other - { - Ok(len.0 >= other_len.0) - } else { - Ok(false) - } - } - OptionalType(ref my_inner_type) => { - if let OptionalType(other_inner_type) = &other { - // Option types will always admit a "NoType" OptionalType -- which - // can only be a None - if other_inner_type.is_no_type() { - Ok(true) - } else { - my_inner_type.admits_type_v2_1(other_inner_type) - } - } else { - Ok(false) - } - } - ResponseType(ref my_inner_type) => { - if let ResponseType(other_inner_type) = &other { - // ResponseTypes admit according to the following rule: - // if other.ErrType is NoType, and other.OkType admits => admit - // if other.OkType is NoType, and other.ErrType admits => admit - // if both OkType and ErrType admit => admit - // otherwise fail. - if other_inner_type.0.is_no_type() { - my_inner_type.1.admits_type_v2_1(&other_inner_type.1) - } else if other_inner_type.1.is_no_type() { - my_inner_type.0.admits_type_v2_1(&other_inner_type.0) - } else { - Ok(my_inner_type.1.admits_type_v2_1(&other_inner_type.1)? - && my_inner_type.0.admits_type_v2_1(&other_inner_type.0)?) - } - } else { - Ok(false) - } - } - TupleType(ref tuple_sig) => { - if let TupleType(ref other_tuple_sig) = &other { - tuple_sig.admits(&StacksEpochId::Epoch21, other_tuple_sig) - } else { - Ok(false) - } - } - NoType => Err(CheckErrors::CouldNotDetermineType), - _ => Ok(&other == self), - } - } - - /// Canonicalize a type. - /// This method will convert types from previous epochs with the appropriate - /// types for the specified epoch. - pub fn canonicalize(&self, epoch: &StacksEpochId) -> TypeSignature { - match epoch { - StacksEpochId::Epoch10 - | StacksEpochId::Epoch20 - | StacksEpochId::Epoch2_05 - // Epoch-2.2 had a regression in canonicalization, so it must be preserved here. 
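The epoch dispatch is observable: a pre-2.1 trait reference canonicalizes to a CallableType from epoch 2.1 onward, except in epoch 2.2, which (per the regression note above) must keep the old behavior. A sketch of the difference:

    fn canonicalize_example() {
        let trait_ref = TypeSignature::TraitReferenceType(TraitIdentifier {
            name: "foo".into(),
            contract_identifier: QualifiedContractIdentifier::transient(),
        });

        // Epochs 2.0/2.05 (and 2.2, per the regression note): unchanged.
        assert_eq!(trait_ref.canonicalize(&StacksEpochId::Epoch2_05), trait_ref);

        // Epoch 2.1 and later (except 2.2): trait references become callable types.
        assert!(matches!(
            trait_ref.canonicalize(&StacksEpochId::Epoch21),
            TypeSignature::CallableType(CallableSubtype::Trait(_))
        ));
    }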
- | StacksEpochId::Epoch22 => self.clone(), - // Note for future epochs: Epochs >= 2.3 should use the canonicalize_v2_1() routine - StacksEpochId::Epoch21 - | StacksEpochId::Epoch23 - | StacksEpochId::Epoch24 - | StacksEpochId::Epoch25 - | StacksEpochId::Epoch30 - | StacksEpochId::Epoch31 - | StacksEpochId::Epoch32 => self.canonicalize_v2_1(), - } - } - - pub fn canonicalize_v2_1(&self) -> TypeSignature { - match self { - SequenceType(SequenceSubtype::ListType(ref list_type)) => { - SequenceType(SequenceSubtype::ListType(ListTypeData { - max_len: list_type.max_len, - entry_type: Box::new(list_type.entry_type.canonicalize_v2_1()), - })) - } - OptionalType(ref inner_type) => OptionalType(Box::new(inner_type.canonicalize_v2_1())), - ResponseType(ref inner_type) => ResponseType(Box::new(( - inner_type.0.canonicalize_v2_1(), - inner_type.1.canonicalize_v2_1(), - ))), - TupleType(ref tuple_sig) => { - let mut canonicalized_fields = BTreeMap::new(); - for (field_name, field_type) in tuple_sig.get_type_map() { - canonicalized_fields.insert(field_name.clone(), field_type.canonicalize_v2_1()); - } - TypeSignature::from(TupleTypeSignature { - type_map: Arc::new(canonicalized_fields), - }) - } - TraitReferenceType(trait_id) => CallableType(CallableSubtype::Trait(trait_id.clone())), - _ => self.clone(), - } - } - - /// Concretize the type. The input to this method may include - /// `ListUnionType` and the `CallableType` variant for a `principal. - /// This method turns these "temporary" types into actual types. - pub fn concretize(&self) -> Result { - match self { - ListUnionType(types) => { - let mut is_trait = None; - let mut is_principal = true; - for partial in types { - match partial { - CallableSubtype::Principal(_) => { - if is_trait.is_some() { - return Err(CheckErrors::TypeError( - TypeSignature::CallableType(partial.clone()), - TypeSignature::PrincipalType, - )); - } else { - is_principal = true; - } - } - CallableSubtype::Trait(t) => { - if is_principal { - return Err(CheckErrors::TypeError( - TypeSignature::PrincipalType, - TypeSignature::CallableType(partial.clone()), - )); - } else { - is_trait = Some(t.clone()); - } - } - } - } - if let Some(t) = is_trait { - Ok(TypeSignature::CallableType(CallableSubtype::Trait(t))) - } else { - Ok(TypeSignature::PrincipalType) - } - } - CallableType(CallableSubtype::Principal(_)) => Ok(TypeSignature::PrincipalType), - _ => Ok(self.clone()), - } - } -} - -impl TryFrom> for TupleTypeSignature { - type Error = CheckErrors; - fn try_from(type_data: Vec<(ClarityName, TypeSignature)>) -> Result { - if type_data.is_empty() { - return Err(CheckErrors::EmptyTuplesNotAllowed); - } - - let mut type_map = BTreeMap::new(); - for (name, type_info) in type_data.into_iter() { - if let Entry::Vacant(e) = type_map.entry(name.clone()) { - e.insert(type_info); - } else { - return Err(CheckErrors::NameAlreadyUsed(name.into())); - } - } - TupleTypeSignature::try_from(type_map) - } -} - -impl TryFrom> for TupleTypeSignature { - type Error = CheckErrors; - fn try_from(type_map: BTreeMap) -> Result { - if type_map.is_empty() { - return Err(CheckErrors::EmptyTuplesNotAllowed); - } - for child_sig in type_map.values() { - if (1 + child_sig.depth()) > MAX_TYPE_DEPTH { - return Err(CheckErrors::TypeSignatureTooDeep); - } - } - let type_map = Arc::new(type_map.into_iter().collect()); - let result = TupleTypeSignature { type_map }; - let would_be_size = result - .inner_size()? 
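The tuple constructors above enforce the same construction-time guarantees: no empty tuples, no duplicate field names, and a bounded total size. A short sketch of the error cases, assuming the moved definitions keep these error variants:

    fn tuple_type_examples() {
        let ok = TupleTypeSignature::try_from(vec![
            ("a".into(), TypeSignature::IntType),
            ("b".into(), TypeSignature::BoolType),
        ]);
        assert!(ok.is_ok());

        let empty: Vec<(ClarityName, TypeSignature)> = vec![];
        assert!(matches!(
            TupleTypeSignature::try_from(empty),
            Err(CheckErrors::EmptyTuplesNotAllowed)
        ));

        let duplicate = TupleTypeSignature::try_from(vec![
            ("a".into(), TypeSignature::IntType),
            ("a".into(), TypeSignature::UIntType),
        ]);
        assert!(matches!(duplicate, Err(CheckErrors::NameAlreadyUsed(_))));
    }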
- .ok_or_else(|| CheckErrors::ValueTooLarge)?; - if would_be_size > MAX_VALUE_SIZE { - Err(CheckErrors::ValueTooLarge) - } else { - Ok(result) - } - } -} - -impl TupleTypeSignature { - /// Return the number of fields in this tuple type - pub fn len(&self) -> u64 { - self.type_map.len() as u64 - } - - /// Returns whether the tuple type is empty - pub fn is_empty(&self) -> bool { - self.type_map.is_empty() - } - - pub fn field_type(&self, field: &str) -> Option<&TypeSignature> { - self.type_map.get(field) - } - - pub fn get_type_map(&self) -> &BTreeMap { - &self.type_map - } - - pub fn admits(&self, epoch: &StacksEpochId, other: &TupleTypeSignature) -> Result { - if self.type_map.len() != other.type_map.len() { - return Ok(false); - } - - for (name, my_type_sig) in self.type_map.iter() { - if let Some(other_type_sig) = other.type_map.get(name) { - if !my_type_sig.admits_type(epoch, other_type_sig)? { - return Ok(false); - } - } else { - return Ok(false); - } - } - - Ok(true) - } - - pub fn parse_name_type_pair_list( - epoch: StacksEpochId, - type_def: &SymbolicExpression, - accounting: &mut A, - ) -> Result { - if let SymbolicExpressionType::List(ref name_type_pairs) = type_def.expr { - let mapped_key_types = parse_name_type_pairs(epoch, name_type_pairs, accounting)?; - TupleTypeSignature::try_from(mapped_key_types) - } else { - Err(CheckErrors::BadSyntaxExpectedListOfPairs) - } - } - - pub fn shallow_merge(&mut self, update: &mut TupleTypeSignature) { - Arc::make_mut(&mut self.type_map).append(Arc::make_mut(&mut update.type_map)); - } -} - -impl FixedFunction { - pub fn total_type_size(&self) -> Result { - let mut function_type_size = u64::from(self.returns.type_size()?); - for arg in self.args.iter() { - function_type_size = - function_type_size.cost_overflow_add(u64::from(arg.signature.type_size()?))?; - } - Ok(function_type_size) - } -} - -impl FunctionSignature { - pub fn total_type_size(&self) -> Result { - let mut function_type_size = u64::from(self.returns.type_size()?); - for arg in self.args.iter() { - function_type_size = - function_type_size.cost_overflow_add(u64::from(arg.type_size()?))?; - } - Ok(function_type_size) - } - - pub fn check_args_trait_compliance( - &self, - epoch: &StacksEpochId, - args: Vec, - ) -> Result { - if args.len() != self.args.len() { - return Ok(false); - } - let args_iter = self.args.iter().zip(args.iter()); - for (expected_arg, arg) in args_iter { - if !arg.admits_type(epoch, expected_arg)? 
{ - return Ok(false); - } - } - Ok(true) - } - - pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionSignature { - let canonicalized_args = self - .args - .iter() - .map(|arg| arg.canonicalize(epoch)) - .collect(); - - FunctionSignature { - args: canonicalized_args, - returns: self.returns.canonicalize(epoch), - } - } -} - -impl FunctionArg { - pub fn new(signature: TypeSignature, name: ClarityName) -> FunctionArg { - FunctionArg { signature, name } - } -} - -impl TypeSignature { - pub fn empty_buffer() -> Result { - Ok(SequenceType(SequenceSubtype::BufferType( - 0_u32.try_into().map_err(|_| { - CheckErrors::Expects("FAIL: Empty clarity value size is not realizable".into()) - })?, - ))) - } - - pub fn min_buffer() -> Result { - Ok(SequenceType(SequenceSubtype::BufferType( - 1_u32.try_into().map_err(|_| { - CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) - })?, - ))) - } - - pub fn min_string_ascii() -> Result { - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(1_u32.try_into().map_err(|_| { - CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) - })?), - ))) - } - - pub fn min_string_utf8() -> Result { - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(1_u32.try_into().map_err(|_| { - CheckErrors::Expects("FAIL: Min clarity value size is not realizable".into()) - })?), - ))) - } - - pub fn max_string_ascii() -> Result { - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(BufferLength::try_from(MAX_VALUE_SIZE).map_err(|_| { - CheckErrors::Expects( - "FAIL: Max Clarity Value Size is no longer realizable in ASCII Type".into(), - ) - })?), - ))) - } - - pub fn max_string_utf8() -> Result { - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(StringUTF8Length::try_from(MAX_VALUE_SIZE / 4).map_err(|_| { - CheckErrors::Expects( - "FAIL: Max Clarity Value Size is no longer realizable in UTF8 Type".into(), - ) - })?), - ))) - } - - pub fn max_buffer() -> Result { - Ok(SequenceType(SequenceSubtype::BufferType( - BufferLength::try_from(MAX_VALUE_SIZE).map_err(|_| { - CheckErrors::Expects( - "FAIL: Max Clarity Value Size is no longer realizable in Buffer Type".into(), - ) - })?, - ))) - } - - pub fn contract_name_string_ascii_type() -> Result { - TypeSignature::bound_string_ascii_type(CONTRACT_MAX_NAME_LENGTH.try_into().map_err( - |_| CheckErrors::Expects("FAIL: contract name max length exceeds u32 space".into()), - )?) - } - - pub fn bound_string_ascii_type(max_len: u32) -> Result { - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(BufferLength::try_from(max_len).map_err(|_| { - CheckErrors::Expects( - "FAIL: Max Clarity Value Size is no longer realizable in ASCII Type".into(), - ) - })?), - ))) - } - - /// If one of the types is a NoType, return Ok(the other type), otherwise return least_supertype(a, b) - pub fn factor_out_no_type( - epoch: &StacksEpochId, - a: &TypeSignature, - b: &TypeSignature, - ) -> Result { - if a.is_no_type() { - Ok(b.clone()) - } else if b.is_no_type() { - Ok(a.clone()) - } else { - Self::least_supertype(epoch, a, b) - } - } - - /// - /// This function returns the most-restrictive type that admits _both_ A and B (something like a least common supertype), - /// or Errors if no such type exists. On error, it throws NoSuperType(A,B), unless a constructor error'ed -- in which case, - /// it throws the constructor's error. 
- /// - /// For two Tuples: - /// least_supertype(A, B) := (tuple \for_each(key k) least_supertype(type_a_k, type_b_k)) - /// For two Lists: - /// least_supertype(A, B) := (list max_len: max(max_len A, max_len B), entry: least_supertype(entry_a, entry_b)) - /// if max_len A | max_len B is 0: entry := Non-empty list entry - /// For two responses: - /// least_supertype(A, B) := (response least_supertype(ok_a, ok_b), least_supertype(err_a, err_b)) - /// if any entries are NoType, use the other type's entry - /// For two options: - /// least_supertype(A, B) := (option least_supertype(some_a, some_b)) - /// if some_a | some_b is NoType, use the other type's entry. - /// For buffers: - /// least_supertype(A, B) := (buff len: max(len A, len B)) - /// For ints, uints, principals, bools: - /// least_supertype(A, B) := if A != B, error, else A - /// - pub fn least_supertype( - epoch: &StacksEpochId, - a: &TypeSignature, - b: &TypeSignature, - ) -> Result { - match epoch { - StacksEpochId::Epoch20 | StacksEpochId::Epoch2_05 => Self::least_supertype_v2_0(a, b), - StacksEpochId::Epoch21 - | StacksEpochId::Epoch22 - | StacksEpochId::Epoch23 - | StacksEpochId::Epoch24 - | StacksEpochId::Epoch25 - | StacksEpochId::Epoch30 - | StacksEpochId::Epoch31 - | StacksEpochId::Epoch32 => Self::least_supertype_v2_1(a, b), - StacksEpochId::Epoch10 => Err(CheckErrors::Expects("epoch 1.0 not supported".into())), - } - } - - pub fn least_supertype_v2_0(a: &TypeSignature, b: &TypeSignature) -> Result { - match (a, b) { - ( - TupleType(TupleTypeSignature { type_map: types_a }), - TupleType(TupleTypeSignature { type_map: types_b }), - ) => { - let mut type_map_out = BTreeMap::new(); - for (name, entry_a) in types_a.iter() { - let entry_b = types_b - .get(name) - .ok_or(CheckErrors::TypeError(a.clone(), b.clone()))?; - let entry_out = Self::least_supertype_v2_0(entry_a, entry_b)?; - type_map_out.insert(name.clone(), entry_out); - } - Ok(TupleTypeSignature::try_from(type_map_out) - .map(|x| x.into()) - .map_err(|_| CheckErrors::SupertypeTooLarge)?) - } - ( - SequenceType(SequenceSubtype::ListType(ListTypeData { - max_len: len_a, - entry_type: entry_a, - })), - SequenceType(SequenceSubtype::ListType(ListTypeData { - max_len: len_b, - entry_type: entry_b, - })), - ) => { - let entry_type = if *len_a == 0 { - *(entry_b.clone()) - } else if *len_b == 0 { - *(entry_a.clone()) - } else { - Self::least_supertype_v2_0(entry_a, entry_b)? - }; - let max_len = cmp::max(len_a, len_b); - Ok(Self::list_of(entry_type, *max_len) - .map_err(|_| CheckErrors::SupertypeTooLarge)?) - } - (ResponseType(resp_a), ResponseType(resp_b)) => { - let ok_type = - Self::factor_out_no_type(&StacksEpochId::Epoch2_05, &resp_a.0, &resp_b.0)?; - let err_type = - Self::factor_out_no_type(&StacksEpochId::Epoch2_05, &resp_a.1, &resp_b.1)?; - Ok(Self::new_response(ok_type, err_type)?) - } - (OptionalType(some_a), OptionalType(some_b)) => { - let some_type = - Self::factor_out_no_type(&StacksEpochId::Epoch2_05, some_a, some_b)?; - Ok(Self::new_option(some_type)?) 
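A worked sketch of the least-supertype rules spelled out above (the larger of two lengths for sequences, an error when no common supertype exists):

    fn least_supertype_examples() -> Result<(), CheckErrors> {
        let list_17 = TypeSignature::list_of(TypeSignature::IntType, 17)?;
        let list_42 = TypeSignature::list_of(TypeSignature::IntType, 42)?;
        assert_eq!(
            TypeSignature::least_supertype_v2_1(&list_17, &list_42)?,
            list_42
        );

        let buff_small = TypeSignature::min_buffer()?;
        let buff_large = TypeSignature::max_buffer()?;
        assert_eq!(
            TypeSignature::least_supertype_v2_1(&buff_small, &buff_large)?,
            buff_large
        );

        // int and uint have no common supertype.
        assert!(
            TypeSignature::least_supertype_v2_1(&TypeSignature::IntType, &TypeSignature::UIntType)
                .is_err()
        );
        Ok(())
    }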
- } - ( - SequenceType(SequenceSubtype::BufferType(buff_a)), - SequenceType(SequenceSubtype::BufferType(buff_b)), - ) => { - let buff_len = if u32::from(buff_a) > u32::from(buff_b) { - buff_a - } else { - buff_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::BufferType(buff_len))) - } - ( - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(string_a))), - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(string_b))), - ) => { - let str_len = if u32::from(string_a) > u32::from(string_b) { - string_a - } else { - string_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(str_len), - ))) - } - ( - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(string_a))), - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(string_b))), - ) => { - let str_len = if u32::from(string_a) > u32::from(string_b) { - string_a - } else { - string_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(str_len), - ))) - } - (NoType, x) | (x, NoType) => Ok(x.clone()), - (x, y) => { - if x == y { - Ok(x.clone()) - } else { - Err(CheckErrors::TypeError(a.clone(), b.clone())) - } - } - } - } - - pub fn least_supertype_v2_1(a: &TypeSignature, b: &TypeSignature) -> Result { - match (a, b) { - ( - TupleType(TupleTypeSignature { type_map: types_a }), - TupleType(TupleTypeSignature { type_map: types_b }), - ) => { - let mut type_map_out = BTreeMap::new(); - for (name, entry_a) in types_a.iter() { - let entry_b = types_b - .get(name) - .ok_or(CheckErrors::TypeError(a.clone(), b.clone()))?; - let entry_out = Self::least_supertype_v2_1(entry_a, entry_b)?; - type_map_out.insert(name.clone(), entry_out); - } - Ok(TupleTypeSignature::try_from(type_map_out) - .map(|x| x.into()) - .map_err(|_| CheckErrors::SupertypeTooLarge)?) - } - ( - SequenceType(SequenceSubtype::ListType(ListTypeData { - max_len: len_a, - entry_type: entry_a, - })), - SequenceType(SequenceSubtype::ListType(ListTypeData { - max_len: len_b, - entry_type: entry_b, - })), - ) => { - let entry_type = if *len_a == 0 { - *(entry_b.clone()) - } else if *len_b == 0 { - *(entry_a.clone()) - } else { - Self::least_supertype_v2_1(entry_a, entry_b)? - }; - let max_len = cmp::max(len_a, len_b); - Ok(Self::list_of(entry_type, *max_len) - .map_err(|_| CheckErrors::SupertypeTooLarge)?) - } - (ResponseType(resp_a), ResponseType(resp_b)) => { - let ok_type = - Self::factor_out_no_type(&StacksEpochId::Epoch21, &resp_a.0, &resp_b.0)?; - let err_type = - Self::factor_out_no_type(&StacksEpochId::Epoch21, &resp_a.1, &resp_b.1)?; - Ok(Self::new_response(ok_type, err_type)?) - } - (OptionalType(some_a), OptionalType(some_b)) => { - let some_type = Self::factor_out_no_type(&StacksEpochId::Epoch21, some_a, some_b)?; - Ok(Self::new_option(some_type)?) 
- } - ( - SequenceType(SequenceSubtype::BufferType(buff_a)), - SequenceType(SequenceSubtype::BufferType(buff_b)), - ) => { - let buff_len = if u32::from(buff_a) > u32::from(buff_b) { - buff_a - } else { - buff_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::BufferType(buff_len))) - } - ( - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(string_a))), - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(string_b))), - ) => { - let str_len = if u32::from(string_a) > u32::from(string_b) { - string_a - } else { - string_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::ASCII(str_len), - ))) - } - ( - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(string_a))), - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(string_b))), - ) => { - let str_len = if u32::from(string_a) > u32::from(string_b) { - string_a - } else { - string_b - } - .clone(); - Ok(SequenceType(SequenceSubtype::StringType( - StringSubtype::UTF8(str_len), - ))) - } - (NoType, x) | (x, NoType) => Ok(x.clone()), - (CallableType(x), CallableType(y)) => { - if x == y { - Ok(a.clone()) - } else { - Ok(ListUnionType(HashSet::from([x.clone(), y.clone()]))) - } - } - (ListUnionType(l), CallableType(c)) | (CallableType(c), ListUnionType(l)) => { - let mut l1 = l.clone(); - l1.insert(c.clone()); - Ok(ListUnionType(l1)) - } - (PrincipalType, CallableType(CallableSubtype::Principal(_))) - | (CallableType(CallableSubtype::Principal(_)), PrincipalType) => Ok(PrincipalType), - (PrincipalType, ListUnionType(l)) | (ListUnionType(l), PrincipalType) => { - let mut all_principals = true; - for ty in l { - match ty { - CallableSubtype::Trait(_) => { - all_principals = false; - } - CallableSubtype::Principal(_) => (), - } - } - if all_principals { - Ok(PrincipalType) - } else { - Err(CheckErrors::TypeError(a.clone(), b.clone())) - } - } - (ListUnionType(l1), ListUnionType(l2)) => { - Ok(ListUnionType(l1.union(l2).cloned().collect())) - } - (x, y) => { - if x == y { - Ok(x.clone()) - } else { - Err(CheckErrors::TypeError(a.clone(), b.clone())) - } - } + FunctionArgSignature::Single(arg_type) => { + let arg_type = arg_type.canonicalize(epoch); + FunctionArgSignature::Single(arg_type) + } } } +} - pub fn list_of(item_type: TypeSignature, max_len: u32) -> Result { - ListTypeData::new_list(item_type, max_len).map(|x| x.into()) - } - - pub fn empty_list() -> ListTypeData { - ListTypeData { - entry_type: Box::new(TypeSignature::NoType), - max_len: 0, +impl FunctionReturnsSignature { + pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionReturnsSignature { + match self { + FunctionReturnsSignature::TypeOfArgAtPosition(_) => self.clone(), + FunctionReturnsSignature::Fixed(return_type) => { + let return_type = return_type.canonicalize(epoch); + FunctionReturnsSignature::Fixed(return_type) + } } } +} - pub fn type_of(x: &Value) -> Result { - let out = match x { - Value::Principal(_) => PrincipalType, - Value::Int(_v) => IntType, - Value::UInt(_v) => UIntType, - Value::Bool(_v) => BoolType, - Value::Tuple(v) => TupleType(v.type_signature.clone()), - Value::Sequence(SequenceData::List(list_data)) => list_data.type_signature()?, - Value::Sequence(SequenceData::Buffer(buff_data)) => buff_data.type_signature()?, - Value::Sequence(SequenceData::String(CharType::ASCII(ascii_data))) => { - ascii_data.type_signature()? 
+impl FunctionType { + pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionType { + match self { + FunctionType::Variadic(arg_type, return_type) => { + let arg_type = arg_type.canonicalize(epoch); + let return_type = return_type.canonicalize(epoch); + FunctionType::Variadic(arg_type, return_type) + } + FunctionType::Fixed(fixed_function) => { + let args = fixed_function + .args + .iter() + .map(|arg| FunctionArg { + signature: arg.signature.canonicalize(epoch), + name: arg.name.clone(), + }) + .collect(); + let returns = fixed_function.returns.canonicalize(epoch); + FunctionType::Fixed(FixedFunction { args, returns }) } - Value::Sequence(SequenceData::String(CharType::UTF8(utf8_data))) => { - utf8_data.type_signature()? + FunctionType::UnionArgs(arg_types, return_type) => { + let arg_types = arg_types + .iter() + .map(|arg_type: &TypeSignature| arg_type.canonicalize(epoch)) + .collect(); + let return_type = return_type.canonicalize(epoch); + FunctionType::UnionArgs(arg_types, return_type) } - Value::Optional(v) => v.type_signature()?, - Value::Response(v) => v.type_signature()?, - Value::CallableContract(v) => { - if let Some(trait_identifier) = &v.trait_identifier { - CallableType(CallableSubtype::Trait(trait_identifier.clone())) - } else { - CallableType(CallableSubtype::Principal(v.contract_identifier.clone())) - } + FunctionType::ArithmeticVariadic => FunctionType::ArithmeticVariadic, + FunctionType::ArithmeticUnary => FunctionType::ArithmeticUnary, + FunctionType::ArithmeticBinary => FunctionType::ArithmeticBinary, + FunctionType::ArithmeticComparison => FunctionType::ArithmeticComparison, + FunctionType::Binary(arg1, arg2, return_type) => { + let arg1 = arg1.canonicalize(epoch); + let arg2 = arg2.canonicalize(epoch); + let return_type = return_type.canonicalize(epoch); + FunctionType::Binary(arg1, arg2, return_type) } - }; - - Ok(out) - } - - pub fn literal_type_of(x: &Value) -> Result { - match x { - Value::Principal(PrincipalData::Contract(contract_id)) => Ok(CallableType( - CallableSubtype::Principal(contract_id.clone()), - )), - _ => Self::type_of(x), } } +} - // Checks if resulting type signature is of valid size. - pub fn construct_parent_list_type(args: &[Value]) -> Result { - let children_types: Result> = args.iter().map(TypeSignature::type_of).collect(); - TypeSignature::parent_list_type(&children_types?) +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct FunctionArg { + pub signature: TypeSignature, + pub name: ClarityName, +} + +impl From for FunctionSignature { + fn from(data: FixedFunction) -> FunctionSignature { + let FixedFunction { args, returns } = data; + let args = args.into_iter().map(|x| x.signature).collect(); + FunctionSignature { args, returns } } +} - pub fn parent_list_type( - children: &[TypeSignature], - ) -> std::result::Result { - if let Some((first, rest)) = children.split_first() { - let mut current_entry_type = first.clone(); - for next_entry in rest.iter() { - current_entry_type = Self::least_supertype_v2_1(¤t_entry_type, next_entry)?; - } - let len = u32::try_from(children.len()).map_err(|_| CheckErrors::ValueTooLarge)?; - ListTypeData::new_list(current_entry_type, len) +/// This trait is used to parse tuple type signatures from Clarity expressions. +/// This is not included in clarity-serialization because it requires the +/// [`CostTracker`] trait. 
+pub trait TupleTypeSignatureExt { + fn parse_name_type_pair_list( + epoch: StacksEpochId, + type_def: &SymbolicExpression, + accounting: &mut A, + ) -> Result; +} + +impl TupleTypeSignatureExt for TupleTypeSignature { + fn parse_name_type_pair_list( + epoch: StacksEpochId, + type_def: &SymbolicExpression, + accounting: &mut A, + ) -> Result { + if let SymbolicExpressionType::List(ref name_type_pairs) = type_def.expr { + let mapped_key_types = parse_name_type_pairs(epoch, name_type_pairs, accounting)?; + TupleTypeSignature::try_from(mapped_key_types) } else { - Ok(TypeSignature::empty_list()) + Err(CheckErrors::BadSyntaxExpectedListOfPairs) } } } -/// Parsing functions. -impl TypeSignature { +/// This trait is used to parse type signatures from Clarity expressions. +/// This is not included in clarity-serialization because it requires the +/// [`CostTracker`] trait. +pub trait TypeSignatureExt { + fn parse_atom_type(typename: &str) -> Result; + fn parse_list_type_repr( + epoch: StacksEpochId, + type_args: &[SymbolicExpression], + accounting: &mut A, + ) -> Result; + fn parse_tuple_type_repr( + epoch: StacksEpochId, + type_args: &[SymbolicExpression], + accounting: &mut A, + ) -> Result; + fn parse_buff_type_repr(type_args: &[SymbolicExpression]) -> Result; + fn parse_string_utf8_type_repr(type_args: &[SymbolicExpression]) -> Result; + fn parse_string_ascii_type_repr(type_args: &[SymbolicExpression]) -> Result; + fn parse_optional_type_repr( + epoch: StacksEpochId, + type_args: &[SymbolicExpression], + accounting: &mut A, + ) -> Result; + fn parse_response_type_repr( + epoch: StacksEpochId, + type_args: &[SymbolicExpression], + accounting: &mut A, + ) -> Result; + fn parse_type_repr( + epoch: StacksEpochId, + x: &SymbolicExpression, + accounting: &mut A, + ) -> Result; + fn parse_trait_type_repr( + type_args: &[SymbolicExpression], + accounting: &mut A, + epoch: StacksEpochId, + clarity_version: ClarityVersion, + ) -> Result>; + #[cfg(test)] + fn from_string(val: &str, version: ClarityVersion, epoch: StacksEpochId) -> Self; +} + +impl TypeSignatureExt for TypeSignature { fn parse_atom_type(typename: &str) -> Result { match typename { "int" => Ok(TypeSignature::IntType), @@ -1569,7 +332,7 @@ impl TypeSignature { TypeSignature::new_option(inner_type) } - pub fn parse_response_type_repr( + fn parse_response_type_repr( epoch: StacksEpochId, type_args: &[SymbolicExpression], accounting: &mut A, @@ -1582,7 +345,7 @@ impl TypeSignature { TypeSignature::new_response(ok_type, err_type) } - pub fn parse_type_repr( + fn parse_type_repr( epoch: StacksEpochId, x: &SymbolicExpression, accounting: &mut A, @@ -1643,7 +406,7 @@ impl TypeSignature { } } - pub fn parse_trait_type_repr( + fn parse_trait_type_repr( type_args: &[SymbolicExpression], accounting: &mut A, epoch: StacksEpochId, @@ -1705,7 +468,9 @@ impl TypeSignature { } #[cfg(test)] - pub fn from_string(val: &str, version: ClarityVersion, epoch: StacksEpochId) -> Self { + fn from_string(val: &str, version: ClarityVersion, epoch: StacksEpochId) -> Self { + use clarity_serialization::types::QualifiedContractIdentifier; + use crate::vm::ast::parse; let expr = &parse( &QualifiedContractIdentifier::transient(), @@ -1718,205 +483,63 @@ impl TypeSignature { } } -/// These implement the size calculations in TypeSignatures -/// in constructors of TypeSignatures, only `.inner_size()` may be called. -/// .inner_size is a failable method to compute the size of the type signature, -/// Failures indicate that a type signature represents _too large_ of a value. 
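Because parsing now lives behind these extension traits rather than on the types themselves, callers must bring the trait into scope to reach the CostTracker-dependent methods, which is why they stay in `clarity` rather than clarity-serialization. The snippet below is a sketch only, under a few assumptions not confirmed by this diff: the trait's module path, the argument order of `ast::parse`, and `()` serving as a free no-op `CostTracker`, mirroring the `from_string` helper above.

    // Sketch only: module paths and the `()` cost tracker are assumptions.
    use crate::vm::ast::parse;
    use crate::vm::types::signatures::TypeSignatureExt;

    fn parse_uint_list_type(
        version: ClarityVersion,
        epoch: StacksEpochId,
    ) -> Result<TypeSignature, CheckErrors> {
        let exprs = parse(
            &QualifiedContractIdentifier::transient(),
            "(list 10 uint)",
            version,
            epoch,
        )
        .unwrap();
        // `()` is assumed to implement CostTracker as a free/no-op tracker.
        TypeSignature::parse_type_repr(epoch, &exprs[0], &mut ())
    }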
-/// TypeSignature constructors will fail instead of constructing such a type. -/// because of this, the public interface to size is infallible. -impl TypeSignature { - pub fn depth(&self) -> u8 { - // unlike inner_size, depth will never threaten to overflow, - // because a new type can only increase depth by 1. - match self { - // NoType's may be asked for their size at runtime -- - // legal constructions like `(ok 1)` have NoType parts (if they have unknown error variant types). - CallableType(_) - | TraitReferenceType(_) - | ListUnionType(_) - | NoType - | IntType - | UIntType - | BoolType - | PrincipalType - | SequenceType(SequenceSubtype::BufferType(_)) - | SequenceType(SequenceSubtype::StringType(_)) => 1, - TupleType(tuple_sig) => 1 + tuple_sig.max_depth(), - SequenceType(SequenceSubtype::ListType(list_type)) => { - 1 + list_type.get_list_item_type().depth() - } - OptionalType(t) => 1 + t.depth(), - ResponseType(v) => 1 + cmp::max(v.0.depth(), v.1.depth()), - } - } - - pub fn size(&self) -> Result { - self.inner_size()?.ok_or_else(|| { - CheckErrors::Expects( - "FAIL: .size() overflowed on too large of a type. construction should have failed!" - .into(), - ) - }) - } - - fn inner_size(&self) -> Result> { - let out = match self { - // NoType's may be asked for their size at runtime -- - // legal constructions like `(ok 1)` have NoType parts (if they have unknown error variant types). - NoType => Some(1), - IntType => Some(16), - UIntType => Some(16), - BoolType => Some(1), - PrincipalType => Some(148), // 20+128 - TupleType(tuple_sig) => tuple_sig.inner_size()?, - SequenceType(SequenceSubtype::BufferType(len)) - | SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(len))) => { - Some(4 + u32::from(len)) - } - SequenceType(SequenceSubtype::ListType(list_type)) => list_type.inner_size()?, - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(len))) => { - Some(4 + 4 * u32::from(len)) - } - OptionalType(t) => t.size()?.checked_add(WRAPPER_VALUE_SIZE), - ResponseType(v) => { - // ResponseTypes are 1 byte for the committed bool, - // plus max(err_type, ok_type) - let (t, s) = (&v.0, &v.1); - let t_size = t.size()?; - let s_size = s.size()?; - cmp::max(t_size, s_size).checked_add(WRAPPER_VALUE_SIZE) - } - CallableType(CallableSubtype::Principal(_)) | ListUnionType(_) => Some(148), // 20+128 - CallableType(CallableSubtype::Trait(_)) | TraitReferenceType(_) => Some(276), // 20+128+128 - }; - Ok(out) - } - - pub fn type_size(&self) -> Result { - self.inner_type_size() - .ok_or_else(|| CheckErrors::ValueTooLarge) - } - - /// Returns the size of the _type signature_ - fn inner_type_size(&self) -> Option { - match self { - // NoType's may be asked for their size at runtime -- - // legal constructions like `(ok 1)` have NoType parts (if they have unknown error variant types). - // These types all only use ~1 byte for their type enum - NoType | IntType | UIntType | BoolType | PrincipalType => Some(1), - // u32 length + type enum - TupleType(tuple_sig) => tuple_sig.type_size(), - SequenceType(SequenceSubtype::BufferType(_)) => Some(1 + 4), - SequenceType(SequenceSubtype::ListType(list_type)) => list_type.type_size(), - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(_))) => Some(1 + 4), - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(_))) => Some(1 + 4), - OptionalType(t) => t.inner_type_size()?.checked_add(1), - ResponseType(v) => { - let (t, s) = (&v.0, &v.1); - t.inner_type_size()? - .checked_add(s.inner_type_size()?)? 
- .checked_add(1) - } - CallableType(_) | TraitReferenceType(_) | ListUnionType(_) => Some(1), +impl FixedFunction { + pub fn total_type_size(&self) -> Result { + let mut function_type_size = u64::from(self.returns.type_size()?); + for arg in self.args.iter() { + function_type_size = + function_type_size.cost_overflow_add(u64::from(arg.signature.type_size()?))?; } + Ok(function_type_size) } } -impl ListTypeData { - /// List Size: type_signature_size + max_len * entry_type.size() - fn inner_size(&self) -> Result> { - let total_size = self - .entry_type - .size()? - .checked_mul(self.max_len) - .and_then(|x| x.checked_add(self.type_size()?)); - match total_size { - Some(total_size) => { - if total_size > MAX_VALUE_SIZE { - Ok(None) - } else { - Ok(Some(total_size)) - } - } - None => Ok(None), - } - } - - fn type_size(&self) -> Option { - let total_size = self.entry_type.inner_type_size()?.checked_add(4 + 1)?; // 1 byte for Type enum, 4 for max_len. - if total_size > MAX_VALUE_SIZE { - None - } else { - Some(total_size) +impl FunctionSignature { + pub fn total_type_size(&self) -> Result { + let mut function_type_size = u64::from(self.returns.type_size()?); + for arg in self.args.iter() { + function_type_size = + function_type_size.cost_overflow_add(u64::from(arg.type_size()?))?; } + Ok(function_type_size) } -} - -impl TupleTypeSignature { - /// Tuple Size: - /// size( btreemap ) = 2*map.len() + sum(names) + sum(values) - pub fn type_size(&self) -> Option { - let mut type_map_size = u32::try_from(self.type_map.len()).ok()?.checked_mul(2)?; - for (name, type_signature) in self.type_map.iter() { - // we only accept ascii names, so 1 char = 1 byte. - type_map_size = type_map_size - .checked_add(type_signature.inner_type_size()?)? - // name.len() is bound to MAX_STRING_LEN (128), so `as u32` won't ever truncate - .checked_add(name.len() as u32)?; + pub fn check_args_trait_compliance( + &self, + epoch: &StacksEpochId, + args: Vec, + ) -> Result { + if args.len() != self.args.len() { + return Ok(false); } - - if type_map_size > MAX_VALUE_SIZE { - None - } else { - Some(type_map_size) + let args_iter = self.args.iter().zip(args.iter()); + for (expected_arg, arg) in args_iter { + if !arg.admits_type(epoch, expected_arg)? { + return Ok(false); + } } + Ok(true) } +} - pub fn size(&self) -> Result { - self.inner_size()? - .ok_or_else(|| CheckErrors::Expects("size() overflowed on a constructed type.".into())) - } +impl FunctionSignature { + pub fn canonicalize(&self, epoch: &StacksEpochId) -> FunctionSignature { + let canonicalized_args = self + .args + .iter() + .map(|arg| arg.canonicalize(epoch)) + .collect(); - fn max_depth(&self) -> u8 { - let mut max = 0; - for (_name, type_signature) in self.type_map.iter() { - max = cmp::max(max, type_signature.depth()) + FunctionSignature { + args: canonicalized_args, + returns: self.returns.canonicalize(epoch), } - max } +} - /// Tuple Size: - /// size( btreemap ) + type_size - /// size( btreemap ) = 2*map.len() + sum(names) + sum(values) - fn inner_size(&self) -> Result> { - let Some(mut total_size) = u32::try_from(self.type_map.len()) - .ok() - .and_then(|x| x.checked_mul(2)) - .and_then(|x| x.checked_add(self.type_size()?)) - else { - return Ok(None); - }; - - for (name, type_signature) in self.type_map.iter() { - // we only accept ascii names, so 1 char = 1 byte. - total_size = if let Some(new_size) = total_size.checked_add(type_signature.size()?) 
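A small worked example of the accounting above for `(tuple (a int))`, assuming the moved helpers keep these formulas: the type signature costs 2·1 (map overhead) + 1 (the one-byte name "a") + 1 (the int type tag) = 4 bytes, and the maximum value size is 2·1 + 4 (type size) + 16 (an int value) + 1 (the name) = 23 bytes.

    fn tuple_size_example() {
        let tuple_ty =
            TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]).unwrap();
        assert_eq!(tuple_ty.type_size(), Some(4));
        assert_eq!(tuple_ty.size().unwrap(), 23);
        assert_eq!(TypeSignature::IntType.size().unwrap(), 16);
    }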
{ - new_size - } else { - return Ok(None); - }; - total_size = if let Some(new_size) = total_size.checked_add(name.len() as u32) { - new_size - } else { - return Ok(None); - }; - } - - if total_size > MAX_VALUE_SIZE { - Ok(None) - } else { - Ok(Some(total_size)) - } +impl FunctionArg { + pub fn new(signature: TypeSignature, name: ClarityName) -> FunctionArg { + FunctionArg { signature, name } } } @@ -1967,85 +590,6 @@ pub fn parse_name_type_pairs( key_types } -impl fmt::Display for TupleTypeSignature { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "(tuple")?; - let mut type_strs: Vec<_> = self.type_map.iter().collect(); - type_strs.sort_unstable_by_key(|x| x.0); - for (field_name, field_type) in type_strs { - write!(f, " ({} {field_type})", &**field_name)?; - } - write!(f, ")") - } -} - -impl fmt::Debug for TupleTypeSignature { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "TupleTypeSignature {{")?; - for (field_name, field_type) in self.type_map.iter() { - write!(f, " \"{}\": {field_type},", &**field_name)?; - } - write!(f, "}}") - } -} - -impl fmt::Display for AssetIdentifier { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "{}::{}", - &*self.contract_identifier.to_string(), - &*self.asset_name - ) - } -} - -impl fmt::Display for TypeSignature { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - NoType => write!(f, "UnknownType"), - IntType => write!(f, "int"), - UIntType => write!(f, "uint"), - BoolType => write!(f, "bool"), - OptionalType(t) => write!(f, "(optional {t})"), - ResponseType(v) => write!(f, "(response {} {})", v.0, v.1), - TupleType(t) => write!(f, "{t}"), - PrincipalType => write!(f, "principal"), - SequenceType(SequenceSubtype::BufferType(len)) => write!(f, "(buff {len})"), - SequenceType(SequenceSubtype::ListType(list_type_data)) => write!( - f, - "(list {} {})", - list_type_data.max_len, list_type_data.entry_type - ), - SequenceType(SequenceSubtype::StringType(StringSubtype::ASCII(len))) => { - write!(f, "(string-ascii {len})") - } - SequenceType(SequenceSubtype::StringType(StringSubtype::UTF8(len))) => { - write!(f, "(string-utf8 {len})") - } - CallableType(CallableSubtype::Trait(trait_id)) | TraitReferenceType(trait_id) => { - write!(f, "<{trait_id}>") - } - CallableType(CallableSubtype::Principal(contract_id)) => { - write!(f, "(principal {contract_id})") - } - ListUnionType(_) => write!(f, "principal"), - } - } -} - -impl fmt::Display for BufferLength { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -impl fmt::Display for StringUTF8Length { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} - impl fmt::Display for FunctionArg { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.signature) @@ -2063,6 +607,7 @@ mod test { use super::CheckErrors::*; use super::*; use crate::vm::tests::test_clarity_versions; + use crate::vm::types::QualifiedContractIdentifier; use crate::vm::{execute, ClarityVersion}; fn fail_parse(val: &str, version: ClarityVersion, epoch: StacksEpochId) -> CheckErrors { @@ -2148,520 +693,4 @@ mod test { let _ = TypeSignature::from_string(desc, version, epoch); // panics on failed types. 
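The Display impls above render types in Clarity's source syntax; a brief sketch of the expected output, assuming the moved impls keep this formatting:

    fn display_examples() {
        let list_ty = TypeSignature::list_of(TypeSignature::IntType, 5).unwrap();
        assert_eq!(list_ty.to_string(), "(list 5 int)");

        let tuple_ty = TypeSignature::TupleType(
            TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]).unwrap(),
        );
        assert_eq!(tuple_ty.to_string(), "(tuple (a int))");

        assert_eq!(
            TypeSignature::new_option(TypeSignature::BoolType)
                .unwrap()
                .to_string(),
            "(optional bool)"
        );
    }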
} } - - #[test] - fn test_least_supertype() { - let callables = [ - CallableSubtype::Principal(QualifiedContractIdentifier::local("foo").unwrap()), - CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - }), - ]; - let list_union = ListUnionType(callables.clone().into()); - let callables2 = [ - CallableSubtype::Principal(QualifiedContractIdentifier::local("bar").unwrap()), - CallableSubtype::Trait(TraitIdentifier { - name: "bar".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - }), - ]; - let list_union2 = ListUnionType(callables2.clone().into()); - let list_union_merged = ListUnionType(HashSet::from_iter( - [callables, callables2].concat().iter().cloned(), - )); - let callable_principals = [ - CallableSubtype::Principal(QualifiedContractIdentifier::local("foo").unwrap()), - CallableSubtype::Principal(QualifiedContractIdentifier::local("bar").unwrap()), - ]; - let list_union_principals = ListUnionType(callable_principals.into()); - - let notype_pairs = [ - // NoType with X should result in X - ( - (TypeSignature::NoType, TypeSignature::NoType), - TypeSignature::NoType, - ), - ( - (TypeSignature::NoType, TypeSignature::IntType), - TypeSignature::IntType, - ), - ( - (TypeSignature::NoType, TypeSignature::UIntType), - TypeSignature::UIntType, - ), - ( - (TypeSignature::NoType, TypeSignature::BoolType), - TypeSignature::BoolType, - ), - ( - (TypeSignature::NoType, TypeSignature::min_buffer().unwrap()), - TypeSignature::min_buffer().unwrap(), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::max_string_utf8().unwrap(), - ), - TypeSignature::max_string_utf8().unwrap(), - ), - ( - (TypeSignature::NoType, TypeSignature::PrincipalType), - TypeSignature::PrincipalType, - ), - ( - ( - TypeSignature::NoType, - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - ), - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - ), - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ), - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ), - ( - ( - TypeSignature::NoType, - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - ), - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - ), - ( - (TypeSignature::NoType, list_union.clone()), - list_union.clone(), - ), - ]; - - for (pair, expected) in 
notype_pairs { - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap(), - expected - ); - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap(), - expected - ); - } - - let simple_pairs = [ - ((IntType, IntType), IntType), - ((UIntType, UIntType), UIntType), - ((BoolType, BoolType), BoolType), - ( - ( - TypeSignature::max_buffer().unwrap(), - TypeSignature::max_buffer().unwrap(), - ), - TypeSignature::max_buffer().unwrap(), - ), - ( - ( - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - ( - ( - TypeSignature::bound_string_ascii_type(17).unwrap(), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - ( - ( - TypeSignature::max_string_utf8().unwrap(), - TypeSignature::max_string_utf8().unwrap(), - ), - TypeSignature::max_string_utf8().unwrap(), - ), - ( - (TypeSignature::PrincipalType, TypeSignature::PrincipalType), - TypeSignature::PrincipalType, - ), - ( - ( - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - ), - ( - ( - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - ), - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - ), - ( - ( - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - ), - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - ), - ( - ( - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ), - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ), - ( - ( - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - ), - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - ), - ((list_union.clone(), list_union.clone()), list_union.clone()), - ]; - - for (pair, expected) in simple_pairs { - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap(), - expected - ); - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap(), - expected - ); - } - - let matched_pairs = [ - ( - ( - TypeSignature::max_buffer().unwrap(), - TypeSignature::min_buffer().unwrap(), - ), - TypeSignature::max_buffer().unwrap(), - ), - ( - ( - TypeSignature::list_of(TypeSignature::IntType, 17).unwrap(), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - ( - ( - 
TypeSignature::min_string_ascii().unwrap(), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - ( - ( - TypeSignature::min_string_utf8().unwrap(), - TypeSignature::max_string_utf8().unwrap(), - ), - TypeSignature::max_string_utf8().unwrap(), - ), - ( - ( - TypeSignature::PrincipalType, - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ), - TypeSignature::PrincipalType, - ), - ( - (TypeSignature::PrincipalType, list_union_principals.clone()), - TypeSignature::PrincipalType, - ), - ( - ( - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::local("foo").unwrap(), - )), - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::local("bar").unwrap(), - )), - ), - list_union_principals.clone(), - ), - ( - (list_union.clone(), list_union2.clone()), - list_union_merged.clone(), - ), - ]; - - for (pair, expected) in matched_pairs { - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap(), - expected - ); - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap(), - expected - ); - } - - let compound_pairs = [ - ( - ( - TypeSignature::list_of( - TypeSignature::SequenceType(SequenceSubtype::BufferType( - 16_u32.try_into().unwrap(), - )), - 5, - ) - .unwrap(), - TypeSignature::list_of(TypeSignature::min_buffer().unwrap(), 3).unwrap(), - ), - TypeSignature::list_of( - TypeSignature::SequenceType(SequenceSubtype::BufferType( - 16_u32.try_into().unwrap(), - )), - 5, - ) - .unwrap(), - ), - ( - ( - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![( - "b".into(), - TypeSignature::min_string_ascii().unwrap(), - )]) - .unwrap(), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![( - "b".into(), - TypeSignature::bound_string_ascii_type(17).unwrap(), - )]) - .unwrap(), - ), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![( - "b".into(), - TypeSignature::bound_string_ascii_type(17).unwrap(), - )]) - .unwrap(), - ), - ), - ( - ( - TypeSignature::new_option(TypeSignature::min_string_ascii().unwrap()).unwrap(), - TypeSignature::new_option(TypeSignature::bound_string_ascii_type(17).unwrap()) - .unwrap(), - ), - TypeSignature::new_option(TypeSignature::bound_string_ascii_type(17).unwrap()) - .unwrap(), - ), - ( - ( - TypeSignature::new_response(TypeSignature::PrincipalType, list_union.clone()) - .unwrap(), - TypeSignature::new_response( - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - list_union2.clone(), - ) - .unwrap(), - ), - TypeSignature::new_response(TypeSignature::PrincipalType, list_union_merged) - .unwrap(), - ), - ]; - - for (pair, expected) in compound_pairs { - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap(), - expected - ); - assert_eq!( - TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap(), - expected - ); - } - - let bad_pairs = [ - (IntType, UIntType), - (BoolType, IntType), - ( - TypeSignature::max_buffer().unwrap(), - TypeSignature::max_string_ascii().unwrap(), - ), - ( - TypeSignature::list_of(TypeSignature::UIntType, 42).unwrap(), - TypeSignature::list_of(TypeSignature::IntType, 42).unwrap(), - ), - ( - TypeSignature::min_string_utf8().unwrap(), - TypeSignature::bound_string_ascii_type(17).unwrap(), - ), - ( - TypeSignature::min_string_utf8().unwrap(), - TypeSignature::min_buffer().unwrap(), - ), - ( - 
TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::IntType)]) - .unwrap(), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("a".into(), TypeSignature::UIntType)]) - .unwrap(), - ), - ), - ( - TypeSignature::new_option(TypeSignature::IntType).unwrap(), - TypeSignature::new_option(TypeSignature::min_string_utf8().unwrap()).unwrap(), - ), - ( - TypeSignature::new_response(TypeSignature::IntType, TypeSignature::BoolType) - .unwrap(), - TypeSignature::new_response(TypeSignature::BoolType, TypeSignature::IntType) - .unwrap(), - ), - ( - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - TypeSignature::IntType, - ), - ( - TypeSignature::CallableType(CallableSubtype::Trait(TraitIdentifier { - name: "foo".into(), - contract_identifier: QualifiedContractIdentifier::transient(), - })), - TypeSignature::PrincipalType, - ), - (list_union.clone(), TypeSignature::PrincipalType), - ( - TypeSignature::min_string_ascii().unwrap(), - list_union_principals, - ), - ( - TypeSignature::list_of( - TypeSignature::SequenceType(SequenceSubtype::BufferType( - 16_u32.try_into().unwrap(), - )), - 5, - ) - .unwrap(), - TypeSignature::list_of(TypeSignature::min_string_ascii().unwrap(), 3).unwrap(), - ), - ( - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![( - "b".into(), - TypeSignature::min_string_ascii().unwrap(), - )]) - .unwrap(), - ), - TypeSignature::TupleType( - TupleTypeSignature::try_from(vec![("b".into(), TypeSignature::UIntType)]) - .unwrap(), - ), - ), - ( - TypeSignature::new_option(TypeSignature::min_string_ascii().unwrap()).unwrap(), - TypeSignature::new_option(TypeSignature::min_string_utf8().unwrap()).unwrap(), - ), - ( - TypeSignature::new_response(TypeSignature::PrincipalType, list_union).unwrap(), - TypeSignature::new_response( - list_union2, - TypeSignature::CallableType(CallableSubtype::Principal( - QualifiedContractIdentifier::transient(), - )), - ) - .unwrap(), - ), - ]; - - for pair in bad_pairs { - matches!( - TypeSignature::least_supertype_v2_1(&pair.0, &pair.1).unwrap_err(), - CheckErrors::TypeError(..) - ); - matches!( - TypeSignature::least_supertype_v2_1(&pair.1, &pair.0).unwrap_err(), - CheckErrors::TypeError(..) - ); - } - } } diff --git a/stackslib/Cargo.toml b/stackslib/Cargo.toml index 4ae68eb746..c24880a69f 100644 --- a/stackslib/Cargo.toml +++ b/stackslib/Cargo.toml @@ -50,7 +50,6 @@ stacks-common = { path = "../stacks-common" } pox-locking = { path = "../pox-locking" } libstackerdb = { path = "../libstackerdb" } siphasher = "0.3.7" -hashbrown = { workspace = true } rusqlite = { workspace = true } time = "0.3.41" toml = { workspace = true } diff --git a/stackslib/src/blockstack_cli.rs b/stackslib/src/blockstack_cli.rs index 11a5b5679f..bcf23757e7 100644 --- a/stackslib/src/blockstack_cli.rs +++ b/stackslib/src/blockstack_cli.rs @@ -92,7 +92,7 @@ is that the miner chooses, but you can decide which with the following options: --block-only indicates to mine this transaction only in a block The post-condition mode for the transaction can be controlled with the following option: - + --postcondition-mode indicates the post-condition mode for the contract. Allowed values: [`allow`, `deny`]. Default: `deny`. 
"; @@ -377,7 +377,7 @@ fn extract_flag(args: &mut Vec, flag: &str) -> bool { /// /// # Returns /// -/// An `Option` containing the value following the flag if both were found and removed; +/// An `Option` containing the value following the flag if both were found and removed; /// returns `None` if the flag was not found or no value follows the flag. fn extract_flag_with_value(args: &mut Vec, flag: &str) -> Option { args.iter() diff --git a/stackslib/src/chainstate/nakamoto/test_signers.rs b/stackslib/src/chainstate/nakamoto/test_signers.rs index 1f0ab5ffe9..1aab6b4467 100644 --- a/stackslib/src/chainstate/nakamoto/test_signers.rs +++ b/stackslib/src/chainstate/nakamoto/test_signers.rs @@ -14,8 +14,9 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use std::collections::HashMap; + use clarity::util::secp256k1::{MessageSignature, Secp256k1PrivateKey, Secp256k1PublicKey}; -use hashbrown::HashMap; use rand::distributions::Standard; use rand::Rng; use stacks_common::address::*; @@ -191,7 +192,7 @@ impl TestSigners { .collect::>(); info!( - "TestSigners: Signing Nakamoto block. TestSigners has {} signers. Reward set has {} signers.", + "TestSigners: Signing Nakamoto block. TestSigners has {} signers. Reward set has {} signers.", test_signers_by_pk.len(), reward_set_keys.len(), ); diff --git a/stackslib/src/chainstate/stacks/boot/docs.rs b/stackslib/src/chainstate/stacks/boot/docs.rs index 6e84cdda8e..d791b517f1 100644 --- a/stackslib/src/chainstate/stacks/boot/docs.rs +++ b/stackslib/src/chainstate/stacks/boot/docs.rs @@ -13,9 +13,10 @@ // // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use std::collections::{HashMap, HashSet}; + use clarity::vm::docs::contracts::{produce_docs_refs, ContractSupportDocs}; use clarity::vm::ClarityVersion; -use hashbrown::{HashMap, HashSet}; use super::STACKS_BOOT_CODE_MAINNET; @@ -104,7 +105,7 @@ When transferring a name, you have the option to also clear the name's zone file ("name-renewal", "Depending in the namespace rules, a name can expire. For example, names in the .id namespace expire after 2 years. You need to send a name renewal every so often to keep your name. You will pay the registration cost of your name to the namespace's designated burn address when you renew it. -When a name expires, it enters a \"grace period\". The period is set to 5000 blocks (a month) but can be configured for each namespace. +When a name expires, it enters a \"grace period\". The period is set to 5000 blocks (a month) but can be configured for each namespace. It will stop resolving in the grace period, and all of the above operations will cease to be honored by the BNS consensus rules. You may, however, send a NAME_RENEWAL during this grace period to preserve your name. After the grace period, everybody can register that name again.