From c4b9e15b2e7ee80f8a9e03b85b3c6b00f0a7a489 Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 11:47:39 -0500 Subject: [PATCH 1/6] rm old docs --- .cursor/rules/ai.mdc | 134 --- .cursor/rules/blockchain-interactions.mdc | 158 --- .cursor/rules/commands.mdc | 45 - .cursor/rules/component-rules.mdc | 909 ------------------ .cursor/rules/component.mdc | 169 ---- .cursor/rules/cursor-rules.mdc | 70 -- .cursor/rules/network-requests.mdc | 105 -- .cursor/rules/overview.mdc | 47 - .cursor/rules/service.mdc | 115 --- .cursor/rules/submission.mdc | 95 -- .cursor/rules/template.mdc | 103 -- .cursor/rules/triggers.mdc | 181 ---- .cursor/rules/variables.mdc | 81 -- .cursor/rules/workflows.mdc | 146 --- docs/benefits.mdx | 36 - docs/design.mdx | 45 - docs/handbook/ai.mdx | 186 ---- docs/handbook/commands.mdx | 44 - .../components/blockchain-interactions.mdx | 225 ----- docs/handbook/components/component.mdx | 209 ---- docs/handbook/components/network-requests.mdx | 132 --- docs/handbook/components/variables.mdx | 79 -- docs/handbook/overview.mdx | 46 - docs/handbook/service.mdx | 147 --- docs/handbook/submission.mdx | 106 -- docs/handbook/template.mdx | 113 --- docs/handbook/triggers.mdx | 204 ---- docs/handbook/workflows.mdx | 140 --- docs/how-it-works.mdx | 105 -- docs/index.mdx | 47 - docs/overview.mdx | 94 -- docs/resources/llms.mdx | 44 - docs/tutorial/1-overview.mdx | 42 - docs/tutorial/2-setup.mdx | 224 ----- docs/tutorial/3-project.mdx | 68 -- docs/tutorial/4-component.mdx | 225 ----- docs/tutorial/5-build.mdx | 91 -- docs/tutorial/6-run-service.mdx | 186 ---- docs/tutorial/7-prediction.mdx | 103 -- test_utils/README.md | 66 -- test_utils/validate_component.sh | 574 ----------- 41 files changed, 5939 deletions(-) delete mode 100644 .cursor/rules/ai.mdc delete mode 100644 .cursor/rules/blockchain-interactions.mdc delete mode 100644 .cursor/rules/commands.mdc delete mode 100644 .cursor/rules/component-rules.mdc delete mode 100644 
.cursor/rules/component.mdc delete mode 100644 .cursor/rules/cursor-rules.mdc delete mode 100644 .cursor/rules/network-requests.mdc delete mode 100644 .cursor/rules/overview.mdc delete mode 100644 .cursor/rules/service.mdc delete mode 100644 .cursor/rules/submission.mdc delete mode 100644 .cursor/rules/template.mdc delete mode 100644 .cursor/rules/triggers.mdc delete mode 100644 .cursor/rules/variables.mdc delete mode 100644 .cursor/rules/workflows.mdc delete mode 100644 docs/benefits.mdx delete mode 100644 docs/design.mdx delete mode 100644 docs/handbook/ai.mdx delete mode 100644 docs/handbook/commands.mdx delete mode 100644 docs/handbook/components/blockchain-interactions.mdx delete mode 100644 docs/handbook/components/component.mdx delete mode 100644 docs/handbook/components/network-requests.mdx delete mode 100644 docs/handbook/components/variables.mdx delete mode 100644 docs/handbook/overview.mdx delete mode 100644 docs/handbook/service.mdx delete mode 100644 docs/handbook/submission.mdx delete mode 100644 docs/handbook/template.mdx delete mode 100644 docs/handbook/triggers.mdx delete mode 100644 docs/handbook/workflows.mdx delete mode 100644 docs/how-it-works.mdx delete mode 100644 docs/index.mdx delete mode 100644 docs/overview.mdx delete mode 100644 docs/resources/llms.mdx delete mode 100644 docs/tutorial/1-overview.mdx delete mode 100644 docs/tutorial/2-setup.mdx delete mode 100644 docs/tutorial/3-project.mdx delete mode 100644 docs/tutorial/4-component.mdx delete mode 100644 docs/tutorial/5-build.mdx delete mode 100644 docs/tutorial/6-run-service.mdx delete mode 100644 docs/tutorial/7-prediction.mdx delete mode 100644 test_utils/README.md delete mode 100755 test_utils/validate_component.sh diff --git a/.cursor/rules/ai.mdc b/.cursor/rules/ai.mdc deleted file mode 100644 index 7db78dde..00000000 --- a/.cursor/rules/ai.mdc +++ /dev/null @@ -1,134 +0,0 @@ ---- -description: - -alwaysApply: true ---- -# AI-powered Component Creation - -Use Claude or Cursor AI 
agents to generate WAVS components with minimal prompts. Components created by AI require thorough review and testing before production use. - -## Setup - -1. Clone the WAVS Foundry Template and complete system setup: - -```sh -git clone https://github.com/Lay3rLabs/wavs-foundry-template.git -cd wavs-foundry-template -# Follow README system setup instructions -``` - -2. Install and configure Claude Code ([Claude docs](mdc:https:/docs.anthropic.com/en/docs/claude-code/getting-started)) or download Cursor ([Cursor downloads](mdc:https:/www.cursor.com/downloads)). - -3. Open Claude or Cursor in the template root: - -```sh -claude -# or -cursor . -``` - -4. For Cursor, always attach the `component-rules.mdc` file to the chat prompt: - -```sh Chat -@component-rules.mdc -``` - -## Prompting AI Agents - -- Use short, clear instructions. -- Provide relevant docs or `.md` files. -- Include API endpoints and response structures if needed. -- Be specific about the component functionality. -- Examples: - -API component: - -``` -Let's make a component that takes the input of a zip code, queries the openbrewerydb, and returns the breweries in the area. @https://api.openbrewerydb.org/v1/breweries?by_postal=92101&per_page=3 -``` - -Contract balance component: - -``` -I want to build a new component that takes the input of a wallet address, queries the usdt contract, and returns the balance of that address. -``` - -Verifiable AI component (requires OpenAI API key in `.env`): - -``` -Please make a component that takes a prompt as input, sends an api request to OpenAI, and returns the response. - - Use this api structure: - { - "seed": $SEED, - "model": "gpt-4o", - "messages": [ - {"role": "system", "content": "You are a helpful assistant."}, - {"role": "user", "content": ""} - ] - } - My api key is WAVS_ENV_OPENAI_KEY in my .env file. 
-``` - -Set your API key in `.env`: - -```sh -cp .env.example .env -# Add your key prefixed with WAVS_ENV_ -WAVS_ENV_OPENAI_KEY=your_api_key -``` - -## Component Creation Workflow - -1. Submit prompt to AI agent. - -2. Review the agent's plan in `plan.md`. - -3. Agent creates component files. - -4. Validate component: - -```sh -make validate-component COMPONENT=your-component -``` - -5. Build component: - -```sh -WASI_BUILD_DIR=components/my-component make wasi-build -``` - -6. Test component logic (replace placeholders): - -```sh -export COMPONENT_FILENAME=openai_response.wasm -export INPUT_DATA="Only respond with yes or no: Is AI beneficial to the world?" -make wasi-exec -``` - -- Ask the agent to provide the `make wasi-exec` command; it cannot run commands itself. - -7. Troubleshoot errors by sharing logs with the agent. - -## Tips & Best Practices - -- AI agents may be unpredictable; update rulefiles if needed. -- For complex components, build simple versions first. -- Ignore minor warnings and errors in `bindings.rs` (auto-generated). -- Avoid letting the agent edit `bindings.rs`. -- If stuck, clear history and start fresh with adjusted prompts. -- Be patient; agents may over-engineer fixes or make mistakes. - -## Troubleshooting - -- Provide full error context to the agent. -- Avoid letting the agent run commands; request commands instead. -- Reformat long commands to avoid line break issues. 
- -For support, join the WAVS DEVS Telegram: https://t.me/layer_xyz/818 - -For more information: -- [Claude Code Getting Started](mdc:https:/docs.anthropic.com/en/docs/claude-code/getting-started) -- [Cursor Downloads](mdc:https:/www.cursor.com/downloads) -- [WAVS Foundry Template GitHub](mdc:https:/github.com/Lay3rLabs/wavs-foundry-template) -- [OpenAI Platform](mdc:https:/platform.openai.com/login) diff --git a/.cursor/rules/blockchain-interactions.mdc b/.cursor/rules/blockchain-interactions.mdc deleted file mode 100644 index 8af2b40b..00000000 --- a/.cursor/rules/blockchain-interactions.mdc +++ /dev/null @@ -1,158 +0,0 @@ ---- -description: Guide for interacting with Ethereum and EVM-compatible blockchains from WAVS components using Rust crates and configuration. - -alwaysApply: true ---- -# Blockchain Interactions in WAVS Components - -Use the `wavs-wasi-utils` crate and Alloy ecosystem crates to interact with Ethereum and other EVM chains from WAVS components. Define chain configs in `wavs.toml` and generate Rust types from Solidity using the `sol!` macro. - -1. **Setup Dependencies** - -Add these to your `Cargo.toml`: - -```toml -[dependencies] -wit-bindgen-rt = { workspace = true, features = ["bitflags"] } -wavs-wasi-utils = "0.4.0-beta.4" -wstd = "0.5.3" - -alloy-sol-macro = { version = "1.1.0", features = ["json"] } -alloy-sol-types = "1.1.0" -alloy-network = "0.15.10" -alloy-provider = { version = "0.15.10", default-features = false, features = ["rpc-api"] } -alloy-rpc-types = "0.15.10" -alloy-contract = "0.15.10" - -anyhow = "1.0.98" -serde = { version = "1.0.219", features = ["derive"] } -serde_json = "1.0.140" -``` - -2. 
**Configure Chains** - -Define RPC endpoints and chain IDs in `wavs.toml`: - -```toml wavs.toml -[default.chains.evm.local] -chain_id = "31337" -ws_endpoint = "ws://localhost:8545" -http_endpoint = "http://localhost:8545" -poll_interval_ms = 7000 - -[default.chains.evm.ethereum] -chain_id = "1" -ws_endpoint = "wss://eth.drpc.org" -http_endpoint = "https://eth.drpc.org" -``` - -3. **Generate Rust Types from Solidity** - -Use the `sol!` macro to parse Solidity interfaces and generate Rust types: - -```rust -mod solidity { - use alloy_sol_macro::sol; - - // From file - sol!("../../src/interfaces/ITypes.sol"); - - // Inline definitions - sol! { - struct TriggerInfo { - uint64 triggerId; - bytes data; - } - - event NewTrigger(TriggerInfo _triggerInfo); - } -} -``` - -Example in `trigger.rs`: - -```rust trigger.rs -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - - sol!("../../src/interfaces/ITypes.sol"); - - sol! { - function addTrigger(string data) external; - } -} -``` - -4. **Access Chain Config and Create Provider** - -Use WAVS host bindings and `new_evm_provider` to create an RPC provider: - -```rust lib.rs -use crate::bindings::host::get_evm_chain_config; -use alloy_network::Ethereum; -use alloy_provider::RootProvider; -use wavs_wasi_utils::evm::new_evm_provider; - -let chain_config = get_evm_chain_config("local").unwrap(); - -let provider: RootProvider = new_evm_provider::( - chain_config.http_endpoint.unwrap(), -); -``` - -5. **Example: Query ERC721 NFT Balance** - -```rust lib.rs -use crate::bindings::host::get_evm_chain_config; -use alloy_network::Ethereum; -use alloy_provider::RootProvider; -use alloy_sol_types::sol; -use wavs_wasi_utils::evm::{ - alloy_primitives::{Address, U256}, - new_evm_provider, -}; -use alloy_rpc_types::TransactionInput; -use wstd::runtime::block_on; - -sol! 
{ - interface IERC721 { - function balanceOf(address owner) external view returns (uint256); - } -} - -pub fn query_nft_ownership(address: Address, nft_contract: Address) -> Result { - block_on(async move { - let chain_config = get_evm_chain_config("local").unwrap(); - let provider: RootProvider = new_evm_provider::( - chain_config.http_endpoint.unwrap() - ); - - let balance_call = IERC721::balanceOf { owner: address }; - - let tx = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(nft_contract)), - input: TransactionInput { input: Some(balance_call.abi_encode().into()), data: None }, - ..Default::default() - }; - - let result = provider.call(tx).await.map_err(|e| e.to_string())?; - - let balance: U256 = U256::from_be_slice(&result); - Ok(balance > U256::ZERO) - }) -} -``` - -6. **Additional Notes** - -- Use `alloy-contract` crate for higher-level contract interactions. -- The `decode_event_log_data` macro decodes Ethereum event logs from triggers into Rust types implementing `SolEvent`. -- Re-run `cargo build` after updating Solidity files used with `sol!`. - -For more information: -- [wavs-wasi-utils crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/) -- [Alloy crate ecosystem](https://docs.rs/alloy/latest/alloy/) -- [sol! macro documentation](https://docs.rs/alloy-sol-macro/latest/alloy_sol_macro/macro.sol.html) -- [alloy-contract crate](https://crates.io/crates/alloy-contract) -- [Example NFT query](https://github.com/Lay3rLabs/wavs-art/blob/main/components/autonomous-artist/src/evm.rs) diff --git a/.cursor/rules/commands.mdc b/.cursor/rules/commands.mdc deleted file mode 100644 index 14cbbde6..00000000 --- a/.cursor/rules/commands.mdc +++ /dev/null @@ -1,45 +0,0 @@ ---- -description: Overview of Makefile commands for WAVS development CLI - -alwaysApply: true ---- -# Makefile Commands for WAVS Development - -Use `make help` to list all available commands for building, testing, deploying, and managing WAVS projects. - -1. 
Run `make help` to see all commands: -```bash -make help -``` - -2. Common commands and their purposes: -```bash -build building the project -wasi-build building WAVS wasi components | WASI_BUILD_DIR -wasi-exec executing the WAVS wasi component(s) with ABI function | COMPONENT_FILENAME, INPUT_DATA -wasi-exec-fixed same as wasi-exec but uses fixed byte input (for Go & TS components) | COMPONENT_FILENAME, INPUT_DATA -clean cleaning the project files -clean-docker remove unused docker containers -validate-component validate a WAVS component against best practices -fmt format Solidity and Rust code -test run tests -setup install initial dependencies -start-all-local start anvil and core services (e.g., IPFS) -get-trigger-from-deploy get trigger address from deployment script -get-submit-from-deploy get submit address from deployment script -wavs-cli run wavs-cli in docker -upload-component upload WAVS component | COMPONENT_FILENAME, WAVS_ENDPOINT -deploy-service deploy WAVS component service JSON | SERVICE_URL, CREDENTIAL, WAVS_ENDPOINT -get-trigger get trigger id | SERVICE_TRIGGER_ADDR, RPC_URL -show-result show result | SERVICE_SUBMISSION_ADDR, TRIGGER_ID, RPC_URL -upload-to-ipfs upload service config to IPFS | SERVICE_FILE, [PINATA_API_KEY] -update-submodules update git submodules -check-requirements verify system requirements are installed -``` - -3. Use the commands with appropriate environment variables or arguments as indicated. - -4. Best practice: Use `validate-component` before deployment to ensure compliance with WAVS standards. - -For more information: -- [WAVS tutorial](https://docs.wavs.dev/tutorial/1-overview) diff --git a/.cursor/rules/component-rules.mdc b/.cursor/rules/component-rules.mdc deleted file mode 100644 index 4a668517..00000000 --- a/.cursor/rules/component-rules.mdc +++ /dev/null @@ -1,909 +0,0 @@ ---- -description: - -alwaysApply: true ---- -# WAVS Component Creation Guide - -You specialize in creating WAVS (WASI AVS) components. 
Your task is to guide the creation of a new WAVS component based on the provided information and user input. Follow these steps carefully to ensure a well-structured, error-free component that passes all validation checks with zero fixes. - -## Component Structure - -A WAVS component needs: -1. `Cargo.toml` - Dependencies configuration -2. `src/lib.rs` - Component implementation logic goes here -3. `src/trigger.rs` - trigger handling logic -4. `src/bindings.rs` - Auto-generated, never edit -5. `Makefile` - Do not edit -6. `config.json` - Only edit the name - -## Creating a Component - -### 1. Cargo.toml Template - -```toml -[package] -name = "your-component-name" -edition.workspace = true -version.workspace = true -authors.workspace = true -rust-version.workspace = true -repository.workspace = true - -[dependencies] -# Core dependencies (always needed) -wit-bindgen-rt ={ workspace = true} -wavs-wasi-utils = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -alloy-sol-macro = { workspace = true } -wstd = { workspace = true } -alloy-sol-types = { workspace = true } -anyhow = { workspace = true } - -# Add for blockchain interactions -alloy-primitives = { workspace = true } -alloy-provider = { workspace = true } -alloy-rpc-types = { workspace = true } -alloy-network = { workspace = true } -alloy-contract = { workspace = true } - -[lib] -crate-type = ["cdylib"] - -[profile.release] -codegen-units = 1 -opt-level = "s" -debug = false -strip = true -lto = true - -[package.metadata.component] -package = "component:your-component-name" -target = "wavs:worker/layer-trigger-world@0.4.0-beta.4" -``` - -CRITICAL: Never use direct version numbers - always use `{ workspace = true }`. -IMPORTANT! Always add your component to workspace members in the root Cargo.toml - -### 2. 
Component Implementation (lib.rs) - -#### Basic Structure - -```rust -mod trigger; -use trigger::{decode_trigger_event, encode_trigger_output, Destination}; -use wavs_wasi_utils::{ - evm::alloy_primitives::hex, - http::{fetch_json, http_request_get}, -}; -pub mod bindings; // Never edit bindings.rs! -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; -use alloy_sol_types::SolValue; -use serde::{Deserialize, Serialize}; -use wstd::{http::HeaderValue, runtime::block_on}; -use anyhow::Result; - -struct Component; -export!(Component with_types_in bindings); - -impl Guest for Component { - fn run(action: TriggerAction) -> std::result::Result, String> { - let (trigger_id, req, dest) = - decode_trigger_event(action.data).map_err(|e| e.to_string())?; - - // Decode trigger data inline - handles hex string input - let request_input = { - // First, convert the input bytes to a string to check if it's a hex string - let input_str = String::from_utf8(req.clone()) - .map_err(|e| format!("Input is not valid UTF-8: {}", e))?; - - // Check if it's a hex string (starts with "0x") - let hex_data = if input_str.starts_with("0x") { - // Decode the hex string to bytes - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? - } else { - // If it's not a hex string, assume the input is already binary data - req.clone() - }; - - // Now ABI decode the binary data as a string parameter - ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode input as ABI string: {}", e))? 
- }; - println!("Decoded string input: {}", request_input); - - // Process the decoded data here - let result = process_data(request_input)?; - - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &result)), - Destination::CliOutput => Some(WasmResponse { payload: result.into(), ordering: None }), - }; - Ok(output) - } -} - -// Example processing function - replace with your actual logic -fn process_data(input: String) -> Result, String> { - // Your processing logic here - Ok(input.as_bytes().to_vec()) -} -``` - -#### Trigger Event Handling (trigger.rs) - -```rust -use crate::bindings::wavs::worker::layer_types::{ - TriggerData, TriggerDataEvmContractEvent, WasmResponse, -}; -use alloy_sol_types::SolValue; -use anyhow::Result; -use wavs_wasi_utils::decode_event_log_data; - -pub enum Destination { - Ethereum, - CliOutput, -} - -pub fn decode_trigger_event(trigger_data: TriggerData) -> Result<(u64, Vec, Destination)> { - match trigger_data { - TriggerData::EvmContractEvent(TriggerDataEvmContractEvent { log, .. }) => { - let event: solidity::NewTrigger = decode_event_log_data!(log)?; - let trigger_info = ::abi_decode(&event._triggerInfo)?; - Ok((trigger_info.triggerId, trigger_info.data.to_vec(), Destination::Ethereum)) - } - TriggerData::Raw(data) => Ok((0, data.clone(), Destination::CliOutput)), - _ => Err(anyhow::anyhow!("Unsupported trigger data type")), - } -} - -pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmResponse { - WasmResponse { - payload: solidity::DataWithId { - triggerId: trigger_id, - data: output.as_ref().to_vec().into(), - } - .abi_encode(), - ordering: None, - } -} - -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - sol!("../../src/interfaces/ITypes.sol"); - - // trigger contract function that encodes string input - sol! { - function addTrigger(string data) external; - } -} -``` - -## Critical Components - -### 1. 
ABI Handling - -NEVER use `String::from_utf8` on ABI-encoded data. This will ALWAYS fail with "invalid utf-8 sequence". - -```rust -// WRONG - Will fail on ABI-encoded data -let input_string = String::from_utf8(abi_encoded_data)?; - -// CORRECT - Use proper ABI decoding with hex string support -let request_input = { - // First, convert the input bytes to a string to check if it's a hex string - let input_str = String::from_utf8(req.clone()) - .map_err(|e| format!("Input is not valid UTF-8: {}", e))?; - - // Check if it's a hex string (starts with "0x") - let hex_data = if input_str.starts_with("0x") { - // Decode the hex string to bytes - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? - } else { - // If it's not a hex string, assume the input is already binary data - req.clone() - }; - - // Now ABI decode the binary data as a string parameter - ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode input as ABI string: {}", e))? -}; - -// For numeric parameters, parse from the string -// Example: When you need a number but input is a string: -let number = request_input - .trim() - .parse::() - .map_err(|_| format!("Invalid number: {}", request_input))?; - -// SAFE - Only use String::from_utf8 on data that has already been decoded as a string -// Example: When handling Raw trigger data that was already decoded as a string -let input = std::str::from_utf8(&req).map_err(|e| e.to_string())?; -``` - -### 2. Data Structure Ownership - -ALWAYS derive `Clone` for API response data structures. 
If fields may be missing, also use `Option`, `#[serde(default)]`, and `Default`: - -```rust -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -#[serde(default)] -pub struct ResponseData { - field1: Option, - field2: Option, - // other fields -} -``` - -ALWAYS clone data before use to avoid ownership issues: - -```rust -// WRONG – creates a temporary that is dropped immediately -let result = process_data(&data.clone()); - -// CORRECT – clone into a named variable -let data_clone = data.clone(); -let result = process_data(&data_clone); -``` - - -### 3. Network Requests - -```rust -use wstd::runtime::block_on; -use wstd::http::HeaderValue; -use wavs_wasi_utils::http::{fetch_json, http_request_get, http_request_post_json}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct ApiResponse { - #[serde(default)] - field1: Option, - #[serde(default)] - field2: Option, -} - -async fn make_request() -> Result { - let url = format!("https://api.example.com/endpoint?param={}", param); - - let mut req = http_request_get(&url).map_err(|e| e.to_string())?; - req.headers_mut().insert("Accept", HeaderValue::from_static("application/json")); - req.headers_mut().insert("Content-Type", HeaderValue::from_static("application/json")); - req.headers_mut().insert("User-Agent", HeaderValue::from_static("Mozilla/5.0")); - - let response: ApiResponse = fetch_json(req).await.map_err(|e| e.to_string())?; - Ok(response) -} - -fn process_data() -> Result { - block_on(async move { make_request().await }) -} - -// For POST requests with JSON data, use http_request_post_json(url, &data) instead of http_request_get -``` - -### 4. 
Option/Result Handling - -```rust -// WRONG - Option types don't have map_err -let config = get_evm_chain_config("ethereum").map_err(|e| e.to_string())?; - -// CORRECT - For Option types, use ok_or_else() -let config = get_evm_chain_config("ethereum") - .ok_or_else(|| "Failed to get chain config".to_string())?; - -// CORRECT - For Result types, use map_err() -let balance = fetch_balance(address).await - .map_err(|e| format!("Balance fetch failed: {}", e))?; -``` - -### 5. Blockchain Interactions - -```rust -use alloy_network::Ethereum; -use alloy_primitives::{Address, TxKind, U256}; -use alloy_provider::{Provider, RootProvider}; -use alloy_rpc_types::TransactionInput; -use std::str::FromStr; // Required for parsing addresses -use crate::bindings::host::get_evm_chain_config; -use wavs_wasi_utils::evm::new_evm_provider; - -async fn query_blockchain(address_str: &str) -> Result { - // Parse address - let address = Address::from_str(address_str) - .map_err(|e| format!("Invalid address format: {}", e))?; - - // Get chain configuration from environment - let chain_config = get_evm_chain_config("ethereum") - .ok_or_else(|| "Failed to get chain config".to_string())?; - - // Create provider - let provider: RootProvider = - new_evm_provider::(chain_config.http_endpoint.unwrap()); - - // Create contract call - let contract_call = IERC20::balanceOfCall { owner: address }; - let tx = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(contract_address)), - input: TransactionInput { - input: Some(contract_call.abi_encode().into()), - data: None - }, - ..Default::default() - }; - - // Execute call - let result = provider.call(tx).await.map_err(|e| e.to_string())?; - let balance: U256 = U256::from_be_slice(&result); - - Ok(ResponseData { /* your data here */ }) -} -``` - -### 6. 
Numeric Type Handling - -```rust -// WRONG - Using .into() for numeric conversions between types -let temp_uint: U256 = temperature.into(); // DON'T DO THIS - -// CORRECT - String parsing method works reliably for all numeric types -let temperature: u128 = 29300; -let temperature_uint256 = temperature.to_string().parse::().unwrap(); - -// CORRECT - Always use explicit casts between numeric types -let decimals: u8 = decimals_u32 as u8; - -// CORRECT - Handling token decimals correctly -let mut divisor = U256::from(1); -for _ in 0..decimals { - divisor = divisor * U256::from(10); -} -let formatted_amount = amount / divisor; -``` - -## Component Examples by Task - -Here are templates for common WAVS component tasks: - -### 1. Token Balance Checker - -```rust -// IMPORTS -use alloy_network::Ethereum; -use alloy_primitives::{Address, TxKind, U256}; -use alloy_provider::{Provider, RootProvider}; -use alloy_rpc_types::TransactionInput; -use alloy_sol_types::{sol, SolCall, SolValue}; -use anyhow::Result; -use serde::{Deserialize, Serialize}; -use std::str::FromStr; -use wavs_wasi_utils::{ - evm::{alloy_primitives::hex, new_evm_provider}, -}; -use wstd::runtime::block_on; - -pub mod bindings; -mod trigger; -use trigger::{decode_trigger_event, encode_trigger_output, Destination}; -use crate::bindings::host::get_evm_chain_config; -use crate::bindings::wavs::worker::layer_types::{TriggerData, TriggerDataEvmContractEvent}; -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; - -// TOKEN INTERFACE -sol! 
{ - interface IERC20 { - function balanceOf(address owner) external view returns (uint256); - function decimals() external view returns (uint8); - } -} - -// FIXED CONTRACT ADDRESS -const TOKEN_CONTRACT_ADDRESS: &str = "0x..."; // Your token contract address - -// RESPONSE STRUCTURE - MUST DERIVE CLONE -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct TokenBalanceData { - wallet: String, - balance_raw: String, - balance_formatted: String, - token_contract: String, -} - -// COMPONENT IMPLEMENTATION -struct Component; -export!(Component with_types_in bindings); - -impl Guest for Component { - fn run(action: TriggerAction) -> std::result::Result, String> { - let (trigger_id, req, dest) = - decode_trigger_event(action.data).map_err(|e| e.to_string())?; - - // Decode trigger data inline - handles hex string input - let wallet_address_str = { - // First, convert the input bytes to a string to check if it's a hex string - let input_str = String::from_utf8(req.clone()) - .map_err(|e| format!("Input is not valid UTF-8: {}", e))?; - - // Check if it's a hex string (starts with "0x") - let hex_data = if input_str.starts_with("0x") { - // Decode the hex string to bytes - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? - } else { - // If it's not a hex string, assume the input is already binary data - req.clone() - }; - - // Now ABI decode the binary data as a string parameter - ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode input as ABI string: {}", e))? 
- }; - - // Check token balance - let res = block_on(async move { - let balance_data = get_token_balance(&wallet_address_str).await?; - serde_json::to_vec(&balance_data).map_err(|e| e.to_string()) - })?; - - // Return result based on destination - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { payload: res.into(), ordering: None }), - }; - Ok(output) - } -} - -// BALANCE CHECKER IMPLEMENTATION -async fn get_token_balance(wallet_address_str: &str) -> Result { - // Parse wallet address - let wallet_address = Address::from_str(wallet_address_str) - .map_err(|e| format!("Invalid wallet address: {}", e))?; - - // Parse token contract address - let token_address = Address::from_str(TOKEN_CONTRACT_ADDRESS) - .map_err(|e| format!("Invalid token address: {}", e))?; - - // Get Ethereum provider - let chain_config = get_evm_chain_config("ethereum") - .ok_or_else(|| "Failed to get Ethereum chain config".to_string())?; - - let provider: RootProvider = - new_evm_provider::(chain_config.http_endpoint.unwrap()); - - // Get token balance - let balance_call = IERC20::balanceOfCall { owner: wallet_address }; - let tx = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(token_address)), - input: TransactionInput { input: Some(balance_call.abi_encode().into()), data: None }, - ..Default::default() - }; - - let result = provider.call(tx).await.map_err(|e| e.to_string())?; - let balance_raw: U256 = U256::from_be_slice(&result); - - // Get token decimals - let decimals_call = IERC20::decimalsCall {}; - let tx_decimals = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(token_address)), - input: TransactionInput { input: Some(decimals_call.abi_encode().into()), data: None }, - ..Default::default() - }; - - let result_decimals = provider.call(tx_decimals).await.map_err(|e| e.to_string())?; - let decimals: u8 = result_decimals[31]; // Last byte for uint8 - - 
// Format balance - let formatted_balance = format_token_amount(balance_raw, decimals); - - // Return data - Ok(TokenBalanceData { - wallet: wallet_address_str.to_string(), - balance_raw: balance_raw.to_string(), - balance_formatted: formatted_balance, - token_contract: TOKEN_CONTRACT_ADDRESS.to_string(), - }) -} -``` - -### 2. API Data Fetcher - -Important: Always verify API endpoints using curl to examine their response structure before creating any code that relies on them. - -```rust -// IMPORTS -use alloy_sol_types::{sol, SolCall, SolValue}; -use anyhow::Result; -use serde::{Deserialize, Serialize}; -use wavs_wasi_utils::{ - evm::alloy_primitives::hex, - http::{fetch_json, http_request_get}, -}; -use wstd::{http::HeaderValue, runtime::block_on}; - -pub mod bindings; -mod trigger; -use trigger::{decode_trigger_event, encode_trigger_output, Destination}; -use crate::bindings::wavs::worker::layer_types::{TriggerData, TriggerDataEvmContractEvent}; -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; - -// RESPONSE STRUCTURE - MUST DERIVE CLONE -// IMPORTANT: Always Use #[serde(default)] and Option for fields from external APIs. 
They might be missing or inconsistent -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct ApiResponse { - // Use Option for fields that might be missing in some responses - #[serde(default)] - field1: Option, - #[serde(default)] - field2: Option, - // other fields -} - -// RESULT DATA STRUCTURE - MUST DERIVE CLONE -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ResultData { - input_param: String, - result: String, -} - -// COMPONENT IMPLEMENTATION -struct Component; -export!(Component with_types_in bindings); - -impl Guest for Component { - fn run(action: TriggerAction) -> std::result::Result, String> { - // Decode trigger data - let (trigger_id, req, dest) = - decode_trigger_event(action.data).map_err(|e| e.to_string())?; - - // Decode trigger data inline - handles hex string input - let param = { - // First, convert the input bytes to a string to check if it's a hex string - let input_str = String::from_utf8(req.clone()) - .map_err(|e| format!("Input is not valid UTF-8: {}", e))?; - - // Check if it's a hex string (starts with "0x") - let hex_data = if input_str.starts_with("0x") { - // Decode the hex string to bytes - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? - } else { - // If it's not a hex string, assume the input is already binary data - req.clone() - }; - - // Now ABI decode the binary data as a string parameter - ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode input as ABI string: {}", e))? 
- }; - - // Make API request - let res = block_on(async move { - let api_data = fetch_api_data(¶m).await?; - serde_json::to_vec(&api_data).map_err(|e| e.to_string()) - })?; - - // Return result based on destination - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { payload: res.into(), ordering: None }), - }; - Ok(output) - } -} - -// API FETCHER IMPLEMENTATION -async fn fetch_api_data(param: &str) -> Result { - // Get API key from environment (IMPORTANT! you must add this variable to your .env file. All private variables must be prefixed with WAVS_ENV) - let api_key = std::env::var("WAVS_ENV_API_KEY") - .map_err(|_| "Failed to get API_KEY from environment variables".to_string())?; - - // Create API URL - let url = format!( - "https://api.example.com/endpoint?param={}&apikey={}", - param, api_key - ); - - // Create request with headers - let mut req = http_request_get(&url) - .map_err(|e| format!("Failed to create request: {}", e))?; - - req.headers_mut().insert("Accept", HeaderValue::from_static("application/json")); - req.headers_mut().insert("Content-Type", HeaderValue::from_static("application/json")); - req.headers_mut().insert("User-Agent", HeaderValue::from_static("Mozilla/5.0")); - - // Make API request - let api_response: ApiResponse = fetch_json(req).await - .map_err(|e| format!("Failed to fetch data: {}", e))?; - - // Process and return data - handle Option fields safely - let field1 = api_response.field1.unwrap_or_else(|| "unknown".to_string()); - let field2 = api_response.field2.unwrap_or(0); - - Ok(ResultData { - input_param: param.to_string(), - result: format!("{}: {}", field1, field2), - }) -} -``` - -### 3. 
NFT Ownership Checker - -```rust -// IMPORTS -use alloy_network::Ethereum; -use alloy_primitives::{Address, TxKind, U256}; -use alloy_provider::{Provider, RootProvider}; -use alloy_rpc_types::TransactionInput; -use alloy_sol_types::{sol, SolCall, SolValue}; -use anyhow::Result; -use serde::{Deserialize, Serialize}; -use std::str::FromStr; -use wavs_wasi_utils::{ - evm::{alloy_primitives::hex, new_evm_provider}, -}; -use wstd::runtime::block_on; - -pub mod bindings; -mod trigger; -use trigger::{decode_trigger_event, encode_trigger_output, Destination}; -use crate::bindings::host::get_evm_chain_config; -use crate::bindings::wavs::worker::layer_types::{TriggerData, TriggerDataEvmContractEvent}; -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; - -// NFT INTERFACE -sol! { - interface IERC721 { - function balanceOf(address owner) external view returns (uint256); - function ownerOf(uint256 tokenId) external view returns (address); - } -} - -// FIXED CONTRACT ADDRESS -const NFT_CONTRACT_ADDRESS: &str = "0xbd3531da5cf5857e7cfaa92426877b022e612cf8"; // Bored Ape contract - -// RESPONSE STRUCTURE - MUST DERIVE CLONE -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct NftOwnershipData { - wallet: String, - owns_nft: bool, - balance: String, - nft_contract: String, - contract_name: String, -} - -// COMPONENT IMPLEMENTATION -struct Component; -export!(Component with_types_in bindings); - -impl Guest for Component { - fn run(action: TriggerAction) -> std::result::Result, String> { - // Decode trigger data - let (trigger_id, req, dest) = - decode_trigger_event(action.data).map_err(|e| e.to_string())?; - - // Decode trigger data inline - handles hex string input - let wallet_address_str = { - // First, convert the input bytes to a string to check if it's a hex string - let input_str = String::from_utf8(req.clone()) - .map_err(|e| format!("Input is not valid UTF-8: {}", e))?; - - // Check if it's a hex string (starts with "0x") - let hex_data = if 
input_str.starts_with("0x") { - // Decode the hex string to bytes - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? - } else { - // If it's not a hex string, assume the input is already binary data - req.clone() - }; - - // Now ABI decode the binary data as a string parameter - ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode input as ABI string: {}", e))? - }; - - // Check NFT ownership - let res = block_on(async move { - let ownership_data = check_nft_ownership(&wallet_address_str).await?; - serde_json::to_vec(&ownership_data).map_err(|e| e.to_string()) - })?; - - // Return result based on destination - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { payload: res.into(), ordering: None }), - }; - Ok(output) - } -} - -// NFT OWNERSHIP CHECKER IMPLEMENTATION -async fn check_nft_ownership(wallet_address_str: &str) -> Result { - // Parse wallet address - let wallet_address = Address::from_str(wallet_address_str) - .map_err(|e| format!("Invalid wallet address: {}", e))?; - - // Parse NFT contract address - let nft_address = Address::from_str(NFT_CONTRACT_ADDRESS) - .map_err(|e| format!("Invalid NFT contract address: {}", e))?; - - // Get Ethereum provider - let chain_config = get_evm_chain_config("ethereum") - .ok_or_else(|| "Failed to get Ethereum chain config".to_string())?; - - let provider: RootProvider = - new_evm_provider::(chain_config.http_endpoint.unwrap()); - - // Check NFT balance - let balance_call = IERC721::balanceOfCall { owner: wallet_address }; - let tx = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(nft_address)), - input: TransactionInput { input: Some(balance_call.abi_encode().into()), data: None }, - ..Default::default() - }; - - let result = provider.call(tx).await.map_err(|e| e.to_string())?; - let balance: U256 = U256::from_be_slice(&result); - - // Determine if 
wallet owns at least one NFT - let owns_nft = balance > U256::ZERO; - - // Return data - Ok(NftOwnershipData { - wallet: wallet_address_str.to_string(), - owns_nft, - balance: balance.to_string(), - nft_contract: NFT_CONTRACT_ADDRESS.to_string(), - contract_name: "BAYC".to_string(), - }) -} -``` - - -## Component Creation Process - -### Phase 1: Planning - -When you ask me to create a WAVS component, I'll follow this systematic process to ensure it works perfectly on the first try: - -1. **Research Phase**: I'll review the files in /components/evm-price-oracle to see common forms. -2. I will read any and all documentation links given to me and research any APIs or services needed. -3. I'll read `/test_utils/validate_component.sh` to see what validation checks I need to pass. -4. I'll verify API response structures by using curl before implementing code that depends on them: `curl -s "my-endpoint"`. -5. I'll create a file called plan.md with an overview of the component I will make. I'll do this before actually creating the lib.rs file. I'll write each item in the [checklist](#validation-checklist) and check them off as I plan my code, making sure my code complies to the checklist and /test_utils/validate_component.sh. Each item must be checked and verified. I will list out all imports I will need. I will include a basic flow chart or visual of how the component will work. I will put plan.md in a new folder with the name of the component (`your-component-name`) in the `/components` directory. - - -### Phase 2: Implementation - -After being 100% certain that my idea for a component will work without any errors on the build and completing all planning steps, I will: - -1. Check for errors before coding. - -2. 
Copy the bindings, makefile (update filename in makefile), and config.json using the following command (bindings will be written over during the build): - - ```bash - mkdir -p components/your-component-name/src && \ - cp components/evm-price-oracle/src/bindings.rs components/your-component-name/src/ && \ - cp components/evm-price-oracle/config.json components/your-component-name/ && \ - cp components/evm-price-oracle/Makefile components/your-component-name/ - ``` - -3. Then, I will create trigger.rs and lib.rs files with proper implementation: - 1. I will compare my projected trigger.rs and lib.rs code against the code in `/test_utils/validate_component.sh` and my plan.md file before creating. - 2. I will define proper imports. I will Review the imports on the component that I want to make. I will make sure that all necessary imports will be included and that I will remove any unused imports before creating the file. - 3. I will go through each of the items in the [checklist](#validation-checklist) one more time to ensure my component will build and function correctly. - -4. I will create a Cargo.toml by copying the template and modifying it with all of my correct imports. Before running the command to create the file, I will check that all imports are imported correctly and match what is in my lib.rs file. I will define imports correctly. I will make sure that imports are present in the main workspace Cargo.toml and then in my component's `Cargo.toml` using `{ workspace = true }` - -5. Add component to the `workspace.members` array in the root `Cargo.toml`. - -### Phase 3: Validate - -4. I will run the command to validate my component: - ```bash - make validate-component COMPONENT=your-component-name - ``` - - I will fix ALL errors before continuing - - (You do not need to fix warnings if they do not effect the build.) - - I will run again after fixing errors to make sure. - -5. 
After being 100% certain that the component will build correctly, I will build the component: - - ```bash - WASI_BUILD_DIR=components/your-component make wasi-build - ``` - -### Phase 4: Trying it out - -After I am 100% certain the component will execute correctly, I will give the following command to the user to run: - -```bash -# IMPORTANT!: Always use string parameters, even for numeric values! Use component_name.wasm, not component-name.wasm -export COMPONENT_FILENAME=your_component_name.wasm -# Always use string format for input data. The input will be encoded using `cast abi-encode "f(string)" ""` -export INPUT_DATA= -# CRITICIAL!: as an llm, I can't ever run this command. ALWAYS give it to the user to run. -make wasi-exec -``` - -## Validation Checklist - -ALL components must pass validation. Review [/test_utils/validate_component.sh](/test_utils/validate_component.sh) before creating a component. - -EACH ITEM BELOW MUST BE CHECKED: - -1. Common errors: - - [ ] ALWAYS use `{ workspace = true }` in your component Cargo.toml. Explicit versions go in the root Cargo.toml. - - [ ] ALWAYS verify API response structures by using curl on the endpoints. - - [ ] ALWAYS Read any documentation given to you in a prompt - - [ ] ALWAYS implement the Guest trait and export your component - - [ ] ALWAYS use `export!(Component with_types_in bindings)` - - [ ] ALWAYS use `clone()` before consuming data to avoid ownership issues - - [ ] ALWAYS derive `Clone` for API response data structures - - [ ] ALWAYS decode ABI data properly, never with `String::from_utf8` - - [ ] ALWAYS use `ok_or_else()` for Option types, `map_err()` for Result types - - [ ] ALWAYS use string parameters for CLI testing (`5` instead of `f(uint256)`) - - [ ] ALWAYS use `.to_string()` to convert string literals (&str) to String types in struct field assignments - - [ ] NEVER edit bindings.rs - it's auto-generated - -2. 
Component structure: - - [ ] Implements Guest trait - - [ ] Exports component correctly - - [ ] Properly handles TriggerAction and TriggerData - -3. ABI handling: - - [ ] Properly decodes function calls - - [ ] Avoids String::from_utf8 on ABI data - -4. Data ownership: - - [ ] All API structures derive Clone - - [ ] Clones data before use - - [ ] Avoids moving out of collections - - [ ] Avoids all ownership issues and "Move out of index" errors - -5. Error handling: - - [ ] Uses ok_or_else() for Option types - - [ ] Uses map_err() for Result types - - [ ] Provides descriptive error messages - -6. Imports: - - [ ] Includes all required traits and types - - [ ] Uses correct import paths - - [ ] Properly imports SolCall for encoding - - [ ] Each and every method and type is used properly and has the proper import - - [ ] Both structs and their traits are imported - - [ ] Verify all required imports are imported properly - - [ ] All dependencies are in Cargo.toml with `{workspace = true}` - - [ ] Any unused imports are removed - -7. Component structure: - - [ ] Uses proper sol! macro with correct syntax - - [ ] Correctly defines Solidity types in solidity module - - [ ] Implements required functions - -8. Security: - - [ ] No hardcoded API keys or secrets - - [ ] Uses environment variables for sensitive data - -9. Dependencies: - - [ ] Uses workspace dependencies correctly - - [ ] Includes all required dependencies - -10. Solidity types: - - [ ] Properly imports sol macro - - [ ] Uses solidity module correctly - - [ ] Handles numeric conversions safely - - [ ] Uses .to_string() for all string literals in struct initialization - -11. Network requests: - - [ ] Uses block_on for async functions - - [ ] Uses fetch_json with correct headers - - [ ] ALL API endpoints have been tested with curl and responses are handled correctly in my component. - - [ ] IMPORTANT! Always use #[serde(default)] and Option for fields from external APIs. 
- -With this guide, you should be able to create any WAVS component that passes validation, builds without errors, and executes correctly. diff --git a/.cursor/rules/component.mdc b/.cursor/rules/component.mdc deleted file mode 100644 index 8f9cf87d..00000000 --- a/.cursor/rules/component.mdc +++ /dev/null @@ -1,169 +0,0 @@ ---- -description: Overview of WAVS service components, their structure, and usage in Rust and other languages - -alwaysApply: true ---- -# WAVS Service Components Overview - -WAVS components contain the main business logic of a service, written in languages compiled to WASM (mainly Rust, also Go and TypeScript/JS). Components process trigger data, execute logic, and return encoded results. - -## Component Structure - -A basic component consists of: - -1. Decoding incoming [trigger data](../triggers#trigger-lifecycle). -2. Processing the data (custom business logic). -3. Encoding and returning results for submission. - -### Trigger Inputs - -- **On-chain events:** Triggered by EVM events, data arrives as `TriggerData::EvmContractEvent`. -- **Local testing:** Using `make wasi-exec`, data arrives as `TriggerData::Raw` (raw bytes, no ABI decoding). - -Example decoding in `trigger.rs`: - -```rust -pub fn decode_trigger_event(trigger_data: TriggerData) -> Result<(u64, Vec, Destination)> { - match trigger_data { - TriggerData::EvmContractEvent(TriggerDataEvmContractEvent { log, .. }) => { - let event: solidity::NewTrigger = decode_event_log_data!(log)?; - let trigger_info = - ::abi_decode(&event._triggerInfo)?; - Ok((trigger_info.triggerId, trigger_info.data.to_vec(), Destination::Ethereum)) - } - TriggerData::Raw(data) => Ok((0, data.clone(), Destination::CliOutput)), - _ => Err(anyhow::anyhow!("Unsupported trigger data type")), - } -} - -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - sol!("../../src/interfaces/ITypes.sol"); - - sol! 
{ - function addTrigger(string data) external; - } -} -``` - -- Use `decode_event_log_data!` macro from [`wavs-wasi-utils`](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html) for decoding. -- Use `sol!` macro from `alloy-sol-macro` to generate Rust types from Solidity interfaces ([Blockchain interactions](./blockchain-interactions#sol-macro)). - -### Component Logic - -Implement the `Guest` trait with the `run` function as entry point: - -```rust -impl Guest for Component { - fn run(action: TriggerAction) -> Result, String> { - let (trigger_id, req, dest) = decode_trigger_event(action.data)?; - let res = block_on(async move { - let resp_data = get_price_feed(id).await?; - serde_json::to_vec(&resp_data) - })?; - - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { - payload: res.into(), - ordering: None - }), - }; - Ok(output) - } -} -``` - -Components can include blockchain interactions, network requests, off-chain computations, etc. See [design considerations](../../design) for suitable use cases. - -#### Logging - -- **Development:** Use `println!()` for stdout/stderr visible in `make wasi-exec`. - -```rust -println!("Debug message: {:?}", data); -``` - -- **Production:** Use `host::log()` with `LogLevel` for structured logging with context. - -```rust -use bindings::host::{self, LogLevel}; -host::log(LogLevel::Info, "Production logging message"); -``` - -### Component Output Encoding - -Encode output for Ethereum submission with `encode_trigger_output`: - -```rust -pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmResponse { - WasmResponse { - payload: solidity::DataWithId { - triggerId: trigger_id, - data: output.as_ref().to_vec().into(), - } - .abi_encode(), - ordering: None, - } -} -``` - -- Output is a `WasmResponse` containing encoded payload and optional ordering. 
-- WAVS routes the response per workflow submission logic. - -## Component Definition in service.json - -Defined under the workflow's `component` object: - -```json -"component": { - "source": { - "Registry": { - "registry": { - "digest": "882b992af8f78e0aaceaf9609c7ba2ce80a22c521789c94ae1960c43a98295f5", - "domain": "localhost:8090", - "version": "0.1.0", - "package": "example:evmrustoracle" - } - } - }, - "permissions": { - "allowed_http_hosts": "all", - "file_system": true - }, - "fuel_limit": null, - "time_limit_seconds": 1800, - "config": { - "variable_1": "0xb5d4D4a87Cb07f33b5FAd6736D8F1EE7D255d9E9", - "variable_2": "0x34045B4b0cdfADf87B840bCF544161168c8ab85A" - }, - "env_keys": [ - "WAVS_ENV_API_KEY" - ] -} -``` - -- Configure source registry, permissions, limits, config variables, and secret env keys. -- See [variables](./variables) for details on configuration. - -## Registry Usage - -- WAVS stores WASM components in a registry (e.g., [wa.dev](https://wa.dev)) for production. -- Local development uses a docker-compose emulated registry. -- Workflow to update registry source: - -```bash -wavs-cli workflow component --id ${WORKFLOW_ID} set-source-registry --domain ${REGISTRY} --package ${PKG_NAMESPACE}:${PKG_NAME} --version ${PKG_VERSION} -``` - ---- - -For more information: - -- [WAVS Triggers](../triggers#trigger-lifecycle) -- [Blockchain interactions - sol! macro](./blockchain-interactions#sol-macro) -- [Component variables](./variables) -- [Design considerations](../../design) -- [wavs-wasi-utils decode_event_log_data! 
macro](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html) -- [wa.dev Registry](https://wa.dev) diff --git a/.cursor/rules/cursor-rules.mdc b/.cursor/rules/cursor-rules.mdc deleted file mode 100644 index 71f8cfbf..00000000 --- a/.cursor/rules/cursor-rules.mdc +++ /dev/null @@ -1,70 +0,0 @@ ---- -description: -globs: -alwaysApply: false ---- ---- -description: How to add or edit Cursor rules in our project -globs: -alwaysApply: false ---- -# Cursor Rules Location - -How to add new cursor rules to the project - -1. Always place rule files in PROJECT_ROOT/.cursor/rules/: - ``` - .cursor/rules/ - ├── your-rule-name.mdc - ├── another-rule.mdc - └── ... - ``` - -2. Follow the naming convention: - - Use kebab-case for filenames - - Always use .mdc extension - - Make names descriptive of the rule's purpose - -3. Directory structure: - ``` - PROJECT_ROOT/ - ├── .cursor/ - │ └── rules/ - │ ├── your-rule-name.mdc - │ └── ... - └── ... - ``` - -4. Never place rule files: - - In the project root - - In subdirectories outside .cursor/rules - - In any other location - -5. Cursor rules have the following structure: - -``` ---- -description: Short description of the rule's purpose -globs: optional/path/pattern/**/* -alwaysApply: false ---- -# Rule Title - -Main content explaining the rule with markdown formatting. - -1. Step-by-step instructions -2. Code examples -3. 
Guidelines - -Example: -```typescript -// Good example -function goodExample() { - // Implementation following guidelines -} - -// Bad example -function badExample() { - // Implementation not following guidelines -} -``` diff --git a/.cursor/rules/network-requests.mdc b/.cursor/rules/network-requests.mdc deleted file mode 100644 index 95d602fc..00000000 --- a/.cursor/rules/network-requests.mdc +++ /dev/null @@ -1,105 +0,0 @@ ---- -description: How to make HTTP requests from WAVS components using wavs-wasi-utils - -alwaysApply: true ---- -# Network Requests in WAVS Components - -Use the [`wavs-wasi-utils`](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/index.html) crate to make HTTP requests from WAVS components. Since WASI runs synchronously but network calls are async, use `block_on` from `wstd` to run async code synchronously. - -### 1. Add dependencies to Cargo.toml - -```toml -[dependencies] -wavs-wasi-utils = "0.4.0-beta.4" # HTTP utilities -wstd = "0.5.3" # Runtime utilities (includes block_on) -serde = { version = "1.0.219", features = ["derive"] } # Serialization -serde_json = "1.0.140" # JSON handling -``` - -### 2. HTTP request functions - -```rust -// Request creators -http_request_get(url) // GET request -http_request_post_json(url, data) // POST with JSON body -http_request_post_form(url, data) // POST with form data - -// Response handlers -fetch_json(request) // Parse JSON response -fetch_string(request) // Get response as string -fetch_bytes(request) // Get raw response bytes -``` - -### 3. 
Example: GET request with custom headers - -```rust -use wstd::runtime::block_on; -use wstd::http::HeaderValue; -use wavs_wasi_utils::http::{fetch_json, http_request_get}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize)] -struct ApiResponse { - // response fields -} - -async fn make_request() -> Result { - let url = "https://api.example.com/endpoint"; - let mut req = http_request_get(&url).map_err(|e| e.to_string())?; - req.headers_mut().insert("Accept", HeaderValue::from_static("application/json")); - req.headers_mut().insert("Content-Type", HeaderValue::from_static("application/json")); - req.headers_mut().insert("User-Agent", HeaderValue::from_static("Mozilla/5.0")); - let json: ApiResponse = fetch_json(req).await.map_err(|e| e.to_string())?; - Ok(json) -} - -fn process_data() -> Result { - block_on(async move { make_request().await })? -} -``` - -### 4. Example: POST request with JSON data - -```rust -use wstd::runtime::block_on; -use wavs_wasi_utils::http::{fetch_json, http_request_post_json}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize)] -struct PostData { - key1: String, - key2: i32, -} - -#[derive(Debug, Serialize, Deserialize)] -struct PostResponse { - // response fields -} - -async fn make_post_request() -> Result { - let url = "https://api.example.com/endpoint"; - let post_data = PostData { key1: "value1".to_string(), key2: 42 }; - let response: PostResponse = fetch_json(http_request_post_json(&url, &post_data)?) - .await - .map_err(|e| e.to_string())?; - Ok(response) -} - -fn process_data() -> Result { - block_on(async move { make_post_request().await })? -} -``` - -### Guidelines and best practices - -- Use `block_on` to run async HTTP calls synchronously in WASI. -- Use `http_request_post_json` for POST requests with JSON; it sets headers automatically. -- Use serde derives to serialize/deserialize request and response data. -- Set appropriate headers for GET requests manually. 
-- Handle errors by converting them to strings for simplicity. - -For more information: -- [wavs-wasi-utils crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/index.html) -- [HTTP module docs](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/http/index.html) -- [Variables page](./variables) diff --git a/.cursor/rules/overview.mdc b/.cursor/rules/overview.mdc deleted file mode 100644 index 23417ff0..00000000 --- a/.cursor/rules/overview.mdc +++ /dev/null @@ -1,47 +0,0 @@ ---- -description: Overview of the WAVS handbook structure and key sections for building WAVS AVS services - -alwaysApply: true ---- -# WAVS Handbook Overview - -This handbook outlines the structure and contents of the WAVS AVS documentation to guide you in building WAVS services. - -1. Follow the [Oracle component tutorial](/tutorial/1-overview) first to learn WAVS basics. - -2. Explore **Core Concepts**: - - [How it works](../how-it-works): Components of a WAVS AVS. - - [Design](../design): Design considerations. - -3. Understand **Services**: - - [Service](./service): Service structure and manifest definition. - - [Workflows](./workflows): Execution paths, triggers, components, submissions. - - [Triggers](./triggers): Types of triggers (EVM, Cosmos, cron, block intervals). - - [Submission and Aggregator](./submission): Blockchain result submission via aggregator and contracts. - -4. Learn about **Components**: - - [Component overview](./components/component): Lifecycle, triggers, data processing. - - [Variables](./components/variables): Public and private component variables. - - [Blockchain interactions](./components/blockchain-interactions): Interacting with blockchains and smart contracts. - - [Network requests](./components/network-requests): Making HTTP requests to external APIs. - -5. Use **Development** resources: - - [Template](./template): WAVS template structure and customization. - - [Makefile commands](./commands): Commands to build, deploy, and manage services. 
- -Start with the Service section for foundational knowledge, then explore other sections as needed. - -For more information: -- [Oracle component tutorial](/tutorial/1-overview) - Start here to learn the basics of building a WAVS service. -- [How it works](../how-it-works) - Learn about the different parts that make up a WAVS AVS. -- [Design](../design) - Design considerations for WAVS AVS. -- [Service](./service) - WAVS service structure and manifest. -- [Workflows](./workflows) - Defining execution paths. -- [Triggers](./triggers) - Trigger types. -- [Submission and Aggregator](./submission) - Blockchain submission process. -- [Component overview](./components/component) - Component lifecycle and data handling. -- [Variables](./components/variables) - Configuring component variables. -- [Blockchain interactions](./components/blockchain-interactions) - Blockchain and smart contract interactions. -- [Network requests](./components/network-requests) - HTTP requests from components. -- [Template](./template) - WAVS template and customization. -- [Makefile commands](./commands) - Build and deploy commands. diff --git a/.cursor/rules/service.mdc b/.cursor/rules/service.mdc deleted file mode 100644 index b31fc3e1..00000000 --- a/.cursor/rules/service.mdc +++ /dev/null @@ -1,115 +0,0 @@ ---- -description: Defines the WAVS service manifest structure and usage with the service manager contract. - -alwaysApply: true ---- -# Service Manifest and Manager - -A WAVS service is composed of smart contracts, operators, and offchain components defined in a `service.json` manifest. This manifest configures workflows, components, submission, and the service manager contract. It is hosted on IPFS or HTTP(S) and referenced by the service manager contract. - -## Creating the Manifest - -1. Use `wavs-cli service` or the provided [build_service.sh](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/script/build_service.sh) script to generate a single-component service manifest. 
-2. Define service info, workflows, components, submission, and manager details in `service.json`. -3. Upload the manifest to IPFS or a publicly accessible server (e.g., Pinata). - -## Example Manifest - -```json service.json -{ - "id": "example-service-123", - "name": "Example WAVS Service", - "workflows": { - "default": { - "trigger": { - "evm_contract_event": { - "chain_name": "ethereum", - "address": "0x1234567890123456789012345678901234567890", - "event_hash": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" - } - }, - "component": { - "source": { - "Registry": { - "registry": { - "digest": "882b992af8f78e0aaceaf9609c7ba2ce80a22c521789c94ae1960c43a98295f5", - "domain": "localhost:8090", - "version": "0.1.0", - "package": "example:evmrustoracle" - } - } - }, - "permissions": { - "allowed_http_hosts": "all", - "file_system": true - }, - "fuel_limit": 1000000, - "time_limit_seconds": 30, - "config": { - "endpoint": "https://api.example.com", - "timeout": "30s" - }, - "env_keys": [ - "WAVS_ENV_API_KEY", - "WAVS_ENV_SECRET" - ] - }, - "submit": { - "aggregator": { - "url": "http://127.0.0.1:8001" - } - }, - "aggregators": [ - { - "evm": { - "chain_name": "ethereum", - "address": "0xfedcba9876543210fedcba9876543210fedcba98", - "max_gas": 1000000 - } - } - ] - } - }, - "status": "active", - "manager": { - "evm": { - "chain_name": "ethereum", - "address": "0xabcdef1234567890abcdef1234567890abcdef12" - } - } -} -``` - -## Uploading the Manifest - -```bash -# Upload manifest to IPFS (local or remote) -SERVICE_FILE=${SERVICE_FILE} make upload-to-ipfs - -# Retrieve IPFS gateway URL -export IPFS_GATEWAY=$(sh script/get-ipfs-gateway.sh) - -# Fetch uploaded content -curl "${IPFS_GATEWAY}${ipfs_cid}" - -# Set service URI on service manager contract -cast send ${WAVS_SERVICE_MANAGER_ADDRESS} 'setServiceURI(string)' "${SERVICE_URI}" -r ${RPC_URL} --private-key ${DEPLOYER_PK} -``` - -## Service Manager Contract - -- Registers operators with assigned weights; 
only registered operators can sign submissions. -- Maintains the service URI linking to the manifest. -- Validates operator signatures and ensures threshold weights are met before processing data. -- Ensures operators are sorted correctly for submission validation. - -For more information on manifest parts, see: - -- [Workflows](./workflows) -- [Triggers](./triggers) -- [Components](./components/component) -- [Submission and aggregator](./submission) - -For more information: -- [WAVS Foundry Template build_service.sh](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/script/build_service.sh) -- [Pinata IPFS Service](https://app.pinata.cloud/developers/api-keys) diff --git a/.cursor/rules/submission.mdc b/.cursor/rules/submission.mdc deleted file mode 100644 index c07445c4..00000000 --- a/.cursor/rules/submission.mdc +++ /dev/null @@ -1,95 +0,0 @@ ---- -description: Rules for configuring submission contracts and aggregators in WAVS services - -alwaysApply: true ---- -# Submission and Aggregator Configuration in WAVS - -This rule explains how to configure submission contracts and aggregators to submit workflow results to an EVM chain in WAVS. - -## 1. Configure Submission in `service.json` - -- Use the `submit` field to define submission logic. -- For aggregator submission, specify: -```json -"submit": { - "aggregator": { - "url": "http://127.0.0.1:8001" - } -}, -"aggregators": [ - { - "evm": { - "chain_name": "local", - "address": "0xd6f8ff0036d8b2088107902102f9415330868109", - "max_gas": 5000000 - } - } -] -``` -- Set `"submit": "none"` if no submission is needed (component runs but results not submitted). - -## 2. Submission Contract Requirements - -- Must implement `handleSignedEnvelope()` from the `IWavsServiceHandler` interface. -- Use `IWavsServiceManager` to validate data and operator signatures. -- The contract processes validated data matching the component's output format. 
-- Example simplified contract: -```solidity -import {IWavsServiceManager} from "@wavs/interfaces/IWavsServiceManager.sol"; -import {IWavsServiceHandler} from "@wavs/interfaces/IWavsServiceHandler.sol"; -import {ITypes} from "interfaces/ITypes.sol"; - -contract SimpleSubmit is ITypes, IWavsServiceHandler { - IWavsServiceManager private _serviceManager; - - constructor(IWavsServiceManager serviceManager) { - _serviceManager = serviceManager; - } - - function handleSignedEnvelope(Envelope calldata envelope, SignatureData calldata signatureData) external { - _serviceManager.validate(envelope, signatureData); - DataWithId memory dataWithId = abi.decode(envelope.payload, (DataWithId)); - // Custom logic to process validated data - } -} -``` - -## 3. Aggregator Role and Flow - -- Collects signed responses from multiple operators. -- Validates each operator's signature. -- Aggregates signatures when threshold is met (exact match aggregation). -- Submits aggregated data to the submission contract. -- Uses ECDSA signatures currently; BLS support planned. - -### Aggregator Submission Flow: - -1. Operator runs component → returns `WasmResponse` with `payload` and optional `ordering`. -2. Operator creates signed Envelope. -3. Packet with envelope, signature, route info sent to aggregator `/packet` endpoint. -4. Aggregator validates signature and queues packets by event and service ID. -5. When threshold reached: - - Combine signatures into one `SignatureData`. - - Validate combined signatures on-chain. -6. On success, aggregator calls `handleSignedEnvelope()` on submit contract with aggregated data. -7. Submit contract validates data and signatures via service manager. - -## 4. Workflow Chaining - -- Workflows can be chained by triggering one workflow on the submission event of another. -- See the [Workflows page](./workflows) for details. - -## Best Practices - -- Ensure `DataWithId` struct matches component output format. 
-- Validate all signatures on-chain via service manager. -- Use aggregator to ensure consensus before submission. -- Set appropriate gas limits in aggregator config. -- Use local aggregator endpoint during development. - -For more information: -- [WAVS Solidity Interfaces @wavs](https://www.npmjs.com/package/@wavs/solidity?activeTab=code) -- [Template Submission Contract](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/src/contracts/WavsSubmit.sol) -- [WAVS Design Considerations](/design) -- [Workflows Documentation](./workflows) diff --git a/.cursor/rules/template.mdc b/.cursor/rules/template.mdc deleted file mode 100644 index bea1ec13..00000000 --- a/.cursor/rules/template.mdc +++ /dev/null @@ -1,103 +0,0 @@ ---- -description: Overview and customization guide for the WAVS Foundry template structure and configuration - -alwaysApply: true ---- -# WAVS Foundry Template Overview - -This guide explains the structure and configuration of the WAVS Foundry template to help customize and build your own WAVS service. - -1. **Template Structure** - -The main files and directories in the WAVS template: - -```bash -wavs-foundry-template/ -├── README.md # Tutorial commands -├── makefile # Build and deploy commands, variables, configs -├── components/ # WASI components -│ └── evm-price-oracle/ -│ ├── src/ -│ │ ├── lib.rs # Main component logic -│ │ ├── trigger.rs # Trigger handling -│ │ └── bindings.rs # Auto-generated bindings (`make build`) -│ └── Cargo.toml # Component dependencies -├── compiled/ # Compiled WASM files (`make build`) -├── src/ -│ ├── contracts/ # Trigger and submission Solidity contracts -│ └── interfaces/ # Solidity interfaces -├── script/ # Deployment and interaction scripts -├── wavs.toml # WAVS service configuration -├── docs/ # Documentation -└── .env # Private environment variables -``` - -- Use `make wasi-build` to generate bindings and compile components. -- Copy `.env` from `.env.example` to set private environment variables. - -2. 
**TOML Configuration Files** - -- `wavs.toml`: Configures the WAVS service (chains, environments, etc.). -- Root `Cargo.toml`: Workspace configuration, dependencies, build settings, metadata. -- `components/*/Cargo.toml`: Component-specific Rust configuration; can inherit from root via `workspace = true`. - -Example component `Cargo.toml`: - -```toml -[package] -name = "evm-price-oracle" -edition.workspace = true -version.workspace = true -authors.workspace = true -rust-version.workspace = true -repository.workspace = true - -[dependencies] -wit-bindgen-rt = { workspace = true } -wavs-wasi-utils = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -alloy-sol-macro = { workspace = true } -wstd = { workspace = true } -alloy-sol-types = { workspace = true } -anyhow = { workspace = true } - -[lib] -crate-type = ["cdylib"] - -[profile.release] -codegen-units = 1 -opt-level = "s" -debug = false -strip = true -lto = true - -[package.metadata.component] -package = "component:evm-price-oracle" -target = "wavs:worker/layer-trigger-world@0.4.0-beta.4" -``` - -3. **wavs.toml Configuration** - -The `wavs.toml` file configures: - -- Default general settings (shared by all processes) -- WAVS server-specific settings -- CLI-specific settings -- Aggregator-specific settings - -4. 
**Environment Variable Overrides** - -Override config values using environment variables: - -- WAVS server settings: `WAVS_` -- CLI settings: `WAVS_CLI_` -- Aggregator settings: `WAVS_AGGREGATOR_` - ---- - -For more information: -- [WAVS Foundry Template GitHub](https://github.com/Lay3rLabs/wavs-foundry-template) -- [Oracle Component Tutorial](https://docs.wavs.xyz/tutorial/1-overview) -- [WAVS Design Considerations](https://docs.wavs.xyz/design) -- [wavs.toml Configuration](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/wavs.toml) diff --git a/.cursor/rules/triggers.mdc b/.cursor/rules/triggers.mdc deleted file mode 100644 index b29ade18..00000000 --- a/.cursor/rules/triggers.mdc +++ /dev/null @@ -1,181 +0,0 @@ ---- -description: Setup and manage WAVS service triggers for onchain events and scheduled executions - -alwaysApply: true ---- -# WAVS Service Triggers - -Triggers prompt WAVS services to run by listening for onchain events or schedules. Operators verify triggers and execute components off-chain. - -## Trigger Lifecycle - -1. Deploy a service with `service.json` manifest containing service info, workflow, components, triggers, and submission logic. -2. Operators maintain lookup maps for triggers by chain, contract, and event identifiers. -3. On trigger detection, operators verify and create a `TriggerAction` with config and event data. -4. 
`TriggerAction` structure: -```rust -pub struct TriggerAction { - pub config: TriggerConfig, // service_id, workflow_id, trigger type - pub data: TriggerData, // trigger-specific data -} - -pub struct TriggerConfig { - pub service_id: ServiceID, - pub workflow_id: WorkflowID, - pub trigger: Trigger, -} - -pub enum TriggerData { - CosmosContractEvent { - contract_address: layer_climb_address::Address, - chain_name: ChainName, - event: cosmwasm_std::Event, - block_height: u64, - }, - EvmContractEvent { - contract_address: alloy_primitives::Address, - chain_name: ChainName, - log: LogData, - block_height: u64, - }, - BlockInterval { - chain_name: ChainName, - block_height: u64, - }, - Cron { - trigger_time: Timestamp, - } -} -``` -5. `TriggerAction` is converted to WASI format and passed to components, decoded using `decode_event_log_data!` macro from [`wavs-wasi-utils`](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html). - -## Trigger Configuration - -Set triggers in the `trigger` field of `service.json`. Each workflow requires one trigger. - -### EVM Event Trigger - -Listens for specific contract events on EVM chains. Passes raw log data to the component. - -Example: -```json -"trigger": { - "evm_contract_event": { - "address": "0x00000000219ab540356cbb839cbe05303d7705fa", - "chain_name": "ethereum", - "event_hash": "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" - } -} -``` - -Configure chain in `wavs.toml`: -```toml -[default.chains.evm.ethereum] -chain_id = "1" -ws_endpoint = "wss://eth.drpc.org" -http_endpoint = "https://eth.drpc.org" -``` - -Set EVM credential in `.env`: -```env -WAVS_CLI_EVM_CREDENTIAL="0x5ze146f435835b1762ed602088740d201b68fd94bf808f97fd04588f1a63c9ab" -``` - -### Cosmos Event Trigger - -Monitors Cosmos smart contract events by type and address. Passes emitted contract data to component. 
- -Example: -```json -"trigger": { - "cosmos_contract_event": { - "address": { - "Cosmos": { - "bech32_addr": "neutron1qlaq54uh9f52d3p66q77s6kh9k9ee3vasy8gkdkk3yvgezcs6zts0mkcv4", - "prefix_len": 7 - } - }, - "chain_name": "neutron", - "event_type": "send_nft" - } -} -``` - -Configure chain in `wavs.toml`: -```toml -[default.chains.cosmos.neutron] -chain_id = "pion-1" -bech32_prefix = "neutron" -rpc_endpoint = "https://rpc-falcron.pion-1.ntrn.tech" -grpc_endpoint = "http://grpc-falcron.pion-1.ntrn.tech:80" -gas_price = 0.0053 -gas_denom = "untrn" -``` - -Set Cosmos mnemonic in `.env`: -```env -WAVS_CLI_COSMOS_MNEMONIC="large slab plate twenty laundry illegal vacuum phone drum example topic reason" -``` - -### Cron Trigger - -Executes component on a schedule defined by a cron expression with optional start/end times. Passes trigger timestamp. - -Example: -```json -"trigger": { - "cron": { - "schedule": "0 */5 * * * *", - "start_time": 1704067200000000000, - "end_time": 1735689599000000000 - } -} -``` - -Cron format (seconds to day of week): - -``` -* * * * * * -│ │ │ │ │ └─ Day of week (0-6, Sunday=0) -│ │ │ │ └── Month (1-12) -│ │ │ └─── Day of month (1-31) -│ │ └──── Hour (0-23) -│ └───── Minute (0-59) -└────── Second (0-59) -``` - -Common examples: - -- `0 */5 * * * *` - Every 5 minutes at 0 seconds -- `0 0 */6 * * *` - Every 6 hours -- `0 0 0 * * *` - Daily at midnight - -**Note:** Cron triggers may have latency due to network and clock drift. Use block triggers for precise timing. - -### Block Trigger - -Runs component at regular block intervals on EVM or Cosmos chains. Passes block height and chain name. - -Example: -```json -"trigger": { - "block_interval": { - "chain_name": "ethereum-mainnet", - "n_blocks": 10, - "start_block": null, - "end_block": null - } -} -``` - -## Best Practices - -- Always configure chain info in `wavs.toml` and credentials in `.env`. -- Use `decode_event_log_data!` macro in components to decode trigger data. 
-
-- Use cron triggers for non-time-critical tasks; use block triggers for precise scheduling.
-- Maintain accurate lookup maps for trigger verification.
-
-For more information:
-- [WAVS WASI Utils decode_event_log_data!](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html)
-- [Crontab Guru - Cron Expression Tool](https://crontab.guru/)
-- [WAVS Service Manifest and Workflow Rules](./service)
diff --git a/.cursor/rules/variables.mdc b/.cursor/rules/variables.mdc
deleted file mode 100644
index 46ef70b1..00000000
--- a/.cursor/rules/variables.mdc
+++ /dev/null
@@ -1,81 +0,0 @@
----
-description: Managing public and private configuration variables in WAVS components
-
-alwaysApply: true
----
-# Variables in WAVS Components
-
-WAVS components use two types of variables for configuration: public variables for non-sensitive data and environment keys for sensitive data.
-
-## Public Variables
-
-- Stored as strings in the `config` field of the service manifest.
-- Accessible in components via `host::config_var()`.
-
-### Steps to use public variables:
-
-1. Add variables to `config` in the manifest:
-
-```json
-"component": {
-  "config": {
-    "api_endpoint": "https://api.example.com",
-    "max_retries": "3"
-  }
-}
-```
-
-2. Access in Rust component:
-
-```rust
-let value = host::config_var("api_endpoint");
-```
-
-## Environment Keys (Private Variables)
-
-- Used for sensitive data like API keys.
-- Must be prefixed with `WAVS_ENV_`.
-- Set by operators in their environment; not visible publicly.
-- WAVS validates presence before service runs.
-
-### Steps to use environment keys:
-
-1. Create or copy `.env` file:
-
-```bash
-cp .env.example .env
-```
-
-2. Set environment variable in `.env` or shell config:
-
-```bash
-WAVS_ENV_MY_API_KEY=your_secret_key_here
-```
-
-3. Access in Rust component:
-
-```rust
-let api_key = std::env::var("WAVS_ENV_MY_API_KEY")?;
-```
-
-4. 
Declare in manifest under `env_keys`:
-
-```json
-"component": {
-  "env_keys": [
-    "WAVS_ENV_API_KEY"
-  ]
-}
-```
-
-## Local Execution
-
-Use `--config` flag with comma-separated `KEY=VALUE` pairs to set config variables locally:
-
-```bash
-wavs-cli exec --component <component-path> --input <input-data> --config api_endpoint=https://api.example.com
-```
-
-For more information:
-- [WAVS Variables Documentation](https://docs.wavs.xyz/handbook/components/variables)
-- [WAVS CLI Reference](https://docs.wavs.xyz/handbook/commands)
diff --git a/.cursor/rules/workflows.mdc b/.cursor/rules/workflows.mdc
deleted file mode 100644
index 4411672c..00000000
--- a/.cursor/rules/workflows.mdc
+++ /dev/null
@@ -1,146 +0,0 @@
----
-description: Define and manage WAVS service workflows specifying triggers, components, and submission logic.
-
-alwaysApply: true
----
-# WAVS Service Workflows
-
-A WAVS service consists of one or more workflows defining execution paths. Each workflow includes:
-
-- **Trigger**: Event that starts the workflow
-- **Component**: WASM component processing the event
-- **Submit**: Destination for results
-
-## Workflow Structure
-
-Workflows are defined in the service manifest JSON under the `workflows` key, each identified by a unique UUID. 
- -Example workflow with a cron trigger and aggregator submission: - -```json service.json -"workflows": { - "0196c34d-003d-7412-a3f3-70f8ec664e12": { - "trigger": { - "cron": { - "schedule": "0 * * * * *", - "start_time": null, - "end_time": null - } - }, - "component": { - "source": { - "Digest": "65747b4b1a7fa98cab6abd9a81a6102068de77b1040b94de904112272b226f51" - }, - "permissions": { - "allowed_http_hosts": "all", - "file_system": true - }, - "fuel_limit": null, - "time_limit_seconds": 1800, - "config": { - "nft": "0xb5d4D4a87Cb07f33b5FAd6736D8F1EE7D255d9E9", - "reward_token": "0x34045B4b0cdfADf87B840bCF544161168c8ab85A" - }, - "env_keys": [ - "WAVS_ENV_API_KEY" - ] - }, - "submit": { - "aggregator": { - "url": "http://127.0.0.1:8001" - } - }, - "aggregators": [ - { - "evm": { - "chain_name": "local", - "address": "0xd6f8ff0036d8b2088107902102f9415330868109", - "max_gas": 5000000 - } - } - ] - } -} -``` - -## Multi-workflow Services - -- Multiple workflows can coexist in one service manifest. -- Each workflow has independent trigger, component, and submission logic. -- All workflows share the same service manager and operator set. - -Example: - -```json -{ - "workflows": { - "workflow-uuid-1": { - "trigger": { ... }, - "component": { ... }, - "submit": { ... } - }, - "workflow-uuid-2": { - "trigger": { ... }, - "component": { ... }, - "submit": { ... } - } - } -} -``` - -## Workflow Isolation - -- Each workflow runs in a separate WebAssembly environment. -- Memory and state are isolated per execution. -- Components cannot access each other's memory or state directly. - -## Sharing State Between Workflows - -- WAVS services focus on data processing, not storage. -- Data sharing is done via external systems (e.g., onchain smart contracts). -- Workflow A submits data externally; Workflow B reads from the same source. 
- -Example flow: - -``` -A: Trigger -> component -> onchain submission storage -B: Trigger -> component (reads from A's storage) -> onchain submission storage -``` - -## Chaining Workflows - -- Chain workflows by setting the second workflow’s trigger to the onchain submission event of the first. -- This can be done within a service or across different services. - -Example: - -```json -{ - "workflows": { - "workflow-uuid-1": { - "trigger": { ... }, - "component": { ... }, - "submit": { ... } - }, - "workflow-uuid-2": { - "trigger": { /* onchain submission event of workflow-uuid-1 */ }, - "component": { ... }, - "submit": { ... } - } - } -} -``` - -## Multichain Services - -- WAVS supports contract event or block height triggers on Cosmos and EVM chains. -- Enables cross-chain services monitoring events on one chain and submitting results to Ethereum. -- More chain triggers coming soon. - -For detailed trigger options, see the [Trigger page](./triggers). - -For more information: -- [WAVS Design Considerations](../design) -- [Trigger Documentation](./triggers) -- [Component Documentation](./components/component) -- [Submission Documentation](./submission) diff --git a/docs/benefits.mdx b/docs/benefits.mdx deleted file mode 100644 index 47b17369..00000000 --- a/docs/benefits.mdx +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: WAVS benefits -description: Key advantages and use cases of WAVS platform ---- - - -WAVS - ->WAVS is a platform that makes building AVSs easier. - -**The problem**: creating an AVS the traditional way is complicated. It requires a lot of preliminary development work, such as building custom contracts, scaffolding infrastructure, working with Dockerized components, and coordinating with operators. Most of the development centers around creating AVS infrastructure, which is generally more complicated than the core logic of the service itself. 
- -**There is an easier way**: WAVS provides a base layer of AVS infrastructure so you can focus solely on creating the core logic of your service. This logic is written in Rust (with other languages available soon) and compiled as a lightweight WASI component which can be deployed to the WAVS platform and run as an AVS by operators. These components are run off-chain by operators in the WAVS (WASI-AVS) runtime at near-native speed, and the results are brought verifiably on-chain. A service of services, WAVS allows an AVS to dynamically run and manage multiple components that work together to build flexible and intelligent applications. - -## Why WAVS? - -WAVS redefines the AVS paradigm, making AVSs easier to build, less expensive to run, and enabling the next generation of composable, intelligent blockchain protocols. - -1. Dynamic and Cost-Effective Service Management - - Flexibility: Add, update, or manage components dynamically without having to coordinate upgrades with an entire operator set. - - Cost-effective and performant: Multiple AVSs run on the WAVS runtime - - WASI service components are lightweight compared to Docker, saving storage and startup time. - - WASI components have instantaneous initialization vs. Docker's redundant OS layers and slower boot times. -2. Simplified Development - - Focus on your application logic, not overhead: - - With templates, there's no need to write multiple custom contracts to parse events or aggregate signatures. - - WAVS handles common AVS infrastructure, leaving AVS developers to focus on their core logic. -3. Multichain Ready - - WAVS is built to operate across multiple blockchain environments and will be released with support for EVM networks. - - WAVS will foster a multichain ecosystem for AVSs to interact and interoperate. -4. Intelligent Protocols & Composability - - Enable asynchronous, verifiable execution flows across multiple on and off-chain components. 
- - Compose multiple services to create dynamic intelligent protocols that surpass the limitations of traditional smart contracts. diff --git a/docs/design.mdx b/docs/design.mdx deleted file mode 100644 index 38d51bb9..00000000 --- a/docs/design.mdx +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: WAVS design considerations -description: Best practices and design patterns for WAVS services ---- - - -The WAVS approach to AVS design focuses on the lightweight and agile nature of components, with an emphasis on performance and security. - -WAVS works best with the "serverless function" approach, wherein the priority of service component logic is to process data from external sources rather than store it locally. In this model, a component can receive input data from external sources, process it, and submit the verifiable outputs to external sources. - -## Aggregation and deterministic queries - -WAVS currently supports "exact match" [aggregation](./handbook/submission#aggregator), meaning that consensus is reached if a threshold amount of submitted responses from operators are identical. This approach fits many common use cases, but it means that developers must build their components to receive and process data only from deterministic or immutable sources, such as: - -- Data from the input event trigger -- Ethereum queries at a given block height -- IPFS data or other Content-Addressable Storage -- Web2 APIs that are trusted to return the same result on every query -- Seeded application parameters (e.g. Ollama for AI models) - -For example, when designing a price oracle, a **blockheight** or **timestamp** should be specified in the query logic. This ensures that the query is deterministic and that the same data point is retrieved by all operators. - -Conversely, a component that is designed to fetch "current price" would be non-deterministic and may result in a consensus error. 
Operators may run components at slightly different times, leading to discrepancies in the data they receive. This design should be avoided, as aggregation for this would require custom BFT averaging logic which is not currently supported. However, adding custom logic to process non-exact matches will be available in future releases. - -## State - -Persistent state introduces additional challenges in AVS design with WAVS: operators may execute triggers at different times or, in some cases, not run them at all if they join an AVS after it has been running for some time. Components that rely on operator-local mutable state risk failing consensus due to inconsistencies in execution. For these reasons, it is best practice to avoid storing operator-local mutable state within your components. - -This functionality would require features such as state synchronization (P2P), guaranteed execution ordering, and Merkle-proof validation which are not yet supported. - -## Caching - -WAVS provides components with an optional local data directory that can be used for caching. This storage method should only be used to cache data as a performance optimization and not as a replacement for external state stores. - -It is best practice when using a cache to reference external data from deterministic immutable sources to avoid consensus failures. - -Keep the following points in mind when using cached data: - -1. Caching should be used as a performance optimization only. -2. Use immutable or deterministic external data sources with specific time stamps or block heights. -3. Design your component logic to work even if the cache is cleared. Cache state is not shared among operators, and any operator should be able to rebuild the cache from scratch. -4. Don't use caching to store state that depends on previous executions or mutable data. 
diff --git a/docs/handbook/ai.mdx b/docs/handbook/ai.mdx deleted file mode 100644 index 89b0c698..00000000 --- a/docs/handbook/ai.mdx +++ /dev/null @@ -1,186 +0,0 @@ ---- -title: AI-powered component creation -description: Use Claude or Cursor to create one-shot components with minimal prompting ---- - - - -The WAVS Foundry Template contains built-in AI rulefiles for creating "one-shot" components with minimal prompting in Cursor or Claude Code. - -These rulefiles are an experimental feature and may not work as expected every time. Components created with AI should not be used in production without thorough review and testing. - - - For more information on AI tools and AI-accessible documentation, visit the [LLM resources page](/resources/llms). - - -## Claude Code - -- Follow the [Claude Code installation instructions](https://docs.anthropic.com/en/docs/claude-code/getting-started) to install Claude Code and link your account. -- The Claude rulefile is `claude.md` and contains instructions for Claude on how to create a component. -- Learn more about Claude rulefiles: https://docs.anthropic.com/en/docs/claude-code/memory - -## Cursor - -- Download Cursor: https://www.cursor.com/downloads -- The Cursor rulefiles are located in the `.cursor/rules` directory. -- When using Cursor, always attach the `component-rules.mdc` file to the chat with your prompt. -- Learn more about Cursor rulefiles: https://docs.cursor.com/context/rules - -## Using AI to create components - -1. Clone the [WAVS Foundry Template](https://github.com/Lay3rLabs/wavs-foundry-template) and follow the system setup requirements in the README. - -```sh -git clone https://github.com/Lay3rLabs/wavs-foundry-template.git -cd wavs-foundry-template -git checkout main -# Follow the system setup requirements in the README. -``` - -2. Open Claude Code or Cursor in the root of the template. - -```sh -claude -# or -cursor . 
-``` - - - -You can run a sandboxed instance of [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview) in a Docker container that only has access to this project's files by running the following command from the root of the project: - -```bash docci-ignore -npm run claude-code -# or with no restrictions (--dangerously-skip-permissions) -npm run claude-code:unrestricted -``` - - - -3. Enter your prompt in the agent chat. You can use the following examples as a starting point, or you can create your own prompt. - - -If you are using cursor, always attach `component-rules.mdc` file to the chat with your prompt. - -``` -@component-rules.mdc -``` - - - -### Prompt examples - -These simple examples are provided to get you started. - -#### API component - -You can make a very simple prompt to create a component that can bring API responses verifiably onchain by including the API endpoint: - -``` -Let's make a component that takes the input of a zip code, queries the openbrewerydb, -and returns the breweries in the area. -@https://api.openbrewerydb.org/v1/breweries?by_postal=92101&per_page=3 -``` - -#### Contract balance component - -You can also make components that interact with the blockchain: - -``` -I want to build a new component that takes the input of a wallet address, -queries the usdt contract, and returns the balance of that address. -``` - -#### Verifiable AI component - -``` -Please make a component that takes a prompt as input, sends an api request to OpenAI, -and returns the response. - - Use this api structure: - { - "seed": $SEED, - "model": "gpt-4o", - "messages": [ - {"role": "system", "content": "You are a helpful assistant."}, - {"role": "user", "content": ""} - ] - } - -My api key is WAVS_ENV_OPENAI_KEY in my .env file. -``` - -You'll need an [OPENAI API account and key](https://platform.openai.com/login) to use this prompt. 
The agent will include your API key in the component as a [private variable](./components/variables). - -Make sure to include your API key in a `.env` file: - -```sh -# copy the .env.example file -cp .env.example .env -# place your key in .env (must be prefixed with WAVS_ENV_) -WAVS_ENV_OPENAI_KEY=your_api_key -``` - -This example utilizes the OpenAI API with a [seed](https://platform.openai.com/docs/advanced-usage#reproducible-outputs) to make the response more deterministic. Please note that OpenAI models are not guaranteed to be 100% deterministic. This example is for demonstration purposes and should not be used in production. - -## Component creation process - -4. After receiving the prompt, the agent will start creating your component. Review the agent's work and accept changes carefully. Make sure to double check what the agent is doing and be safe about accepting changes. - -5. The agent will start by planning its component and will create a `plan.md` file. The agent will then make a new component and files according to this plan. - -6. The agent will test its component for errors by running validation tests using `make validate-component COMPONENT=your-component`. - -7. The agent may need to make changes after running the Validation tests. After making changes, the agent will build the component using `WASI_BUILD_DIR=components/my-component make wasi-build`. - -8. After successfully building your component, it's time to test it. The following command can be used to test your component logic without deploying WAVS. Make sure to replace the placeholders with the correct inputs. - -```sh -# Run this command to build the component: -WASI_BUILD_DIR=components/openai-response make wasi-build - -# Once built, test it with: -export COMPONENT_FILENAME=openai_response.wasm -export INPUT_DATA="Only respond with yes or no: Is AI beneficial to the world?" -make wasi-exec -``` - -The agent may try to run the `make wasi-exec` command themselves. 
You should prompt the agent to give you the command instead, as it can't run the command without permissions. - - -9. Your component should execute and return a response. If there are any errors, share them with the agent for troubleshooting. - -If you have any questions, join the WAVS DEVS Telegram channel: https://t.me/layer_xyz/818 - -## Tips for working with AI agents - -- While this repo contains rulefiles with enough context for creating simple components, coding agents are unpredictable and may inevitably run into problems. -- Feel free to update the rulefiles for your specific purposes or if you run into regular errors. -- Coding agents can sometimes try to over-engineer their fixes for errors. If you feel it is not being productive, it may be beneficial to start fresh. You may need to adjust your prompt. -- If you are building a complex component, it may be helpful to have the agent build a simple component first and then expand upon it. -- The agent may try to fix warnings unnecessarily. You can tell the agent to ignore minor warnings and any errors found in `bindings.rs` (it is auto-generated). - -### Prompting - -This repo is designed to be used with short prompts for simple components. However, often, coding agents will do better with more context. - -When creating a prompt, consider the following: - -- Agents work best with short, clear instructions. -- Provide relevant documentation (preferably as an `.md` file or other ai-digestible content). -- Provide endpoints. -- You may need to provide API response structure if the agent is not understanding responses. -- Be specific about what you want the agent to build. -- Agents work systematically to build components. For best results, agent should make a plan before they start building. -- Be patient. Coding agents are not perfect. They may make mistakes. - -## Troubleshooting - -- You can ask the agent to fix errors it may not be able to catch when executing components. 
Make sure to give the agent full context of the error. -- LLMs can be unpredictable. Minimal prompts provide a lot of room for creativity/error. If the agent is not able to fix an error after trying, sometimes deleting the component, clearing the history, and starting fresh can help. -- The agent may try to edit the bindings.rs file to "fix" it. The agent never needs to do this, and you should tell the agent to not do this. -- The agent is supposed to provide you with the `make wasi-exec` command. Sometimes it will try to run this itself and it will fail. Instead, ask it to give you the command. -- When copying and pasting the full `make wasi-exec` command, be careful with line breaks. You may need to reformat long lines to avoid breaking the command. diff --git a/docs/handbook/commands.mdx b/docs/handbook/commands.mdx deleted file mode 100644 index 78f623a3..00000000 --- a/docs/handbook/commands.mdx +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: Makefile commands -description: CLI commands for WAVS development ---- - - -## Commands - -Use `make help` to see all the commands: - -```bash -make help -``` - -Here are the available `make` commands and their descriptions: - -```bash -build building the project -wasi-build building WAVS wasi components | WASI_BUILD_DIR -wasi-exec executing the WAVS wasi component(s) with ABI function | COMPONENT_FILENAME, INPUT_DATA -wasi-exec-fixed the same as wasi-exec, except uses a fixed input as bytes (used in Go & TS components) | COMPONENT_FILENAME, INPUT_DATA -clean cleaning the project files -clean-docker remove unused docker containers -validate-component validate a WAVS component against best practices -fmt formatting solidity and rust code -test running tests -setup install initial dependencies -start-all-local starting anvil and core services (like IPFS for example) -get-trigger-from-deploy getting the trigger address from the script deploy -get-submit-from-deploy getting the submit address from the script deploy -wavs-cli 
running wavs-cli in docker -upload-component uploading the WAVS component | COMPONENT_FILENAME, WAVS_ENDPOINT -deploy-service deploying the WAVS component service json | SERVICE_URL, CREDENTIAL, WAVS_ENDPOINT -get-trigger get the trigger id | SERVICE_TRIGGER_ADDR, RPC_URL -show-result showing the result | SERVICE_SUBMISSION_ADDR, TRIGGER_ID, RPC_URL -upload-to-ipfs uploading the a service config to IPFS | SERVICE_FILE, [PINATA_API_KEY] -update-submodules update the git submodules -check-requirements verify system requirements are installed -``` - -For more information on commands when using the template, visit the [WAVS tutorial](/tutorial/1-overview). diff --git a/docs/handbook/components/blockchain-interactions.mdx b/docs/handbook/components/blockchain-interactions.mdx deleted file mode 100644 index c7118f5f..00000000 --- a/docs/handbook/components/blockchain-interactions.mdx +++ /dev/null @@ -1,225 +0,0 @@ ---- -title: Blockchain interactions -description: Interacting with blockchains from WAVS components ---- - - -Components can interact with blockchains and smart contracts by using crates like [`wavs-wasi-utils`](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/). This page provides an overview of the dependencies and configuration needed to interact with Ethereum and other EVM-compatible chains. - -### `wavs-wasi-utils` crate - -The `wavs-wasi-utils` crate provides a set of helpful functions for making HTTP requests and interacting with the blockchain. It also provides a macro for decoding trigger data for use in the component. - -Learn more in the [crate documentation](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/index.html). - -The `decode_event_log_data` macro is a utility for decoding Ethereum event logs from triggers into typed Rust events. It takes raw log data (topics and data) from the WAVS worker bindings and converts it into a Rust type that implements `SolEvent`. 
- -## Dependencies - -The following dependencies are commonly required in your component's `Cargo.toml` for Ethereum interactions: - -```toml -[dependencies] -# Core WAVS blockchain functionality -wit-bindgen-rt = { workspace = true, features = ["bitflags"] } # Required for WASI bindings -wavs-wasi-utils = "0.4.0" # Blockchain interaction utilities -wstd = "0.5.3" # WASI standard library - -# Alloy crates for Ethereum interaction -alloy-sol-macro = { version = "1.1.0", features = ["json"] } # sol! macro for interfaces -alloy-sol-types = "1.1.0" # ABI handling & type generation -alloy-network = "0.15.10" # Network trait and Ethereum network type -alloy-provider = { version = "0.15.10", default-features = false, features = ["rpc-api"] } # RPC provider -alloy-rpc-types = "0.15.10" # RPC type definitions -alloy-contract = "0.15.10" # Contract interaction utilities - -# Other useful crates -anyhow = "1.0.98" # Error handling -serde = { version = "1.0.219", features = ["derive"] } # Serialization/deserialization -serde_json = "1.0.140" # JSON handling -``` - -Note: The `workspace = true` syntax can be used if your project is part of a workspace that defines these versions centrally. Otherwise, use the explicit versions shown above. - -## Chain configuration - -Chain configurations are defined in the root `wavs.toml` file. This allows components to access RPC endpoints and chain IDs without hardcoding them. - -```toml wavs.toml -# Local / Testnet -[default.chains.evm.local] -chain_id = "31337" -ws_endpoint = "ws://localhost:8545" -http_endpoint = "http://localhost:8545" -poll_interval_ms = 7000 - -# Mainnet -[default.chains.evm.ethereum] -chain_id = "1" -ws_endpoint = "wss://eth.drpc.org" -http_endpoint = "https://eth.drpc.org" -``` - -### Sol! macro - -The `sol!` macro from `alloy-sol-macro` allows you to generate Rust types from Solidity interface files. 
- -You can write Solidity definitions (interfaces, structs, enums, custom errors, events, and function signatures) directly inside the `sol!` macro invocation in your Rust code. - -At compile time, the `sol!` macro parses that Solidity syntax and automatically generates the equivalent Rust types, structs, enums, and associated functions (like `abi_encode()` for calls or `abi_decode()` for return data/events) needed to interact with smart contracts based on those definitions. - -Required Dependencies: - -```toml -[dependencies] -alloy-sol-macro = { workspace = true } # For Solidity type generation -alloy-sol-types = { workspace = true } # For ABI handling -``` - -Basic Pattern: - -```rust -mod solidity { - use alloy_sol_macro::sol; - - // Generate types from Solidity file - sol!("../../src/interfaces/ITypes.sol"); - - // Or define types inline - sol! { - struct TriggerInfo { - uint64 triggerId; - bytes data; - } - - event NewTrigger(TriggerInfo _triggerInfo); - } -} -``` - -In the template, the `sol!` macro is used in the `trigger.rs` component file to generate Rust types from the `ITypes.sol` file. - -```rust trigger.rs -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - - // The objects here will be generated automatically into Rust types. - // If you update the .sol file, you must re-run `cargo build` to see the changes. - sol!("../../src/interfaces/ITypes.sol"); - - // Define a simple struct representing the function that encodes string input - sol! { - function addTrigger(string data) external; - } -} -``` - -The macro reads a Solidity interface file and generates corresponding Rust types and encoding/decoding functions. 
In the example above, it reads `ITypes.sol` which defines: -- `NewTrigger` event -- `TriggerInfo` struct -- `DataWithId` struct - -More documentation on the `sol!` macro can be found at: https://docs.rs/alloy-sol-macro/latest/alloy_sol_macro/macro.sol.html - - -## Accessing configuration and provider - -WAVS provides host bindings to get the chain config for a given chain name in the wavs.toml file. The `new_evm_provider` from [`wavs-wasi-utils`](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/evm/fn.new_evm_provider.html) can be used to create a provider for a given chain. - -```rust lib.rs -use crate::bindings::host::get_evm_chain_config; -use alloy_network::Ethereum; -use alloy_provider::RootProvider; -use wavs_wasi_utils::evm::new_evm_provider; - -// Get the chain config for a specific chain defined in wavs.toml -let chain_config = get_evm_chain_config("local").unwrap(); - -// Create an Alloy provider instance using the HTTP endpoint -let provider: RootProvider = new_evm_provider::( - chain_config.http_endpoint.unwrap(), -); -``` - -## Example: Querying NFT balance - -Here's an [example](https://github.com/Lay3rLabs/wavs-art/blob/main/components/autonomous-artist/src/evm.rs) demonstrating how to query the balance of an ERC721 NFT contract for a given owner address: - -```rust lib.rs -// Import required dependencies -use crate::bindings::host::get_evm_chain_config; // WAVS host binding to get chain configuration -use alloy_network::Ethereum; // Ethereum network type -use alloy_provider::RootProvider; // Provider for making RPC calls -use alloy_sol_types::sol; // Macro for generating Solidity bindings -use wavs_wasi_utils::evm::{ // WAVS utilities for EVM interaction - alloy_primitives::{Address, U256}, // Ethereum primitive types - new_evm_provider, // Function to create EVM provider -}; -use alloy_rpc_types::TransactionInput; -use wstd::runtime::block_on; // Utility to run async code in sync context - -// Define the ERC721 interface using the sol! 
macro -// This generates Rust types and functions for interacting with the contract -sol! { - interface IERC721 { - // Define the balanceOf function that returns how many NFTs an address owns - function balanceOf(address owner) external view returns (uint256); - } -} -// Function to check if an address owns any NFTs from a specific contract -pub fn query_nft_ownership(address: Address, nft_contract: Address) -> Result { - // block_on allows us to run async code in a synchronous function - block_on(async move { - // Get the chain configuration for the local network - let chain_config = get_evm_chain_config("local").unwrap(); - // Create a provider that will handle RPC communication - let provider: RootProvider = new_evm_provider::( - chain_config.http_endpoint.unwrap() - ); - - // Create a contract instance using the generated IERC721 interface - let balance_call = IERC721::balanceOf { owner: address }; - - let tx = alloy_rpc_types::eth::TransactionRequest { - to: Some(TxKind::Call(nft_contract)), - input: TransactionInput { input: Some(balance_call.abi_encode().into()), data: None }, - ..Default::default() - }; - - // Call the balanceOf function on the contract - // .call() executes the function as a view call (no state changes) - let result = provider.call(tx).await.map_err(|e| e.to_string())?; - - // Return true if the address owns at least one NFT (balance > 0) - let balance: U256 = U256::from_be_slice(&result); - Ok(balance > U256::ZERO) - }) -} -``` - -You can also use the `alloy-contract` crate to interact with smart contracts. See the [alloy-contract docs](https://crates.io/crates/alloy-contract) page for more information. - -See the [wavs-wasi-utils documentation](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/) and the [Alloy documentation](https://docs.rs/alloy/latest/alloy/) for more detailed information. 
- - -### Alloy ecosystem crates - -The Alloy ecosystem provides a comprehensive set of crates for Ethereum development: - -- [`alloy-primitives`](https://docs.rs/alloy-primitives/latest/alloy_primitives/): Core Ethereum types (`Address`, `U256`, `Bytes`, etc.) -- [`alloy-provider`](https://docs.rs/alloy-provider/latest/alloy_provider/): Ethereum node interaction (RPC, WebSocket, batching) -- [`alloy-network`](https://docs.rs/alloy-network/latest/alloy_network/): Network types and chain-specific functionality -- [`alloy-sol-types`](https://docs.rs/alloy-sol-types/latest/alloy_sol_types/): ABI handling and type generation -- [`alloy-contract`](https://docs.rs/alloy-contract/latest/alloy_contract/): Contract interaction utilities - -### Utility crates - -Essential utility crates for WAVS components: - -- [`wstd`](https://docs.rs/wstd/latest/wstd/): WASI standard library with `block_on` for async operations -- [`serde`](https://docs.rs/serde/latest/serde/)/[`serde_json`](https://docs.rs/serde_json/latest/serde_json/): Data serialization and JSON handling -- [`anyhow`](https://docs.rs/anyhow/latest/anyhow/): Error handling and propagation diff --git a/docs/handbook/components/component.mdx b/docs/handbook/components/component.mdx deleted file mode 100644 index 9c3eb944..00000000 --- a/docs/handbook/components/component.mdx +++ /dev/null @@ -1,209 +0,0 @@ ---- -title: Component overview -description: Understanding WAVS service components and their structure ---- - - -Service components contain the main business logic of a WAVS service. - -## Component languages - -WAVS enables developers to write components in different programming languages. These languages are compiled to WebAssembly (WASM) bytecode where they can be executed off-chain in the WAVS runtime. 
- -The examples in this documentation are mainly written in Rust, but there are also examples of components written in the following languages: - -- [Go example](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/golang-evm-price-oracle) -- [Typescript / JS example](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/js-evm-price-oracle) - -## Component structure - -A basic component has three main parts: - -- Decoding incoming [trigger data](../triggers#trigger-lifecycle). -- Processing the data (this is the custom business logic of your component). -- Encoding and returning the result for submission (if applicable). - -### Trigger inputs - -When building WASI components, keep in mind that components can receive the [trigger data](../triggers#trigger-lifecycle) in two ways: - -1. **On-chain events**: When triggered by an EVM event, the data comes through the `TriggerAction` with `TriggerData::EvmContractEvent`. - -2. **Local testing**: When using `make wasi-exec` command in the template to test a component, the data comes through `TriggerData::Raw`. - -Here's how the example component handles both cases in `trigger.rs`: - -```rust -// In trigger.rs - -pub fn decode_trigger_event(trigger_data: TriggerData) -> Result<(u64, Vec, Destination)> { - match trigger_data { - // On-chain Event - // - Receive a log that needs to be decoded using the contract's ABI - // - Decode into our Solidity types generated by the sol! macro from the Solidity interface - TriggerData::EvmContractEvent(TriggerDataEvmContractEvent { log, .. 
}) => { - // Decode Ethereum event logs using the `decode_event_log_data!` macro - let event: solidity::NewTrigger = decode_event_log_data!(log)?; - let trigger_info = - ::abi_decode(&event._triggerInfo)?; - Ok((trigger_info.triggerId, trigger_info.data.to_vec(), Destination::Ethereum)) - } - // Local Testing (wasi-exec) - // - Receive raw bytes directly - // - No ABI decoding is needed - TriggerData::Raw(data) => Ok((0, data.clone(), Destination::CliOutput)), - _ => Err(anyhow::anyhow!("Unsupported trigger data type")), - } -} - - -pub mod solidity { // Define the Solidity types for the incoming trigger event using the `sol!` macro - use alloy_sol_macro::sol; - pub use ITypes::*; - - // The objects here will be generated automatically into Rust types. - // the interface shown here is used in the example trigger contract in the template. - sol!("../../src/interfaces/ITypes.sol"); - - // The addTrigger function from the trigger contract - sol! { - function addTrigger(string data) external; - } -} -``` - -The component decodes the incoming event trigger data using the `decode_event_log_data!` macro from the [`wavs-wasi-utils` crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html). - -The `sol!` macro from `alloy-sol-macro` is used to define Solidity types in Rust. This macro reads a Solidity interface file and generates corresponding Rust types and encoding/decoding functions. For more information, visit the [Blockchain interactions page](./blockchain-interactions#sol-macro). - -### Component logic - -Components must implement the `Guest` trait, which is the main interface between your component and the WAVS runtime. - -The `run` function is the entry point for your business logic: it receives and decodes the trigger data, processes it according to your component's logic, and returns the results. - - -```rust -// In lib.rs -impl Guest for Component { - fn run(action: TriggerAction) -> Result, String> { - // 1. 
Decode the trigger data using the decode_trigger_event function from trigger.rs - let (trigger_id, req, dest) = decode_trigger_event(action.data)?; - // 2. Process the data (your business logic) - let res = block_on(async move { - let resp_data = get_price_feed(id).await?; - serde_json::to_vec(&resp_data) - })?; - - // 3. Encode the output based on destination - let output = match dest { - // For on-chain submissions, the output is abi encoded using the encode_trigger_output function from trigger.rs - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - // For local testing via wasi-exec, the output is returned as raw bytes - Destination::CliOutput => Some(WasmResponse { - payload: res.into(), - ordering: None - }), - }; - Ok(output) - } -} -``` - -Components can contain any compatible logic, including [blockchain interactions](./blockchain-interactions), [network requests](./network-requests), off-chain computations, and more. To learn about the types of components that WAVS is best suited for, visit the [design considerations](../../design) page. - -#### Logging in a component - -Components can use logging to debug and track the execution of the component. - -**Logging in development**: - -Use `println!()` to write to stdout/stderr. This is visible when running `make wasi-exec` locally in the template. - -```rust lib.rs -println!("Debug message: {:?}", data); -``` - -**Logging in production** - -For production, you can use a `host::log()` function which takes a `LogLevel` and writes its output via the tracing mechanism. Along with the string that the developer provides, it attaches additional context such as the `ServiceID`, `WorkflowID`, and component `Digest`. - -```rust lib.rs -use bindings::host::{self, LogLevel}; - -host::log(LogLevel::Info, "Production logging message"); -``` - -### Component output - -After processing data in the `run` function, the component can encode the output data for submission back to Ethereum. 
In the template example, this is done using the `encode_trigger_output` function in the trigger.rs file. - -```rust -/// Encodes the output data for submission back to Ethereum -pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmResponse { - WasmResponse { - payload: solidity::DataWithId { - triggerId: trigger_id, - data: output.as_ref().to_vec().into(), - } - .abi_encode(), // ABI encode the struct for blockchain submission - ordering: None, // Optional ordering parameter for transaction sequencing - } -} -``` - -Outputs for components are returned as a `WasmResponse` struct, which is a wrapper around the output data of the component for encoding and submission back to Ethereum. It contains a `payload` field that is the encoded output data and an optional `ordering` field that is used to order the transactions in the workflow. The `WasmResponse` is submitted to WAVS which routes it according to the workflow's submission logic. - -## Component definition - -A component is defined in the [workflow](../workflows) object of the [service.json](../service) file. Below is an example of the different fields that can be defined in the component object. - -```json service.json -// ... other parts of the service manifest -// "workflows": { -// "workflow-example": { -// "trigger": { ... 
}, the trigger for the workflow - "component": { // the WASI component containing the business logic of the workflow - "source": { // Where the component code comes from - "Registry": { - "registry": { - "digest": "882b992af8f78e0aaceaf9609c7ba2ce80a22c521789c94ae1960c43a98295f5", // SHA-256 hash of the component's bytecode - "domain": "localhost:8090", - "version": "0.1.0", - "package": "example:evmrustoracle" - } - } - }, - "permissions": { // What the component can access - "allowed_http_hosts": "all", // Can make HTTP requests to any host - "file_system": true // Can access the filesystem - }, - "fuel_limit": null, // Computational limits for the component - "time_limit_seconds": 1800, // Can run for up to 30 minutes - "config": { // Configuration variables passed to the component - "variable_1": "0xb5d4D4a87Cb07f33b5FAd6736D8F1EE7D255d9E9", // NFT contract address - "variable_2": "0x34045B4b0cdfADf87B840bCF544161168c8ab85A" // Reward token address - }, - "env_keys": [ // Secret environment variables the component can access from .env - "WAVS_ENV_API_KEY", // secret API key with prefix WAVS_ENV_ - ] - }, -// "submit": { ... } // submission logic for the workflow -// ... the rest of the service manifest -``` - -For more information on component configuration variables and keys, visit the [variables](./variables) page. - -## Registry - -WAVS uses a registry to store the WASM components. A service like [wa.dev](https://wa.dev) is recommended for proper distribution in production. 
A similar registry environment is emulated locally in docker compose for rapid development without an API key: - -- Build your component -- Compile the component -- Upload the component to the registry -- Set the registry in your service using the wavs-cli command in the `build_service.sh` script: - -`wavs-cli workflow component --id ${WORKFLOW_ID} set-source-registry --domain ${REGISTRY} --package ${PKG_NAMESPACE}:${PKG_NAME} --version ${PKG_VERSION}` diff --git a/docs/handbook/components/network-requests.mdx b/docs/handbook/components/network-requests.mdx deleted file mode 100644 index 555b69db..00000000 --- a/docs/handbook/components/network-requests.mdx +++ /dev/null @@ -1,132 +0,0 @@ ---- -title: Network requests -description: Making HTTP requests from WAVS components ---- - - -Components can make network requests to external APIs using the [`wavs-wasi-utils` crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/index.html). - -To learn how to use variables like API keys in a component, visit the [Variables page](./variables). - -### Dependencies - -The following dependencies are required for making HTTP requests from a component: - -```toml Cargo.toml -[dependencies] -wavs-wasi-utils = "0.4.0" # HTTP utilities -wstd = "0.5.3" # Runtime utilities (includes block_on) -serde = { version = "1.0.219", features = ["derive"] } # Serialization -serde_json = "1.0.140" # JSON handling -``` - -Since WASI components run in a synchronous environment but network requests are asynchronous, you can use `block_on` from the `wstd` crate to bridge this gap. The `block_on` function allows you to run async code within a synchronous context, which is essential for making HTTP requests in WAVS components. - -### Making HTTP requests - -The `wavs-wasi-utils` crate provides several functions for making HTTP requests. See the [HTTP module documentation](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/http/index.html) for more details. 
- -```rust -// Request functions -http_request_get(url) // Creates a GET request -http_request_post_json(url, data) // Creates a POST request with JSON data -http_request_post_form(url, data) // Creates a POST request with form data - -// Response functions -fetch_json(request) // Fetches and parses JSON response -fetch_string(request) // Fetches response as string -fetch_bytes(request) // Fetches raw response bytes -``` - -### Example: GET request with headers - -Here's an example showing how to make a GET request with custom headers: - -```rust lib.rs -use wstd::runtime::block_on; -use wstd::http::HeaderValue; -use wavs_wasi_utils::http::{fetch_json, http_request_get}; -use serde::{Deserialize, Serialize}; - -// Define response type with serde derive for automatic JSON parsing -#[derive(Debug, Serialize, Deserialize)] -struct ApiResponse { - // ... your response fields -} - -async fn make_request() -> Result { - let url = "https://api.example.com/endpoint"; - let mut req = http_request_get(&url).map_err(|e| e.to_string())?; - // Set headers for API requests - req.headers_mut().insert( - "Accept", - HeaderValue::from_static("application/json") - ); - req.headers_mut().insert( - "Content-Type", - HeaderValue::from_static("application/json") - ); - req.headers_mut().insert( - "User-Agent", - HeaderValue::from_static("Mozilla/5.0") - ); - // Use fetch_json to automatically parse the response - let json: ApiResponse = fetch_json(req) - .await - .map_err(|e| e.to_string())?; - Ok(json) -} - -// Use block_on to handle async code in sync context -fn process_data() -> Result { - block_on(async move { - make_request().await - })? 
-} -``` - -### Example: POST request with JSON data - -For making POST requests with JSON data, you can use the `http_request_post_json` helper function: - -```rust lib.rs -use wstd::runtime::block_on; -use wavs_wasi_utils::http::{fetch_json, http_request_post_json}; -use serde::{Deserialize, Serialize}; - -// Define request and response types with serde derive -#[derive(Debug, Serialize, Deserialize)] -struct PostData { - key1: String, - key2: i32, -} - -#[derive(Debug, Serialize, Deserialize)] -struct PostResponse { - // ... response fields -} - -async fn make_post_request() -> Result { - let url = "https://api.example.com/endpoint"; - let post_data = PostData { - key1: "value1".to_string(), - key2: 42, - }; - // http_request_post_json automatically sets JSON headers - let response: PostResponse = fetch_json( - http_request_post_json(&url, &post_data)? - ).await.map_err(|e| e.to_string())?; - Ok(response) -} - -fn process_data() -> Result { - block_on(async move { - make_post_request().await - })? -} -``` - -For more details, visit the [wavs-wasi-utils documentation](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/). diff --git a/docs/handbook/components/variables.mdx b/docs/handbook/components/variables.mdx deleted file mode 100644 index c7a46bc1..00000000 --- a/docs/handbook/components/variables.mdx +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: Variables -description: Managing configuration variables in WAVS components ---- - - -Components can be configured with two types of variables: - -## Public variables - -These variables can be used for non-sensitive information that can be viewed publicly. These variables are set in the `config` field of a service manifest. All config values are stored as strings, even for numbers. - -To add public variables: - -1. 
Add the public variables to the `config` field in the service manifest: - -```json -"component": { - "config": { - "api_endpoint": "https://api.example.com", // Access using host::config_var() - "max_retries": "3" // Config values are always strings - } -} -``` - -2. Access them in the component using `host::config_var()`: - -```rust -let value = host::config_var("api_endpoint"); -``` - -## Environment keys - -Environment keys are private and can be used for sensitive data like API keys. These variables are set by operators in their environment and are not viewable by anyone. These variables must be prefixed with `WAVS_ENV_`. Each operator must set these variables in their environment before deploying the service. WAVS validates that all environment variables are set before allowing the service to run. - -To add private variables: - -1. Create a new `.env` file in WAVS template: - -```bash -# copy the example file -cp .env.example .env -``` - -Variables can also be set in your `~/.bashrc`, `~/.zshrc`, or `~/.profile` files. - -2. Set the environment variable in your `.env` file: - -```bash -# .env file -WAVS_ENV_MY_API_KEY=your_secret_key_here -``` - -3. Access the environment key from a component: - -```rust -let api_key = std::env::var("WAVS_ENV_MY_API_KEY")?; -``` - -4. Before deploying a service, add the environment key to the `env_keys` array in the service manifest: - -```json -"component": { - "env_keys": [ - "WAVS_ENV_API_KEY" // Environment variables the component can access. Must be prefixed with WAVS_ENV_ - ] -} -``` - -## Local Execution - -When running components locally (raw), use the `--config` flag to set values in a KEY=VALUE format, comma-separated: `--config a=1,b=2`. 
- -```bash -wavs-cli exec --component --input --config api_endpoint=https://api.example.com -``` diff --git a/docs/handbook/overview.mdx b/docs/handbook/overview.mdx deleted file mode 100644 index 11226be9..00000000 --- a/docs/handbook/overview.mdx +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: Handbook overview -description: Guide to WAVS handbook structure and contents ---- - - - -{/* todo: verify all links are correct. */} - -Before reading this guide, follow the [Oracle component tutorial](/tutorial/1-overview) to learn the basics of building a WAVS service. - - - -This handbook provides an overview of the different parts that make up a WAVS AVS. - -## Tutorial - -- [Oracle component tutorial](/tutorial/1-overview) - Start here to learn the basics of building a WAVS service. - -## Core Concepts - -- [How it works](../how-it-works) - Learn about the different parts that make up a WAVS AVS. -- [Design](../design) - Learn about the design considerations for building a WAVS AVS. - -## Services - -- [Service](./service) - Learn about WAVS services, their structure, and how they are defined in the service manifest. -- [Workflows](./workflows) - Understand how workflows define execution paths in your service, including triggers, components, and submissions. -- [Triggers](./triggers) - Explore the different types of triggers that can initiate your service, including EVM events, Cosmos events, cron jobs, and block intervals. -- [Submission and Aggregator](./submission) - Discover how results are submitted to the blockchain through the aggregator service and submission contracts. - -## Components -- [Component overview](./components/component) - Learn about the structure and lifecycle of WAVS components, including how to handle triggers and process data. -- [Variables](./components/variables) - Understand how to configure components with public and private variables. 
-- [Blockchain interactions](./components/blockchain-interactions) - Discover how to interact with blockchains and smart contracts from your components. -- [Network requests](./components/network-requests) - Learn how to make HTTP requests to external APIs from your components. - -## Development -- [Template](./template) - Get started with the WAVS template, including its structure, configuration files, and how to customize it for your service. -- [Makefile commands](./commands) - Reference for the available makefile commands to build, deploy, and manage your service. -- [AI-powered component creation](./ai) - Learn how to use AI coding agents to create components. - -Each section provides detailed information and examples to help you understand and build your WAVS service. Start with the Service section to understand the basic concepts, then explore the other sections based on your needs. diff --git a/docs/handbook/service.mdx b/docs/handbook/service.mdx deleted file mode 100644 index 5aa88864..00000000 --- a/docs/handbook/service.mdx +++ /dev/null @@ -1,147 +0,0 @@ ---- -title: Service manifest and manager -description: Overview of the service.json manifest file and service manager contract ---- - - -A service is a collection of smart contracts, operators, and offchain components that make up a WAVS AVS. The different parts of a service are defined in a service manifest or `service.json` file. This file can be stored on IPFS or an HTTP/HTTPS server, and its URL is set on the service manager contract during deployment, allowing the system to fetch the service definition when needed. - -The service manifest defines the configuration and different parts of a WAVS service, including information about the service, [workflows](/handbook/workflows), [components](/handbook/components/component), [submission](/handbook/submission), [service manager contract](#service-manager), and more. 
- -## Generate Manifest - -You can create the service.json file using the `wavs-cli service` command. The template provides a script to generate a single-component service with ease, [build_service.sh](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/script/build-service.sh). - -## Example Manifest - -```json service.json -{ - // Basic service information - "id": "example-service-123", - "name": "Example WAVS Service", - // Workflows define the different execution paths in your service - "workflows": { - // Each workflow has a unique ID - "default": { - // Trigger defines what initiates the workflow - "trigger": { - // This example uses an EVM contract event trigger - "evm_contract_event": { - "chain_name": "ethereum", - "address": "0x1234567890123456789012345678901234567890", - "event_hash": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" - } - }, - - // Component defines the WASI component to execute - "component": { - "source": { - "Registry": { - "registry": { - // (SHA-256) hash that identifies a specific version of a WASI component's code in WAVS - "digest": "882b992af8f78e0aaceaf9609c7ba2ce80a22c521789c94ae1960c43a98295f5", - // The domain the warg WASI component is hosted on - "domain": "localhost:8090", - // The version of this component - "version": "0.1.0", - // The package name of the component on the domain registry - "package": "example:evmrustoracle" - } - } - }, - - // Permissions define what the component can access - "permissions": { - // HTTP host permissions - "allowed_http_hosts": "all" // Options: "all", ["host1", "host2"], or "none" - // File system permissions - "file_system": true - }, - - // Resource limits - "fuel_limit": 1000000, // Optional: Maximum compute units - "time_limit_seconds": 30, // Optional: Maximum execution time - - // Component-specific configuration - "config": { - "endpoint": "https://api.example.com", - "timeout": "30s" - }, - - // Environment variables to be passed to the component - 
"env_keys": [ - "WAVS_ENV_API_KEY", - "WAVS_ENV_SECRET" - ] - }, - - // Submit defines where the results are sent - "submit": { - // The aggregator configuration - "aggregator": { - "url": "http://127.0.0.1:8001" - } - }, - - "aggregators": [ - { - "evm": { - // The identifier for the chain the submission contract (set in wavs.toml) - "chain_name": "ethereum", - // The address of the submission contract with the service handler interface - "address": "0xfedcba9876543210fedcba9876543210fedcba98", - // The maximum amount of gas for submission - "max_gas": 1000000 // Optional - } - } - ] - } //other workflows can be added here - }, - - // Service status - "status": "active", // Options: "active" or "inactive" - - // Service manager configuration - "manager": { - "evm": { - "chain_name": "ethereum", - // The address of the service manager contract - "address": "0xabcdef1234567890abcdef1234567890abcdef12" - } - } -} -``` - -## Upload - -This file should be uploaded to IPFS, or some other hosted service that all operators can access. The template launches a local IPFS for testing. Use a service like [Pinata](https://app.pinata.cloud/developers/api-keys) for production services. 
- -```bash -# Upload to local or remote IPFS (smart routes based on .env deploy configuration) -SERVICE_FILE=${SERVICE_FILE} make upload-to-ipfs - -# smart grabs the IPFS gateway and fetches the content that was uploaded -export IPFS_GATEWAY=$(sh script/get-ipfs-gateway.sh) -curl "${IPFS_GATEWAY}${ipfs_cid}" - -# Then the admin of the contracts can set it -cast send ${WAVS_SERVICE_MANAGER_ADDRESS} 'setServiceURI(string)' "${SERVICE_URI}" -r ${RPC_URL} --private-key ${DEPLOYER_PK} -``` - -For more information on the different parts of a service manifest, see the following sections: - -- [Workflows](./workflows) -- [Triggers](./triggers) -- [Components](./components/component) -- [Submission and aggregator](./submission) - -## Service manager - -The service manager contract defines the set of registered operators for a service. Only operators registered in this contract are considered valid signers for result submissions in a service. Each registered operator is assigned a weight. These weights count toward a threshold for their submission power. - -The service manager also maintains a service URI that points to the service manifest, connecting the operators to the service. - -Signatures are created by operators using their private keys to sign an envelope containing the data, and these signatures are collected by the aggregator which then submits them to the service manager contract for validation. The service manager contract validates that the signatures are from registered operators, checks that their total weight meets the threshold, and ensures the operators are properly sorted before allowing the data to be processed by the [service handler](/handbook/submission) contract. 
diff --git a/docs/handbook/submission.mdx b/docs/handbook/submission.mdx deleted file mode 100644 index a77fd17b..00000000 --- a/docs/handbook/submission.mdx +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: Submission and aggregator -description: Creating and configuring submission contracts and the aggregator ---- - - -This page describes the submission and aggregator services used to submit results from a workflow to a submission contract on an EVM chain. - -## Submit definition - -The `submit` field in a service.json file specifies the submission logic for a service. The `aggregator` type sends results to an aggregator service, which validates the results and submits them to a target contract on an EVM chain. - -```json service.json - "submit": { // Where results are sent - "aggregator": { // Type of submission (aggregator) - "url": "http://127.0.0.1:8001" // Local aggregator endpoint - } - }, - "aggregators": [ // The final submission address that the aggregator will submit to - { - "evm": { // EVM chain configuration - "chain_name": "local", // Local Ethereum chain - "address": "0xd6f8ff0036d8b2088107902102f9415330868109", // Submission contract address - "max_gas": 5000000 // Maximum gas limit for transactions - } - } - ] -``` -Submit can also be set to `none` if the service does not need to submit results to a contract. The component will still run, but the results will not be submitted. - -## Submission contract - -A service handler or submission contract handles the logic for verifying the submission of a component's output to the blockchain. The only requirement for a submission contract is that it must implement the `handleSignedEnvelope()` function using the `IWavsServiceHandler` interface to validate data and signatures using the service manager. 
This interface is defined in the `@wavs` package: https://www.npmjs.com/package/@wavs/solidity?activeTab=code - - - -Workflows can be chained together by setting the trigger event of one workflow to the submission event of another workflow. For more information on chaining workflows, see the [Workflows page](./workflows). - - - -## Template submission example - -The [template submission contract](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/src/contracts/WavsSubmit.sol) uses the `handleSignedEnvelope()` function to validate operator signatures and store the processed data from the component. The `DataWithId` struct must match the output format from the component. In the template, each trigger has a unique ID that links the data to its source. - -Below is a simplified version of the template submission contract: - -```solidity WavsSubmit.sol -import {IWavsServiceManager} from "@wavs/interfaces/IWavsServiceManager.sol"; -import {IWavsServiceHandler} from "@wavs/interfaces/IWavsServiceHandler.sol"; -import {ITypes} from "interfaces/ITypes.sol"; - -// Contract must implement IWavsServiceHandler to receive data -// ITypes provides the DataWithId struct and other type definitions -contract SimpleSubmit is ITypes, IWavsServiceHandler { - - /// @notice Service manager instance - used to validate incoming data and signatures - IWavsServiceManager private _serviceManager; - - /** - * @notice Initialize the contract with a service manager - * @param serviceManager The service manager instance that will validate data - */ - constructor(IWavsServiceManager serviceManager) { - _serviceManager = serviceManager; - } - - /// @inheritdoc IWavsServiceHandler - /// @notice Main entry point for receiving and processing data - /// @param envelope Contains the event ID and the actual data payload - /// @param signatureData Contains operator signatures for validation - function handleSignedEnvelope(Envelope calldata envelope, SignatureData calldata signatureData) external 
{ - // First validate the data and signatures through the service manager - // This ensures the data is properly signed by authorized operators - _serviceManager.validate(envelope, signatureData); - - // Decode the payload into your expected data structure - // The payload format must match what your component outputs - DataWithId memory dataWithId = abi.decode(envelope.payload, (DataWithId)); - // At this point, you can safely process the validated data - // Add your custom logic here to handle the data - } -} -``` - -## Aggregator - -The aggregator is used to collect and validate responses from multiple operators before submitting them to the blockchain. It acts as an intermediary that receives signed responses from operators, validates each operator's signature, aggregates signatures when enough operators have responded, and submits the aggregated data to the submission contract. The aggregator supports exact match aggregation, meaning that consensus is reached if a threshold amount of submitted responses from operators are identical. Visit the [design considerations page](/design) for more information on aggregation and service design. - -WAVS currently uses ECDSA signatures for aggregation, but will also support BLS signatures in the future. - -## Aggregator submission flow - -1. An operator runs a component which returns a `WasmResponse` containing: - - `payload`: The result data - - `ordering`: Optional ordering information -2. The operator creates an Envelope containing the result data and signs it with their private key, creating an signature. -3. A Packet containing the envelope, signature, and route information (service ID and workflow ID) is created and sent to the aggregator's `/packet` endpoint. -4. The aggregator validates the packet's signature by recovering the operator's address and adds it to a queue of packets with the same trigger event and service ID. -5. 
When enough packets accumulate to meet the threshold (determined by the service manager contract), the aggregator: - - Combines the signatures from all packets into a single SignatureData structure - - Validates the combined signatures on-chain through the service manager contract -6. If validation succeeds, the aggregator sends the operator signatures and payload result data as a single transaction to the `handleSignedEnvelope()` function on the submit contract specified in the service's manifest. -7. The `handleSignedEnvelope()` function validates the data and signatures via the service manager contract. diff --git a/docs/handbook/template.mdx b/docs/handbook/template.mdx deleted file mode 100644 index 8d9dfb34..00000000 --- a/docs/handbook/template.mdx +++ /dev/null @@ -1,113 +0,0 @@ ---- -title: Template overview -description: Overview of the WAVS foundry template ---- - - - - -Before reading this guide, follow the [Oracle component tutorial](/tutorial/1-overview) to learn the basics of building a WAVS service. - - - -Use the info in this guide to customize the template to create your own custom service. Check out the [WAVS design considerations](/design) page to learn which use-cases WAVS is best suited for. 
- -## Foundry Template structure - -[The WAVS template](https://github.com/Lay3rLabs/wavs-foundry-template) is made up of the following main files: - -```bash -wavs-foundry-template/ -├── README.md -├── makefile # Commands, variables, and configs -├── components/ # WASI components -│ └── evm-price-oracle/ -│ ├── src/ -│ │ ├── lib.rs # Main Component logic -│ │ ├── trigger.rs # Trigger handling -│ │ └── bindings.rs # Bindings generated by `make build` -│ └── Cargo.toml # Component dependencies -├── compiled/ # WASM files compiled by `make build` -├── src/ -│ ├── contracts/ # Trigger and submission contracts -│ └── interfaces/ # Solidity interfaces -├── script/ # Deployment & interaction scripts -├── wavs.toml # WAVS service configuration -├── docs/ # Documentation -├── .cursor/rules/ # Cursor AI rulefiles -├── claude.md # Claude AI rulefile -└── .env # Private environment variables -``` - -- The `README` file contains the tutorial commands. -- The `makefile` contains commands for building and deploying the service. It also contains configurable variables for the service and deployment. -- The `components` directory contains the component logic for your service. Running `make wasi-build` will automatically generate bindings and compile components into the `compiled` directory. -- The `src` directory contains the Solidity contracts and interfaces for trigger and submission contracts. -- The `script` directory contains the scripts used in the makefile commands to deploy, trigger, and test the service. -- The `.env` file contains private environment variables and keys. Use `cp .env.example .env` to copy the example `.env` file. -- The `.cursor/rules` directory and `claude.md` file contain rulefiles for [building components with Cursor AI and Claude AI agents](/handbook/ai). 
- -## Toml files - -There are several toml files in the template that are used to configure the service: - -- `wavs.toml` is used to configure the WAVS service itself, including chains (local, testnets, mainnet) and configurations. -- `Cargo.toml` in the root directory is used to configure the workspace and includes dependencies, build settings, and component metadata. -- `components/*/Cargo.toml` in each component directory is used to configure the Rust component and includes dependencies, build settings, and component metadata. It can inherit dependencies from the root `Cargo.toml` file using `workspace = true`. - -These files can be customized to suit your specific needs, and many settings can be overridden using environment variables. - -The following is an example of a component's `Cargo.toml` file structure: - -```toml -[package] -name = "evm-price-oracle" -edition.workspace = true -version.workspace = true -authors.workspace = true -rust-version.workspace = true -repository.workspace = true - -[dependencies] -wit-bindgen-rt ={ workspace = true } -wavs-wasi-utils = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -alloy-sol-macro = { workspace = true } -wstd = { workspace = true } -alloy-sol-types = { workspace = true } -anyhow = { workspace = true } - -[lib] -crate-type = ["cdylib"] - -[profile.release] -codegen-units = 1 -opt-level = "s" -debug = false -strip = true -lto = true - -[package.metadata.component] -package = "component:evm-price-oracle" -target = "wavs:worker/layer-trigger-world@0.4.0" - -``` - -## wavs.toml config - -The [`wavs.toml`](https://github.com/Lay3rLabs/wavs-foundry-template/blob/main/wavs.toml) file contains configuration settings for all WAVS components: -- Default general settings (shared across all processes) -- WAVS server-specific settings -- CLI-specific settings -- Aggregator-specific settings - -### Environment Variable Overrides - -Environment variables can override configuration values 
using these patterns: -- WAVS server settings: `WAVS_` -- CLI settings: `WAVS_CLI_` -- Aggregator settings: `WAVS_AGGREGATOR_` diff --git a/docs/handbook/triggers.mdx b/docs/handbook/triggers.mdx deleted file mode 100644 index 5e2a41cd..00000000 --- a/docs/handbook/triggers.mdx +++ /dev/null @@ -1,204 +0,0 @@ ---- -title: Triggers -description: Setting up and managing WAVS service triggers ---- - - - -{/* todo: verify all code examples. link to appropriate pages. */} - -A trigger prompts a WAVS service to run. Operators listen for the trigger event specified by the service and execute the corresponding component off-chain. Triggers can be any onchain event emitted from any contract. - -### Trigger lifecycle - -1. A service is deployed with a service.json manifest which contains information on the service and [workflow](/handbook/workflows) logic (components, triggers, [submission](/handbook/submission) logic). - -2. Operators maintain lookup maps to track and verify triggers. For EVM and Cosmos events, they map chain names, contract addresses, and event identifiers to trigger IDs. Block interval triggers are tracked by chain name with countdown timers, while cron triggers are managed in a priority queue ordered by execution time. - -3. When a trigger is detected, operators verify it against their lookup maps according to trigger type. If a match is found, a TriggerAction is created with the trigger configuration and event data. - -4. `TriggerAction` has 2 fields: `TriggerConfig` which contains the service, workflow, and trigger configuration, and `TriggerData`contains the trigger data based on the trigger type. 
- -```rust -pub struct TriggerAction { - pub config: TriggerConfig, // Contains service_id, workflow_id, and trigger type - pub data: TriggerData, // Contains the actual trigger data -} - -pub struct TriggerConfig { - pub service_id: ServiceID, - pub workflow_id: WorkflowID, - pub trigger: Trigger, -} - -pub enum TriggerData { - CosmosContractEvent { //For Cosmos event triggers - contract_address: layer_climb_address::Address, /// The address of the contract that emitted the event - chain_name: ChainName, /// The name of the chain where the event was emitted - event: cosmwasm_std::Event, /// The data that was emitted by the contract - block_height: u64, /// The block height where the event was emitted - }, - EvmContractEvent { //For EVM event triggers - contract_address: alloy_primitives::Address, /// The address of the contract that emitted the event - chain_name: ChainName, /// The name of the chain where the event was emitted - log: LogData, /// The raw event log - block_height: u64, /// The block height where the event was emitted - }, - BlockInterval { //For block interval triggers - chain_name: ChainName, /// The name of the chain where the blocks are checked - block_height: u64, /// The block height where the event was emitted - }, - Cron { //For cron triggers - trigger_time: Timestamp, /// The trigger time - } -} -``` - -5. The TriggerAction is converted to a WASI-compatible format and passed to the component where it is decoded and processed. The component decodes the incoming event trigger data using the `decode_event_log_data!` macro from the [`wavs-wasi-utils` crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/macro.decode_event_log_data.html). Visit the [components page](./components/component) for more information on decoding and processing trigger data in your component. - - -## Trigger configuration - -Triggers define when and how the component should be executed. Each workflow needs a trigger to be set. 
They are set in the `trigger` field of the [`service.json` file](/handbook/service). - -### EVM event trigger - -This trigger listens for specific events emitted by contracts on EVM-compatible chains, executing the component when a matching event is detected. Event triggers pass raw log data to the component. - -```json - -"trigger": { - "evm_contract_event": { - "address": "0x00000000219ab540356cbb839cbe05303d7705fa", // Contract address to monitor - "chain_name": "ethereum", // Chain to monitor - "event_hash": "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" // Event hash (32 bytes) - } -} - -``` - -Your evm chain information must be set in `wavs.toml` under the `[default.chains.evm.]` section: - -```toml wavs.toml -# Mainnet -[default.chains.evm.ethereum] -chain_id = "1" -ws_endpoint = "wss://eth.drpc.org" -http_endpoint = "https://eth.drpc.org" -``` - -You'll need to set your EVM chain credential in your `.env` file to establish a connection for monitoring the EVM chain: - -```env .env -WAVS_CLI_EVM_CREDENTIAL="0x5ze146f435835b1762ed602088740d201b68fd94bf808f97fd04588f1a63c9ab" -``` - -### Cosmos event trigger - -This trigger monitors events emitted by Cosmos smart contracts, executing your component when a matching event type is detected from the specified contract address. Cosmos event triggers pass the contract data that was emitted by the contract to the component. 
- -```json - "trigger": { - "cosmos_contract_event": { - "address": { - "Cosmos": { - "bech32_addr": "neutron1qlaq54uh9f52d3p66q77s6kh9k9ee3vasy8gkdkk3yvgezcs6zts0mkcv4", // Contract address to monitor - "prefix_len": 7 // Length of the Bech32 prefix (7 for Neutron) - } - }, - "chain_name": "neutron", // Chain to monitor - "event_type": "send_nft" // Event type to watch - } - }, -``` - -Your chain information must be set in `wavs.toml` under the `[default.chains.cosmos.]` section: - -```toml wavs.toml -# == Cosmos chains == - -[default.chains.cosmos.neutron] -chain_id = "pion-1" -bech32_prefix = "neutron" -rpc_endpoint = "https://rpc-falcron.pion-1.ntrn.tech" -grpc_endpoint = "http://grpc-falcron.pion-1.ntrn.tech:80" -gas_price = 0.0053 -gas_denom = "untrn" -``` - -Your Cosmos mnemonic must be set in your `.env` file to establish a connection for monitoring the Cosmos chain: - -```env .env -WAVS_CLI_COSMOS_MNEMONIC="large slab plate twenty laundry illegal vacuum phone drum example topic reason" -``` - -### Cron trigger - -Executes your component on a schedule defined by a cron expression, with optional start and end times to control the execution window. If no start or end time is provided, the component will start immediately and run indefinitely. Cron triggers pass the trigger timestamp to the component. 
- -```json - - "trigger": { - "cron": { - "schedule": "0 */5 * * * *", // Every 5 minutes (at 0 seconds) - "start_time": 1704067200000000000, // Optional start time in nanoseconds since Unix epoch (2024-01-01T00:00:00Z) (default: null) - "end_time": 1735689599000000000 // Optional end time in nanoseconds since Unix epoch (default: null) - } - } - -// Cron Expression Format: -// * * * * * * -// │ │ │ │ │ │ -// │ │ │ │ │ └── Day of week (0-6, where 0 is Sunday) -// │ │ │ │ └──── Month (1-12) -// │ │ │ └────── Day of month (1-31) -// │ │ └──────── Hour (0-23) -// │ └────────── Minute (0-59) -// └──────────── Second (0-59) -// -// Each field can be: -// - A number: `5` (exact time) -// - A range: `1-5` (1 through 5) -// - A list: `1,3,5` (1, 3, and 5) -// - A step: `*/5` (every 5 units) -// - An asterisk: `*` (every unit) -// -// Common examples: -// - `0 */5 * * * *` - Every 5 minutes (at 0 seconds) -// - `0 0 */6 * * *` - Every 6 hours (at 0 minutes and 0 seconds) -// - `0 0 0 * * *` - Every day at midnight (00:00:00) -// - `0 0 12 * * *` - Every day at noon (12:00:00) -// - `0 0 12 1 * *` - Noon on the first day of every month -// - `0 0 12 * * 1` - Noon every Monday -``` - -[Crontab.guru](https://crontab.guru/) is a helpful tool for making cron expressions. - - - -There may be slight variations in Cron trigger execution time between operators due to network latency and clock drift. Cron triggers are best suited for tasks that don't require precise synchronization between operators: -- Triggering components that don't need exact synchronization. -- Collecting data from external services with monotonic pagination. -- Background tasks where eventual consistency is acceptable. - -If you need precise timing synchronization between operators, consider using [block-based triggers](#block-trigger) instead. 
- - - -### Block trigger - -Executes your component at regular block intervals on a specified EVM or Cosmos chain, useful for chain-specific operations that need to run periodically. Block interval triggers pass the block height and chain name to the component. - -```json -"trigger": { - "block_interval": { - "chain_name": "ethereum-mainnet", - "n_blocks": 10, - "start_block": null, // Optional blockheight to start - "end_block": null // Optional blockheight to end - } -} -``` diff --git a/docs/handbook/workflows.mdx b/docs/handbook/workflows.mdx deleted file mode 100644 index 2160eec9..00000000 --- a/docs/handbook/workflows.mdx +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: Workflows -description: Building and managing WAVS service workflows ---- - - -A WAVS service is a collection of one or more workflows that define the different execution paths in your service. - -Each workflow consists of three parts: - -- [**Trigger**](./triggers): Defines what event initiates the workflow -- [**Component**](./components/component): The WASM component that processes the event -- [**Submit**](./submission): Specifies where to send the results - -## Workflow Structure - -Workflows are defined in the service manifest JSON file, which contains the necessary information on which trigger, component, and submission logic are needed. - -The following example shows a workflow with a cron trigger and a submission to an aggregator: - -```json service.json -// ... 
other parts of the service manifest -"workflows": { //workflows are added here -"0196c34d-003d-7412-a3f3-70f8ec664e12": { // a unique workflow ID (default is a generated UUID v7) - "trigger": { // Defines what starts the workflow - "cron": { // Type of trigger (cron job) - "schedule": "0 * * * * *", // Runs every minute at 0 seconds - "start_time": null, - "end_time": null - } - }, - "component": { // the WASI component containing the business logic of the workflow - "source": { // Where the component code comes from - "Digest": "65747b4b1a7fa98cab6abd9a81a6102068de77b1040b94de904112272b226f51" // SHA-256 hash of the component's bytecode - }, - "permissions": { // What the component can access - "allowed_http_hosts": "all", // Can make HTTP requests to any host - "file_system": true // Can access the filesystem - }, - "fuel_limit": null, // Computational limits for the component - "time_limit_seconds": 1800, // Can run for up to 30 minutes - "config": { // Configuration passed to the component - "nft": "0xb5d4D4a87Cb07f33b5FAd6736D8F1EE7D255d9E9", // NFT contract address - "reward_token": "0x34045B4b0cdfADf87B840bCF544161168c8ab85A" // Reward token address - }, - "env_keys": [ // Secret environment variables the component can access from .env - "WAVS_ENV_API_KEY", // secret API key with prefix WAVS_ENV_ - ] - }, - "submit": { // Where results are sent - "aggregator": { // Type of submission (aggregator) - "url": "http://127.0.0.1:8001" // Local aggregator endpoint - } - }, - "aggregators": [ // The final submission address that the aggregator will submit to - { - "evm": { // EVM chain configuration - "chain_name": "local", // Local Ethereum chain - "address": "0xd6f8ff0036d8b2088107902102f9415330868109", // Contract address - "max_gas": 5000000 // Maximum gas limit for transactions - } - } - ] -} -// other workflows can be added here... -}, - -// ... 
the rest of the service manifest -``` - -## Multi-workflow services - -A WAVS service can have one or multiple workflows. You can specify multiple workflows as objects in the service manifest. Each workflow can have a different trigger, component, and submission logic. All workflows in a service will share the same service manager and operator set. - -{/* todo: link above to service manager page. */} - -```json -{ - "workflows": { - "workflow-uuid-1": { - "trigger": { ... }, - "component": { ... }, - "submit": { ... } - }, - "workflow-uuid-2": { - "trigger": { ... }, - "component": { ... }, - "submit": { ... } - } - // ... more workflows can be added here - } -} -``` - -## Workflow isolation - -Each workflow execution is completely isolated with components running in separate WebAssembly environments. Each execution has its own memory space and components cannot directly access each other's memory or state. - -## Sharing state - -WAVS services are designed to process data rather than store data. Data should be stored externally. To share data between workflows or components, the first workflow should submit data to an external system such as an onchain smart contract and the second workflow should read the data from the same system. - -``` -A: Trigger -> component -> onchain submission storage -B: Trigger -> component (reads from A's onchain submission storage) -> onchain submission storage -``` - -1. Workflow A submits data to a contract or external system -2. Workflow B reads data from the same contract or system - -Visit the [WAVS design considerations page](../design) for more information on best practices for WAVS services and storing data. - -## Chaining workflows - -You can chain workflows together to create more complex execution flows. To have one workflow trigger another, set the event trigger of the second workflow to the onchain submission event of the first workflow. - -```json -{ - "workflows": { - "workflow-uuid-1": { - "trigger": { ... 
}, // trigger for first workflow - "component": { ... }, // component for first workflow - "submit": { ... } // submission logic for first workflow - }, - "workflow-uuid-2": { - "trigger": { ... }, // trigger for second workflow is the onchain submission event of the first workflow - "component": { ... }, // component for second workflow - "submit": { ... } // submission logic for second workflow - } - } -} -``` -You can also chain different services together with this method by setting the trigger of the second service to the onchain submission event of the first service. - -## Multichain services - -WAVS enables multichain services by allowing contract event or block height triggers on Cosmos or EVM chains (with more coming soon). This architecture lets you create cross-chain services that monitor events or block heights on one chain and submit the results to Ethereum. Visit the [Trigger page](./triggers) for more info. diff --git a/docs/how-it-works.mdx b/docs/how-it-works.mdx deleted file mode 100644 index de91479e..00000000 --- a/docs/how-it-works.mdx +++ /dev/null @@ -1,105 +0,0 @@ ---- -title: How it works -description: Technical overview of WAVS architecture and components ---- - - -WAVS - -WAVS is a decentralized execution framework for AVSs (Autonomous Verifiable Services), enabling the results of off-chain computation to be brought verifiably on-chain. It provides a runtime for executing WASI-based service components, allowing developers to define event-driven off-chain workflows while inheriting Ethereum's security via EigenLayer restaking. - -## The flow - -Before diving into each part of a WAVS service individually, it's important to understand the basic execution flow of WAVS. - -WAVS overview - -1. A service is a collection of on-chain contracts and off-chain logic run by a set of registered operators, bringing the results of off-chain computation on-chain verifiably. A service's operator set is defined in a service manager contract. 
A service manifest (service.json) defines the configuration and different parts of a WAVS service, including information about the service, workflows, components, submission, watch trigger, aggregator, and more. The service contracts and manifest are deployed and operators register to run the service. - -2. Services contain one or more workflows, which define the different execution paths in your service. Each workflow contains a trigger, a service component, and submission logic. Triggers are defined events that activate a workflow. Registered operators running WAVS nodes maintain lookup maps to track and detect triggers. Triggers can be: - - Any on-chain EVM or Cosmos event from a specified contract address and event signature - - Cron jobs that activate at specified intervals - - Blockheight triggers that activate at a specified block height on EVM or Cosmos chains - -3. When a trigger is detected, operators run the workflow's corresponding service component off-chain in the WAVS runtime. - -4. Each operator signs the result of their off-chain computation and submits it to the aggregator. - -5. The aggregator accepts results from the operators and verifies that the result payloads match and that the signatures are valid. Once enough verified submissions are received (the threshold is defined in the service manager), the aggregator submits the bundled signatures and result payloads as a single transaction to the submission contract specified in the service's manifest. - -6. The `handleSignedEnvelope()` function on the submission contract validates the data and signatures via the service manager contract. The workflow is complete, bringing the result of the off-chain computation verifiably on-chain. Services and workflows can be chained together by setting the submission event of one workflow to be the trigger of another. - -## WAVS parts - -### Service manager - -The service manager is a contract that defines a service's operator set. 
It is used to register operators and define their weights, set the threshold for aggregator submissions, and maintain the URI of the service manifest. For more information, see the [Service page](./handbook/service#service-manager). - -### Service manifest - -The service manifest is a JSON file that defines the configuration and different parts of a WAVS service, including information about the service, workflows, components, submission, service manager contract, aggregator configuration, and more. For more information, see the [Service page](./handbook/service). - -### Workflows - -Workflows are the building blocks of a WAVS service flow. They define the different execution paths of a service, including the trigger, component, and submission logic. For more information, see the [Workflows page](./handbook/workflows). - -### Triggers - -A trigger is the event that activates a WAVS service. Triggers can be EVM or Cosmos events, cron, or blockheight triggers. When a specified event is triggered, WAVS operators detect it and execute the corresponding service component off-chain. The results are then verified and submitted back on-chain, completing the execution cycle. For more information, see the [Triggers page](./handbook/triggers). - -### Service components - -Service components are the heart of the WAVS platform, encapsulating the core logic that powers a service. They contain the off-chain business logic for a service, [written in Rust, Go, or JavaScript](./handbook/components/component#component-languages) (with other languages coming soon). These service components are compiled to WASM and are uploaded to the WAVS platform where they can be run by operators. In the WAVS runtime, service components are sandboxed from each other and from the node's operating system. This way, operators and AVS services maintain a clean separation, with AVS builders defining service components and operators having to opt in to each service. 
- -Service components are lightweight and built for adaptability. Building a service used to take thousands of lines of code and the configuration of dozens of files to execute even the simplest logic. With WAVS, service components can consist of a few lines of code that can be dynamically deployed to the WAVS platform. - -Service components are also designed for composability: an AVS can chain multiple components together, creating decentralized flows of off-chain and on-chain executions. These intelligent protocols merge the performance of off-chain computation with on-chain verifiability, creating a complex mesh of decentralized transaction flows. - -To learn more, visit the [Components page](./handbook/components/component). For a hands-on example of a service component, visit the [tutorial](./tutorial/1-overview). - -### WAVS runtime - -The WAVS (WASI-AVS) runtime serves as the off-chain execution environment for all services running on the WAVS platform. Powered by operators running the WAVS node software, it provides a structured framework for deploying and managing service components that run within a WASI (WebAssembly System Interface) environment. You can think of WASI as a lightweight OS-like interface, offering a standard set of APIs that allow service components to perform system-level operations such as file handling and environment interactions. WAVS enables service components to execute securely within this WASI-powered sandbox, ensuring isolation from both the host system and other components. - -### WASM and WASI - -[WASM (Web Assembly)](https://webassembly.org/) is a high-performance, low-level binary format that can be compiled from multiple programming languages. WASM can even run in web browsers at near-native speed. By leveraging WASM, AVSs built with WAVS are lightning-fast, lightweight, and easy to develop. 
- -WASI (WebAssembly System Interface) is a standardized API that enables WASM (WebAssembly) modules to interact with a host system in a secure and platform-independent way. It provides WASM modules with a standardized set of APIs to access system resources. For more information, visit the [WASI documentation](https://wasi.dev/). - -There are significant advantages in leveraging a WASM/WASI-based platform for AVSs: - -- Lightweight execution: Service components are lightweight WASM binaries ideal for high-frequency, low-latency AVS tasks. -- Speed: Components can run in the WASI environment at near-native speeds, providing a significant advantage over Dockerized AVSs. -- Low overhead: Instead of each service needing its own dedicated Docker container, the WAVS runtime provides a computational layer that can be used by many components, saving storage and startup time. -- Dynamic deployment: To upgrade a service, simply upload a new component and update your service metadata to point to the new component. No more downtime or coordination of new binaries among operators. -- Security and separation: The WAVS WASI runtime enforces security by sandboxing service components, allowing them to interact with the host (WAVS) only through explicitly permitted WASI APIs. - -### Registry - -WAVS uses a registry to store the WASM components. A service like [wa.dev](https://wa.dev) is recommended for proper distribution in production. For more information, visit the [Component page](./handbook/components/component#registry). - -### Signing and aggregation - -When a service is triggered, each operator registered to the service will run the service component on their machine and generate the result. These results are signed by the operator before being submitted to an aggregator. - -For services that submit results on Ethereum, an off-chain aggregator can be used to conserve gas fees. 
Instead of each individual operator submitting results of a service directly on-chain, operators sign the results and submit them off-chain to an aggregator, which aggregates the results and submits a result to be posted to the chain in a single transaction. Results are signed using an operator's individual private key to produce a signature, which is used to prove that the result is associated with an operator's specific private and public key pair. The aggregator accepts the result submissions from operators, verifies their validity, and compares the responses. If there is a consensus among valid operator results, the results and verified signatures are submitted on-chain as a single transaction. Support for BLS signatures and a decentralized aggregator are currently under development. - -To learn more about the aggregator, visit the [Submission and aggregator page](./handbook/submission#aggregator). For more discussion on aggregation considerations, visit the [Design considerations page](./design). - -### Submission - -A submission contract is the final destination of the results of a service component. Known as a Service Handler, this logic can be any contract as long as it implements the [`handleSignedEnvelope()` function using the `IWavsServiceHandler`](./handbook/submission#submission-contract) interface to validate data and signatures using the service manager. - -After the execution of a service component in the WAVS runtime, the results are aggregated in the [aggregator](./handbook/submission#aggregator) and passed to the submission contract. Developers can use this contract to define their own logic for results and implement different rules depending on the specific needs of their service. - -To learn more about the submission contract, visit the [Submission page](./handbook/submission). - -### Updating a service - -Because of the lightweight and portable nature of WebAssembly, WAVS operators only need to run a single Docker image. 
WAVS provides a runtime for all registered services to run, each sandboxed from the other and from the node's operating system due to the nature of [WASI](#wasm-and-wasi). Operators will need to opt in to running different services by registering to an AVS. - -Updates to service logic do not require node upgrades. Instead, developers can dynamically deploy a new service component and update their service manifest. Instead of needing to run a new Docker image every time a service is updated, operators only need to upgrade if there is a breaking change to the WAVS node software itself. diff --git a/docs/index.mdx b/docs/index.mdx deleted file mode 100644 index 1bba3cfb..00000000 --- a/docs/index.mdx +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: WAVS Docs -description: Welcome page with links to WAVS documentation sections ---- - - -WAVS - -Welcome to the WAVS Docs! - -WAVS is a next-generation AVS platform that makes it easy to create, manage, and operate high-performance AVSs. Use this documentation to learn [about WAVS](/overview), [how it works](/how-it-works), and how to [start building your AVS](./tutorial/1-overview). - - diff --git a/docs/overview.mdx b/docs/overview.mdx deleted file mode 100644 index 966f526e..00000000 --- a/docs/overview.mdx +++ /dev/null @@ -1,94 +0,0 @@ ---- -title: Overview -description: Introduction to WAVS platform and its core concepts ---- - - -WAVS - -## What is WAVS? - -WAVS is a [WASI](./how-it-works#wasm-and-wasi) runtime for building AVSs (Autonomous Verifiable Services). With WAVS, you can create, manage, and operate high-performance AVSs using the languages you already know and love (like [Rust, Go, and JavaScript](./handbook/components/component#languages), with more languages like Python coming soon). By providing a base layer of AVS infrastructure, WAVS lets you focus on implementing the core logic of your service. 
WAVS compiles that logic to [WASM](./how-it-works#wasm-and-wasi), and lets you deploy it as lightweight service components. - -Better yet, WAVS solves the trust problem in off-chain compute: it separates services from the operators that run them. Builders create their components, and operators run them in WAVS runtime at near-native speed. Then, operators sign the results of the off-chain computation and place them on-chain. Boom: off-chain compute with on-chain verifiability. - -> In simple terms, WAVS streamlines the process of building and managing an AVS. - -Finally, WAVS utilizes restaking (via EigenLayer) to secure its AVSs. A service of services, WAVS is composable by nature, allowing an AVS to dynamically run and manage multiple components that work together to build flexible and intelligent applications. - - -## Use cases - -WAVS supports a wide range of AVS use cases, enabling powerful, verifiable off-chain computation across different domains: - -- **Decentralized AI**: WAVS unlocks decentralized AI that is [deterministic and verifiable](https://www.layer.xyz/news-and-insights/deterministic-ai), enabling trustless decision-making and autonomous governance through DAO-controlled AI agents. -- **Oracles**: [Create](./tutorial/1-overview) and dynamically deploy new oracles as easily as uploading a component to your service to verifiably bring data from any source on-chain. -- **Zero Knowledge Proofs**: ZK verifiers, ZK Prover Marketplaces, and ZK aggregation layers can be deployed as lightweight WAVS [service components](#service-components), making it simple to build modular and composable proof services. -- **Crosschain Bridges**: Build secure, decentralized bridges with WAVS. Lock assets on one chain, trigger a verifiable service component, and mint them on another—all with trust-minimized execution. -- **Dynamic applications**: WAVS combines on-chain and off-chain services to build a cross-chain, decentralized application layer. 
-- **Intelligent protocols**: Build protocols that are verifiably aware of on-chain and off-chain events without relying on centralized infrastructure. Compose services and applications to enable complex webs of decentralized transaction flows. -- **TEEs**: WAVS can be used to build TEEs (Trusted Execution Environments) that run off-chain computations in a secure and verifiable manner, ensuring data integrity and confidentiality. - -## Building a service - -WAVS removes the complexity of building an AVS, making it easy to develop and deploy custom services. With built-in AVS infrastructure and developer tooling, WAVS powers a new multichain ecosystem of composable, decentralized, and verifiable applications. - - -The following is a basic overview of a WAVS service. For a more in-depth overview of WAVS, visit the [How it works section](./how-it-works). Check out the WAVS tutorial to learn how to build a service. - - -This example will cover a basic AVS with three parts: a trigger, a service component, and submission logic. - -### Defining triggers - -Triggers are the actions or events that prompt your service to be run. Currently, WAVS supports triggers from on-chain events from EVM and Cosmos chains, cron schedules, and block intervals for EVM or Cosmos chains. Triggers can be used to pass arbitrary data as the inputs for service components to be run. Operators running a service listen for specific trigger events and run the corresponding service component. - -WAVS - -To learn more about triggers, visit the [triggers page](./handbook/triggers). - -### Service components - -Service components are the core logic of an AVS. They are written in [Rust, Go, or JavaScript](./handbook/components/component#languages) and compiled to [WASM](./how-it-works#wasm-and-wasi) as lightweight WASI components. WAVS provides a base layer of AVS infrastructure, allowing you to focus solely on the logic of your service. 
- -Service components can contain logic for processing input data from triggers. If a trigger passes data, a service component can use that data as input. For example, a simple service component could contain logic for receiving a number as input and returning the square of that number as output. - -WAVS - -To learn more about service components, visit the [How it works](/how-it-works#service-components) page. Check out the [WAVS tutorial](./tutorial/1-overview) to learn how to create a service component. - - -### Submission logic - -Along with your component, you'll also need to define how the results of your service are submitted on-chain. With WAVS, you can use an [aggregator and submission contract](/how-it-works#submission) to define this logic. - - -### Run your service - -AVS builders define their service in a [service manifest or service.json file](./handbook/service) with a workflow that includes a trigger, service component, and submission logic. Registered operators will then listen for the triggers specified by your service. Once triggered, operators will run your service off-chain, where data from a trigger is passed to a service component and run in a sandboxed WASI environment. Operators sign the result of the service computation and the verified result can be returned as an on-chain response. - -WAVS - -With WAVS, service updates are streamlined: updates to services can be made by AVS builders directly without needing to coordinate with operators. Operators only need to upgrade if there is a change to the WAVS node software itself. - -## Multichain capability - -WAVS is built to be multichain. A service can be triggered by events on one chain, run by operators off-chain, and write verified responses to another chain. This interoperability is what makes WAVS so flexible, creating a decentralized computational layer that can function across multiple chains. 
- -## Composability - -WAVS is composable by nature, allowing an AVS to dynamically run and manage multiple workflows that work together to build flexible and intelligent applications. Workflows include a trigger, service component, and submission logic. To [compose services and workflows](./handbook/workflows), the trigger of one workflow can be the submission logic of another workflow. The registry model allows component bytecode to be stored in one location and reused across multiple workflows. - -## Security - -The WAVS platform is secured via Ethereum restaking on EigenLayer, which provides a base security layer for AVSs built using WAVS. Restaking refers to the utilization of staked Ethereum to secure AVSs by imposing additional slashing terms on the staked Ethereum for operator misbehavior. In this way, the cryptoeconomic security of Ethereum is extended to WAVS AVSs. - -## Full-stack decentralization - -WAVS enables full-stack decentralization by unifying off-chain computation with on-chain verifiability. The chain layer connects to Ethereum and other chains, while the security layer extends cryptoeconomic security via EigenLayer restaking. At the AVS layer, lightweight WAVS powers WASM-based services to process off-chain computations triggered by on-chain events. Operators validate, sign, and commit the results back on-chain, ensuring verifiability and trust-minimized execution. This architecture makes WAVS a scalable, decentralized framework for full-stack applications. - -WAVS diff --git a/docs/resources/llms.mdx b/docs/resources/llms.mdx deleted file mode 100644 index 3881bf6b..00000000 --- a/docs/resources/llms.mdx +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: LLM docs -description: Access WAVS documentation in formats optimized for AI tools and integration. ---- - - - -The LLM text format presents documentation in a clean, plain text format optimized for large language models (LLMs) like Claude, ChatGPT, and others. 
- -## llms.txt - -The `llms.txt` format is a structured index of documentation pages organized by sections, including page titles, URLs and descriptions. This format is ideal for AI assistants to understand the documentation structure without processing the full content. - -[https://docs.wavs.xyz/llms.txt](https://docs.wavs.xyz/llms.txt) - -``` -curl https://docs.wavs.xyz/llms.txt -``` - -## llms-full.txt - -The `llms-full.txt` format returns all documentation pages as a single text document. - -[https://docs.wavs.xyz/llms-full.txt](https://docs.wavs.xyz/llms-full.txt) - -``` -curl https://docs.wavs.xyz/llms-full.txt -``` - -Returns all documentation pages as a single text document. - - -## Markdown Format - -Get any page as standard Markdown by appending `.md` to its URL. - -``` -curl https://docs.wavs.xyz/path/to/page.md -``` - -Examples: -- `/overview.md` - Overview page as Markdown -- `/tutorial/1-overview.md` - Tutorial introduction as Markdown -- `/handbook/service.md` - Service handbook as Markdown diff --git a/docs/tutorial/1-overview.mdx b/docs/tutorial/1-overview.mdx deleted file mode 100644 index 491079b3..00000000 --- a/docs/tutorial/1-overview.mdx +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: 1. Oracle service tutorial -description: Introduction to WAVS tutorial series ---- - - -Start-building - -In this guide, you will build a simple oracle service that fetches Bitcoin price data from [coinmarketcap.com](https://coinmarketcap.com/api/). This example is built using the [WAVS Foundry Template](https://github.com/Lay3rLabs/wavs-foundry-template), which contains the tools you need to build your own custom service. - -The price oracle service example has three basic parts: - -1. [A trigger contract](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src/contracts/WavsTrigger.sol): A trigger can be any on-chain event emitted from a contract. This event **triggers** a service to run. 
In the WAVS Foundry Template, there is a simple trigger contract that stores trigger requests, assigns them unique IDs, and emits an event when a new trigger is added. In this example, the trigger event `addTrigger` will pass data pertaining to the ID of an asset for the CoinMarketCap price feed. - -2. [A service component](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/evm-price-oracle/src/lib.rs): The service component contains the business logic of a service. It is written in Rust, compiled to WASM, and run by operators in the WAVS runtime. In this example, operators will listen for a new trigger event to be emitted and then run the service component off-chain, using the asset ID data from the trigger event as input. The component contains logic to fetch the price of the asset from the CoinMarketCap price feed API, which is then processed and encoded before being sent back on-chain. - -3. [A submission contract](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src/contracts/WavsSubmit.sol): Also known as the "service handler," this contract contains the on-chain submission logic for the service. It validates and stores the processed data returned by the WAVS component. When an operator submits a response, the contract verifies the data's integrity by checking the operator's signature and then associates it with the original trigger ID, bringing the queried price on-chain. - -These three parts come together to create a basic oracle service using WAVS. To learn more about services and how they work, visit the [How it works page](../how-it-works). - -## Video tutorial - -{/* todo: needs a 0.4 demo video */} - -You can follow along with this guide by watching the video tutorial: -
- -
- - -} - href="/tutorial/2-setup" - title="Get Started" - description="Click here to set up your environment and start building your service." -/> diff --git a/docs/tutorial/2-setup.mdx b/docs/tutorial/2-setup.mdx deleted file mode 100644 index 304ff7d7..00000000 --- a/docs/tutorial/2-setup.mdx +++ /dev/null @@ -1,224 +0,0 @@ ---- -title: 2. System setup -description: Setting up development environment for WAVS ---- - - -The following installations are required to run this example. Follow the steps below to set up your system. - - - -This tutorial is designed for Windows WSL, Linux, and macOS systems. - - - -## Environment - -Install [VS Code](https://code.visualstudio.com/download) and the [Solidity extension](https://marketplace.visualstudio.com/items?itemName=JuanBlanco.solidity) if you don't already have them. - -## Rust - -Run the following command to install [Rust](https://www.rust-lang.org/tools/install). - - -```bash docci-ignore -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - - - -If you installed Rust using Homebrew, you will need to uninstall it and install it again using the rustup command. 
- -```bash docci-ignore -brew uninstall rust -``` -Then run: -```bash docci-ignore -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - - - - -If you just installed Rust for the first time, you will need to run the following commands: - -```bash docci-ignore -# Install required target and toolchain -rustup toolchain install stable -rustup target add wasm32-wasip2 -``` - - - - - -If you already have a previous version of Rust installed, you will need to run the following commands to upgrade it to the latest stable version: - -```bash docci-ignore -# Remove old targets if present -rustup target remove wasm32-wasi || true -rustup target remove wasm32-wasip1 || true - -# Update and add required target -rustup update stable -rustup target add wasm32-wasip2 -``` - - - -## Cargo components - -Install the following for building WebAssembly components. Visit the [Cargo Component documentation](https://github.com/bytecodealliance/cargo-component#installation) for more information. - -{/* This section is also in [](./5-build.mdx). Remember to update there as well */} -```bash docci-ignore -cargo install cargo-binstall -cargo binstall cargo-component wasm-tools warg-cli wkg --locked --no-confirm --force - -# Configure default registry -# Found at: $HOME/.config/wasm-pkg/config.toml -wkg config --default-registry wa.dev - -# Allow publishing to a registry -# -# if WSL: `warg config --keyring-backend linux-keyutils` -warg key new -``` - - - -If you are on Ubuntu LTS but encounter an error like `wkg: /lib/x86_64-linux-gnu/libm.so.6: version 'GLIBC_2.38' not found (required by wkg)`: - -```bash -sudo do-release-upgrade -``` - - -## Foundry - -[Foundry](https://book.getfoundry.sh/) is a solidity development suite. The Foundry toolchain contains Anvil (a local testnet node), Forge (build and test smart contracts), Cast (an RPC call CLI), and Chisel (a Solidity REPL). - - -1. Install Foundryup, the official Foundry installer. 
- -```bash docci-ignore -curl -L https://foundry.paradigm.xyz | bash -``` - -2. Install Foundry - -```bash docci-ignore -foundryup -``` - -## Docker - -Visit the [Docker Documentation](https://docs.docker.com/get-started/get-docker/) for more info. - - - - - ```bash docci-ignore - brew install --cask docker - ``` - - - - - The following commands will install Docker and [Docker Compose](https://docs.docker.com/compose/). - - ```bash docci-ignore - # Install Docker - sudo apt -y install docker.io - # Install Docker Compose - sudo apt-get install docker-compose-v2 - ``` - - - - - - -{/* This section is also in [](./5-build.mdx). Remember to update there as well */} - -If prompted, remove container with `sudo apt remove containerd.io` - -If you are using Docker Desktop, make sure it is open and running for this tutorial. - -Before proceeding, make sure that the following setting is updated: - -**Enable Host Networking**: Open Docker and navigate to -> Settings -> Resources -> Network. Make sure that 'Enable Host Networking' is turned on. - -Alternatively, you can install the following: - -```bash docci-ignore -brew install chipmk/tap/docker-mac-net-connect && sudo brew services start chipmk/tap/docker-mac-net-connect -``` - -If you are running on a Mac with an ARM chip, you will need to do the following: - -- Set up Rosetta: - -```bash docci-ignore -softwareupdate --install-rosetta -``` - -- Enable Rosetta (Docker Desktop: Settings -> General -> enable "Use Rosetta for x86_64/amd64 emulation on Apple Silicon") - - - -## Make - -Visit the [Make Documentation](https://www.gnu.org/software/make/manual/make.html) for more info. - - - - - - ```bash docci-ignore - brew install make - ``` - - - - ```bash docci-ignore - sudo apt -y install make - ``` - - - - - -## JQ - -Visit the [JQ Documentation](https://jqlang.org/download/) for more info. 
- - - - - ```bash docci-ignore - brew install jq - ``` - - - - ```bash docci-ignore - sudo apt -y install jq - ``` - - - - -## Node.js - -Node v21+ is needed for the WAVS template. Visit the [NVM Installation guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#installing-and-updating) to install Node Version Manager and update your Node version. - -```bash -curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.3/install.sh | bash -nvm install --lts -``` - -After setting up your system, continue to the next page to create your project. diff --git a/docs/tutorial/3-project.mdx b/docs/tutorial/3-project.mdx deleted file mode 100644 index 866c5a8d..00000000 --- a/docs/tutorial/3-project.mdx +++ /dev/null @@ -1,68 +0,0 @@ ---- -title: 3. Create your project -description: Creating and configuring WAVS project structure ---- - - -{/* todo: update --branch main once the template is updated */} - -1. After setting up your environment, open a terminal and run the following command to create your WAVS Foundry Template project. In this example, your project will be called `my-wavs`. - -```bash docci-ignore -forge init --template Lay3rLabs/wavs-foundry-template my-wavs --branch main -``` - -2. Then, enter your project: - -```bash docci-ignore -cd my-wavs -``` - -3. Run the following command to open your project in VS code, or open it in the editor of your choice: - -```bash docci-ignore -code . -``` - -## Explore the template - -This template repo contains all the files you'll need to build, run, and test WAVS services locally. - -The template already contains the necessary files for the oracle example to run. 
For example, the trigger ([`WavsTrigger.sol`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src/contracts/WavsTrigger.sol)) and submission ([`WavsSubmit.sol`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src/contracts/WavsSubmit.sol)) contracts can be found in the [`/my-wavs/src/contracts`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src) folder. - -In [`/evm-price-oracle/src/lib.rs`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/evm-price-oracle/src/lib.rs) you'll find the oracle service component. - -This template uses a [`Makefile`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/Makefile) and environment variables to help with your developer experience. If you are ever curious about one of the `Make` commands in the following sections, you can always look at the [`Makefile`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/Makefile) to learn more. - - - -You can run the following command from the root of the repo to see all of the commands and environment variable overrides available: - -```bash docci-ignore -make help -``` - - - -The next sections will show you how to deploy your contracts and components, set up WAVS, and run the oracle example. - - -## Build and test your contracts - -Run the following commands from the root of your project to install necessary dependencies, build the template contracts, and run tests using Forge. - -```bash -# Install dependencies -make setup - -# Build the contracts -forge build - -# Run the solidity tests. -forge test -``` - -The last command runs a basic unit test which verifies that the `SimpleTrigger` contract in `/WavsTrigger.sol` correctly stores and retrieves trigger data. diff --git a/docs/tutorial/4-component.mdx b/docs/tutorial/4-component.mdx deleted file mode 100644 index c7ddfa75..00000000 --- a/docs/tutorial/4-component.mdx +++ /dev/null @@ -1,225 +0,0 @@ ---- -title: 4. 
Oracle component walkthrough -description: evm-price-oracle component walkthrough ---- - - -The core logic of the price oracle in this example is located in the [`/evm-price-oracle/src/lib.rs` file](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/evm-price-oracle/src/lib.rs). Scroll down to follow a walkthrough of the code for the oracle component. - -## trigger.rs - -The [trigger.rs](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/evm-price-oracle/src/trigger.rs) file handles the decoding of incoming trigger data from the trigger event emitted by the trigger contract. The component uses `decode_event_log_data!()` from the [wavs-wasi-utils crate](https://docs.rs/wavs-wasi-utils/latest/wavs_wasi_utils/) to decode the event log data and prepares it for processing within the WAVS component. Trigger.rs handles both ABI encoded data for trigger and submission data and raw data for local testing. For more information on different trigger types, visit the [Triggers page](../handbook/triggers). To learn more about trigger input handling, visit the [Component page](../handbook/components/component#trigger-inputs). - -```rust trigger.rs -use crate::bindings::wavs::worker::layer_types::{ - TriggerData, TriggerDataEvmContractEvent, WasmResponse, -}; -use alloy_sol_types::SolValue; -use anyhow::Result; -use wavs_wasi_utils::decode_event_log_data; - -/// Represents the destination where the trigger output should be sent -pub enum Destination { - Ethereum, - CliOutput, -} - -/// Decodes incoming trigger event data into its components -/// Handles two types of triggers: -/// 1. EvmContractEvent - Decodes Ethereum event logs using the NewTrigger ABI -/// 2. Raw - Used for direct CLI testing with no encoding -pub fn decode_trigger_event(trigger_data: TriggerData) -> Result<(u64, Vec, Destination)> { - match trigger_data { - TriggerData::EvmContractEvent(TriggerDataEvmContractEvent { log, .. 
}) => { - let event: solidity::NewTrigger = decode_event_log_data!(log)?; - let trigger_info = - ::abi_decode(&event._triggerInfo)?; - Ok((trigger_info.triggerId, trigger_info.data.to_vec(), Destination::Ethereum)) - } - TriggerData::Raw(data) => Ok((0, data.clone(), Destination::CliOutput)), - _ => Err(anyhow::anyhow!("Unsupported trigger data type")), - } -} - -/// Encodes the output data for submission back to Ethereum -pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmResponse { - WasmResponse { - payload: solidity::DataWithId { - triggerId: trigger_id, - data: output.as_ref().to_vec().into(), - } - .abi_encode(), - ordering: None, - } -} - -/// The `sol!` macro from alloy_sol_macro reads a Solidity interface file -/// and generates corresponding Rust types and encoding/decoding functions. -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - - // The objects here will be generated automatically into Rust types. - sol!("../../src/interfaces/ITypes.sol"); - - // Encode string input from the trigger contract function - sol! { - function addTrigger(string data) external; - } -} -``` - -Visit the [Blockchain interactions page](../handbook/components/blockchain-interactions) for more information on the `sol!` macro and how to use it to generate Rust types from Solidity interfaces. - -## Oracle component logic - -The [`lib.rs`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/components/evm-price-oracle/src/lib.rs) file contains the main component logic for the oracle. The first section of the code imports the required modules for requests, serialization, and bindings, defines the component struct, and exports the component for execution within the WAVS runtime. 
- -```rust lib.rs -mod trigger; -use trigger::{decode_trigger_event, encode_trigger_output, Destination}; -use wavs_wasi_utils::{ - evm::alloy_primitives::hex, - http::{fetch_json, http_request_get}, -}; -pub mod bindings; -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; -use alloy_sol_types::SolValue; -use serde::{Deserialize, Serialize}; -use wstd::{http::HeaderValue, runtime::block_on}; -``` - -The `run` function is the main entry point for the price oracle component. WAVS is subscribed to watch for events emitted by the blockchain. When WAVS observes an event is emitted, it will internally route the event and its data to this function (component). The processing then occurs before the output is returned back to WAVS to be submitted to the blockchain by the operator(s). - -This is why the `Destination::Ethereum` requires the encoded trigger output, it must be ABI encoded for the solidity contract. - -After the data is submitted to the blockchain, any user can query the price data from the blockchain in the solidity contract. You can also return `None` as the output if nothing needs to be saved to the blockchain (useful for performing some off chain action). - -The `run` function: - -1. Receives a trigger action containing encoded data -2. Decodes the input to get a cryptocurrency ID (in hex) -3. Fetches current price data from CoinMarketCap -4. Returns the encoded response based on the destination - -```rust lib.rs - -struct Component; -export!(Component with_types_in bindings); - -impl Guest for Component { - - fn run(action: TriggerAction) -> std::result::Result, String> { - let (trigger_id, req, dest) = - decode_trigger_event(action.data).map_err(|e| e.to_string())?; - - let hex_data = match String::from_utf8(req.clone()) { - Ok(input_str) if input_str.starts_with("0x") => { - // Local testing: hex string input - hex::decode(&input_str[2..]) - .map_err(|e| format!("Failed to decode hex string: {}", e))? 
- } - _ => { - // Production: direct binary ABI input - req.clone() - } - }; - - let decoded = ::abi_decode(&hex_data) - .map_err(|e| format!("Failed to decode ABI string: {}", e))?; - - let id = - decoded.trim().parse::().map_err(|_| format!("Invalid number: {}", decoded))?; - - let res = block_on(async move { - let resp_data = get_price_feed(id).await?; - println!("resp_data: {:?}", resp_data); - serde_json::to_vec(&resp_data).map_err(|e| e.to_string()) - })?; - - let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { payload: res.into(), ordering: None }), - }; - Ok(output) - } -} -``` - -Visit the [Component page](../handbook/components/component) for more information on the `run` function and the main requirements for component structure. - -## Fetching price data - -The `get_price_feed` function is responsible for fetching price data from CoinMarketCap's API. It takes the cryptocurrency ID passed from the trigger as input and returns a structured `PriceFeedData` containing the symbol, current price in USD, and server timestamp. For more information on making network requests in WAVS components, visit the [Network Requests page](../handbook/components/network-requests). 
- -```rust lib.rs -async fn get_price_feed(id: u64) -> Result { - let url = format!( - "https://api.coinmarketcap.com/data-api/v3/cryptocurrency/detail?id={}&range=1h", - id - ); - - let current_time = std::time::SystemTime::now().elapsed().unwrap().as_secs(); - - let mut req = http_request_get(&url).map_err(|e| e.to_string())?; - req.headers_mut().insert("Accept", HeaderValue::from_static("application/json")); - req.headers_mut().insert("Content-Type", HeaderValue::from_static("application/json")); - req.headers_mut() - .insert("User-Agent", HeaderValue::from_static("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36")); - req.headers_mut().insert( - "Cookie", - HeaderValue::from_str(&format!("myrandom_cookie={}", current_time)).unwrap(), - ); - - let json: Root = fetch_json(req).await.map_err(|e| e.to_string())?; - - // round to the nearest 3 decimal places - let price = (json.data.statistics.price * 100.0).round() / 100.0; - // timestamp is 2025-04-30T19:59:44.161Z, becomes 2025-04-30T19:59:44 - let timestamp = json.status.timestamp.split('.').next().unwrap_or(""); - - Ok(PriceFeedData { symbol: json.data.symbol, price, timestamp: timestamp.to_string() }) -} -``` - -## Handling the response - -The processed price data is returned as a `WasmResponse` which contains the response payload. The response is formatted differently based on the destination. - -```rust lib.rs -let output = match dest { - Destination::Ethereum => Some(encode_trigger_output(trigger_id, &res)), - Destination::CliOutput => Some(WasmResponse { payload: res.into(), ordering: None }), -}; -Ok(output) -``` -- For `Destination::CliOutput`, the raw data is returned directly for local testing and debugging using the `wasi-exec` command. -- For `Destination::Ethereum`, the data is ABI encoded using `encode_trigger_output`. This ensures that processed data is formatted correctly before being sent to the [submission contract](../handbook/submission). 
- -In `trigger.rs`, the `WasmResponse` struct is used to standardize the format of data returned from components. The `payload` field contains the processed data from the component. - -```rust trigger.rs -pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmResponse { - WasmResponse { - payload: solidity::DataWithId { - triggerId: trigger_id, - data: output.as_ref().to_vec().into(), - } - .abi_encode(), - ordering: None, - } -} -``` - -For more information on component outputs, visit the [Component page](../handbook/components/component#component-output). To learn more about submission logic, visit the [Submission page](../handbook/submission). - -The Service handbook contains more detailed information on each part of developing services and components. Visit the [Service handbook overview](../handbook/overview) to learn more. - -## Next steps - -Continue to the [next section](./5-build) to learn how to build and test your component. - diff --git a/docs/tutorial/5-build.mdx b/docs/tutorial/5-build.mdx deleted file mode 100644 index 262f883c..00000000 --- a/docs/tutorial/5-build.mdx +++ /dev/null @@ -1,91 +0,0 @@ ---- -title: 5. Build and test components -description: Building and testing WAVS service components ---- - - - - -{/* This section is also in [](./2-setup.mdx). Remember to update there as well */} - -1. Make sure that Docker is installed. If you are using Docker Desktop, make sure it is open and running. If you are using Mac OS, make sure that your[ Docker app is configured correctly](./2-setup#docker). - -2. Make sure that you have already run the following commands from the [system setup section](./2-setup#cargo-components). 
- -```bash docci-ignore -cargo install cargo-binstall -cargo binstall cargo-component wasm-tools warg-cli wkg --locked --no-confirm --force - -# Configure default registry -wkg config --default-registry wa.dev - -# Allow publishing to a registry -# -# if WSL: `warg config --keyring-backend linux-keyutils` -warg key new -``` - - - - -## Build components - -Run the following command in your terminal to build your component. Exclude `WASI_BUILD_DIR` to build all components. - -```bash docci-if-file-not-exists="./compiled/evm_price_oracle.wasm" -WASI_BUILD_DIR=components/evm-price-oracle make wasi-build -``` - -This command will build any components present in the `/components` directory, as well as auto-generate bindings and compile the components to WASM. The output will be placed in the `compiled` directory. - - -You can also use the command below to build your solidity contracts and components in one command: - - -```bash docci-if-file-not-exists="./compiled/evm_price_oracle.wasm" -make build -``` - - - -## Testing and debugging - -You can use the following command to execute the component using Cast. This command is handy for testing components without having to deploy WAVS. - -An ID of 1 is Bitcoin. Nothing will be saved on-chain, just the output of the component is shown. - - -```bash -make wasi-exec -``` - -This command runs your component locally in a simulated environment and lets you easily view `print` statements for debugging. Running this command in the oracle example will print the information from the oracle [component code](./4-component). Visit the [component walkthrough](../handbook/components/component#logging-in-a-component) for more information on logging during testing and production. 
- - -Upon successful execution, you should receive a result similar to the following: - -```bash docci-ignore -resp_data: Ok(PriceFeedData { symbol: "BTC", timestamp: "2025-02-14T01:23:03.963Z", price: 96761.74120116462 }) - INFO Fuel used: -1477653 - -Result (hex encoded): -7b2273796d626f6c223a22425443222c2274696d657374616d70223a22323032352d30322d31345430313a32333a30332e3936335a222c227072696365223a39363736312e37343132303131363436327d - -Result (utf8): -{"symbol":"BTC","timestamp":"2025-02-14T01:23:03.963Z","price":96761.74120116462} -``` - - - -In the output above, the `INFO Fuel used` value represents the computational power consumed during execution. Similar to how on-chain transactions have a gas limit to cap transaction costs, WAVS enforces a fuel limit to control off-chain computational workload and protect against DoS attacks. - -The maximum fuel allocation can be adjusted in the `Makefile` to accommodate different processing needs. - - - - - diff --git a/docs/tutorial/6-run-service.mdx b/docs/tutorial/6-run-service.mdx deleted file mode 100644 index 75e40c4f..00000000 --- a/docs/tutorial/6-run-service.mdx +++ /dev/null @@ -1,186 +0,0 @@ ---- -title: 6. Run your service -description: Deploying and running WAVS services ---- - - -## Local: Start Anvil, WAVS, and deploy Eigenlayer - -1. Create a `.env` file for your project by copying over the example with the following command: - -```bash -cp .env.example .env -``` - -2. Use the following command to start an Anvil test chain, IPFS, Registry, and some optional telemetry. This only runs with `LOCAL` being set in the `.env` (default). - -```bash docci-background docci-delay-after=5 -make start-all-local -``` - - - -The command must remain running in your terminal. Open another terminal to run other commands. - -You can stop the services with `ctrl+c`. Some MacOS terminals require pressing this twice. 
- - - - - -You can skip all the setup steps below and just run a single command to deploy and run the entire service setup - -```bash -export RPC_URL=`bash ./script/get-rpc.sh` -export AGGREGATOR_URL=http://127.0.0.1:8001 - -bash ./script/deploy-script.sh -``` - -This performs all the below steps (with the exception of actually triggering the contract). - - - -With the chain running, you can deploy and run your service. - -## Create Deployer - -An account is required to upload the contracts and to be the original admin of them. The `create-deployer.sh` script creates a new wallet then sets a balance if using a local deployment, or waits until it has testnet funds before returning. - -You can skip this step by setting `FUNDED_KEY=` in `.env` to a private key of your choice that has network funds. - -```bash docci-delay-after=2 -bash ./script/create-deployer.sh -``` - -## Deploy EigenLayer Middleware - -Local deployments use the real testnet contracts via a forked anvil instance. This middleware will setup all the required contracts and configurations for the base of your AVS. - -```bash docci-delay-after=2 -COMMAND=deploy make wavs-middleware -``` - -## Deploy solidity contracts - -The `deploy-contracts.sh` script is used to deploy the trigger and submission solidity contracts to the chain. - -```bash docci-delay-per-cmd=2 -source script/deploy-contracts.sh -``` - -## Deploy Service - -Deploy the compiled component with the contract information from the previous steps. - -```bash docci-delay-per-cmd=3 -export COMPONENT_FILENAME=evm_price_oracle.wasm -export PKG_NAME="evmrustoracle" -export PKG_VERSION="0.1.0" -# ** Testnet Setup: https://wa.dev/account/credentials/new -> warg login -source script/upload-to-wasi-registry.sh || true - -# Testnet: set values (default: local if not set) -# export TRIGGER_CHAIN=holesky -# export SUBMIT_CHAIN=holesky - -# Package not found with wa.dev? 
-- make sure it is public -export AGGREGATOR_URL=http://127.0.0.1:8001 -REGISTRY=${REGISTRY} source ./script/build-service.sh -``` - -The build-service.sh script is used to create a service manifest (service.json) with the configuration for the service, including a workflow with the trigger event, component, aggregator, submission logic, and more. Visit the [Service handbook](../handbook/service) for more information on service configuration. - -## Upload to IPFS - -The `ipfs-upload.sh` script is used to upload the service manifest to IPFS where it can be referenced by its URI. - -```bash docci-delay-per-cmd=2 -# Upload service.json to IPFS -SERVICE_FILE=.docker/service.json source ./script/ipfs-upload.sh -``` - -## Aggregator - -Start the [aggregator](../handbook/submission#aggregator) and register the service with the aggregator. The aggregator is used to collect and validate responses from multiple operators before submitting them to the blockchain. - -```bash docci-delay-per-cmd=2 -bash ./script/create-aggregator.sh 1 - -IPFS_GATEWAY=${IPFS_GATEWAY} bash ./infra/aggregator-1/start.sh - -wget -q --header="Content-Type: application/json" --post-data="{\"uri\": \"${IPFS_URI}\"}" ${AGGREGATOR_URL}/register-service -O - -``` - -## Start WAVS - -Create an operator and start WAVS. The create-operator.sh script configures the operator's environment and starts running WAVS. - -```bash -bash ./script/create-operator.sh 1 - -IPFS_GATEWAY=${IPFS_GATEWAY} bash ./infra/wavs-1/start.sh - -# Deploy the service JSON to WAVS so it now watches and submits. -# 'opt in' for WAVS to watch (this is before we register to Eigenlayer) -WAVS_ENDPOINT=http://127.0.0.1:8000 SERVICE_URL=${IPFS_URI} IPFS_GATEWAY=${IPFS_GATEWAY} make deploy-service -``` - -## Register service specific operator - -Each service gets its own key path (hd_path). The first service starts at 1 and increments from there. 
The following commands are used to register the operator with the [service manager contract](../handbook/service#service-manager). - -```bash -SERVICE_INDEX=0 source ./script/avs-signing-key.sh - -# Local: -export WAVS_SERVICE_MANAGER_ADDRESS=$(jq -r .addresses.WavsServiceManager ./.nodes/avs_deploy.json) -# TESTNET: set WAVS_SERVICE_MANAGER_ADDRESS - -COMMAND="register ${OPERATOR_PRIVATE_KEY} ${AVS_SIGNING_ADDRESS} 0.001ether" make wavs-middleware - -# Verify registration -COMMAND="list_operators" PAST_BLOCKS=500 make wavs-middleware -``` - -## Trigger the service - -Next, use your deployed trigger contract to trigger the oracle to be run. In the following command, you'll specify the `INPUT_DATA` as abi encoded `1`, which corresponds to the ID of Bitcoin. - -Running this command will execute [`/script/Trigger.s.sol`](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/script/Trigger.s.sol) and pass the ID to the trigger contract, starting the following chain of events: - -1. The trigger contract will emit an event with the specified ID as its data. -2. Operators listening for the event will receive the data and run it in the oracle component off-chain. -3. The oracle component will use the ID to query the price of Bitcoin from the CoinMarketCap API. -4. 
The returned data will be signed by operators and passed to the [aggregator and then the submission contract](../handbook/submission), which will verify the operator's signature and submit the price of Bitcoin on-chain 🎉 - - -```bash docci-delay-per-cmd=2 -# Request BTC from CMC -export INPUT_DATA=`cast abi-encode "addTrigger(string)" "1"` -# Get the trigger address from previous Deploy forge script -export SERVICE_TRIGGER_ADDR=`make get-trigger-from-deploy` - -# uses FUNDED_KEY as the executor (local: anvil account) -source .env - -forge script ./script/Trigger.s.sol ${SERVICE_TRIGGER_ADDR} ${INPUT_DATA} --sig 'run(string,string)' --rpc-url ${RPC_URL} --broadcast -``` - -## Show the result - -Run the following to view the result of your service in your terminal: - -```bash docci-delay-per-cmd=2 docci-output-contains="BTC" -# Get the latest TriggerId and show the result via `script/ShowResult.s.sol` -TRIGGER_ID=1 RPC_URL=${RPC_URL} make show-result -``` - -Congratulations, you've just made a simple Bitcoin price oracle service using WAVS! - -Proceed to the [Prediction Market demo](./7-prediction) to learn how a similar oracle service can be used in a prediction market. - -Check out the [Service handbook](../handbook/overview) to learn more about services and creating components. diff --git a/docs/tutorial/7-prediction.mdx b/docs/tutorial/7-prediction.mdx deleted file mode 100644 index a7e52d64..00000000 --- a/docs/tutorial/7-prediction.mdx +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: 7. Prediction market -description: prediction market demo overview ---- - - -{/* todo: verify the info in this page and update if needed */} -Now that you've built a simple oracle service, take a look at the [WAVS Demo Repo](https://github.com/Lay3rLabs/wavs-demos/blob/main/demos/PREDICTION_MARKET_DEMO.md) to see a similar component used in action to resolve a prediction market. 
- -This page will give an overview of the prediction market demo, how it works, and how the oracle component is used to resolve markets. - -Prediction market demo repo: https://github.com/Lay3rLabs/wavs-demos/tree/main - -## What is a prediction market? - -A prediction market is a marketplace that gathers insights about the future by rewarding participants for making accurate predictions based on available information. For example, a prediction market could be created for whether it will snow in Oslo on November 5th. Users can create positions by depositing money based on two outcomes: yes or no. After the event transpires, an oracle service can be used to bring in the weather outcome, resolving the market and rewarding those who predicted correctly. - -## How does it work? - -### Market Solidity contracts - -These contracts handle the creation of markets and conditional tokens. - -[`conditional-tokens-contracts`](https://github.com/Lay3rLabs/conditional-tokens-contracts) - these contracts are forked from Gnosis and updated to a recent Solidity version, and they are the core protocol that creates a conditional share in a future outcome. - -[`conditional-tokens-market-makers`](https://github.com/Lay3rLabs/conditional-tokens-market-makers) - these contracts are forked from Gnosis and updated to a recent Solidity version, and they are the market makers that create a market based on the conditional shares above. - -[`PredictionMarketFactory.sol`](https://github.com/Lay3rLabs/wavs-prediction-market/blob/main/src/contracts/PredictionMarketFactory.sol) - this contract sets up all the contracts required for a functioning prediction market using the forked contracts above, and it has the power to resolve the market once the outcome is determined by the oracle AVS. 
- -### Extending the trigger contract - -In this demo, the oracle that resolves the market is triggered by the [`PredictionMarketOracleController.sol`](https://github.com/Lay3rLabs/wavs-prediction-market/blob/main/src/contracts/PredictionMarketOracleController.sol) contract. - -This contract contains modifications to the `WavsTrigger.sol` contract from the [WAVS Foundry Template repo](https://github.com/Lay3rLabs/wavs-foundry-template/tree/main/src/contracts/WavsTrigger.sol). Similar to the simple trigger contract, it passes data to the oracle AVS via the `NewTrigger` event. - -It extends the contract by storing trigger metadata, validating signed AVS outputs, and interacting with the external contracts mentioned above (`PredictionMarketFactory`, `MarketMaker`, and `ConditionalTokens`). - -It also contains logic to enforce a payment when a trigger is added: - -```rust -function addTrigger( - TriggerInputData calldata triggerData - ) external payable returns (ITypes.TriggerId triggerId) { - require(msg.value == 0.1 ether, "Payment must be exactly 0.1 ETH"); -``` - -Take a look at the [`PredictionMarketOracleController.sol`](https://github.com/Lay3rLabs/wavs-prediction-market/blob/main/src/contracts/PredictionMarketOracleController.sol) file to get an idea of how the contract is structured. - -This contract is responsible for interacting with the oracle service, triggering WAVS to run the oracle, waiting for the oracle's response, and telling the market factory to resolve the market. - -### Oracle WASI component - -Similar to the oracle you created in the tutorial, the prediction market uses a simple oracle service to resolve a market by bringing off-chain data on-chain. - -This[ WASI component](https://github.com/Lay3rLabs/wavs-prediction-market/blob/main/components/prediction-market-oracle/src/lib.rs) runs in WAVS and fetches the prediction market's resolution when necessary. 
A wallet executes the [`PredictionMarketOracleController.sol`](https://github.com/Lay3rLabs/wavs-prediction-market/blob/main/src/contracts/PredictionMarketOracleController.sol#L65) contract's `addTrigger` function, which triggers WAVS to run this oracle by emitting an event. Then, WAVS commits the response from this oracle component with a signature back to the contract on-chain, and the market is resolved. - -Below is the `run` function for the Prediction Market oracle component. This function is responsible for fetching the price of Bitcoin and resolving the market based on the price. - -```rust -impl Guest for Component { - fn run(action: TriggerAction) -> std::result::Result, String> { - let market_maker_address = - config_var("market_maker").ok_or_else(|| "Failed to get market maker address")?; - let conditional_tokens_address = config_var("conditional_tokens") - .ok_or_else(|| "Failed to get conditional tokens address")?; - - let trigger_info = decode_trigger_event(action.data)?; - - let bitcoin_price = block_on(get_price_feed(1))?; - - // Resolve the market as YES if the price of Bitcoin is over $1. - let result = bitcoin_price > 1.0; - - Ok(Some(WasmResponse { - payload: encode_trigger_output( - trigger_info.triggerId, - Address::from_str(&market_maker_address).unwrap(), - Address::from_str(&conditional_tokens_address).unwrap(), - result, - ), - ordering: None, - })) - } -} -``` - -Performing this market resolution via WAVS means prediction markets can exist in a fully decentralized manner. Because money is on the line, entrusting any party to honestly resolve the market is a critical security decision—WAVS enables distributing this trust over multiple independent parties, taking advantage of the verifiability and security of the existing WAVS infrastructure instead of relying on any centralized individual. 
- -## Try it out - -You can run the prediction market demo locally by following the steps from the [README](https://github.com/Lay3rLabs/wavs-prediction-market/tree/main). - -Follow along in the video tutorial to see how to run the prediction market demo locally: - - - -
- -
- diff --git a/test_utils/README.md b/test_utils/README.md deleted file mode 100644 index a2ee7604..00000000 --- a/test_utils/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# WAVS Component Test Utilities - -This library provides essential validation tools for WAVS components. All components **MUST** pass these tests before running the `make wasi-build` command. - -## Overview - -The test_utils component is a collection of utilities and validation scripts to ensure WAVS components meet the required standards and follow best practices. It's designed to catch common errors before they cause build failures or runtime issues. - -## What It Does - -- Validates component structure and implementation -- Checks for common anti-patterns and implementation mistakes -- Provides a standardized way to verify components -- Ensures consistent error handling, data management, and API usage - -## Key Features - -- Automated code analysis -- Comprehensive validation of ABI encoding/decoding -- Data ownership and cloning validation -- Error handling pattern verification -- Network request and API security validation - -## Using the Validation Script - -The main validation script can be used to verify any component: - -```bash -# Validate a component using the Makefile command -make validate-component COMPONENT=your-component-name - -# Or run the script directly -cd test_utils -./validate_component.sh your-component-name -``` - - -## Test Modules - -The test utilities are organized into focused modules: - -| Module | Description | -|--------|-------------| -| `abi_encoding` | Proper handling of ABI-encoded data, avoiding common String::from_utf8 errors | -| `code_quality` | Code quality checks, including detecting unused imports and other best practices | -| `data_handling` | Correct data ownership, cloning, and avoiding moved value errors | -| `error_handling` | Proper Option/Result handling, avoiding map_err on Option errors | -| `network_requests` | HTTP request setup, error handling, 
and API key management | -| `solidity_types` | Working with Solidity types, numeric conversions, and struct handling | -| `input_validation` | Input data validation, safe decoding, and defensive programming | - -## Common Errors Prevented - -These tests help you avoid the following common errors: - -1. Using `String::from_utf8` directly on ABI-encoded data -2. Missing Clone derivation on API response structs -3. Using `map_err()` on Option types instead of `ok_or_else()` -4. Improper Rust-Solidity type conversions -5. Ownership issues with collection elements -6. Using `&data.clone()` pattern creating temporary values -7. Missing trait imports causing "no method" errors -8. Ambiguous method calls requiring fully qualified syntax -9. Unused imports cluttering the code -10. Direct version specifications instead of workspace dependencies - diff --git a/test_utils/validate_component.sh b/test_utils/validate_component.sh deleted file mode 100755 index 776b66fb..00000000 --- a/test_utils/validate_component.sh +++ /dev/null @@ -1,574 +0,0 @@ -#!/bin/bash -# Component validation script - IMPROVED VERSION -# Runs comprehensive test utilities to validate a component before building -# Catches all common errors that would prevent successful builds or execution - -# Don't exit on error, we want to collect all errors -set +e - -# Create an array to hold all errors -errors=() -warnings=() - -# Function to add an error -add_error() { - errors+=("$1") - echo "❌ Error: $1" -} - -# Function to add a warning -add_warning() { - warnings+=("$1") - echo "⚠️ Warning: $1" -} - -if [ -z "$1" ]; then - echo "Usage: $0 " - echo "Example: $0 eth-price-oracle" - exit 1 -fi - -COMPONENT_NAME=$1 -COMPONENT_DIR="../components/$COMPONENT_NAME" -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - -# Check if component directory exists -if [ ! 
-d "$COMPONENT_DIR" ]; then - echo "❌ Error: Component directory $COMPONENT_DIR not found" - exit 1 -fi - -echo "🔍 Validating component: $COMPONENT_NAME" - -# Print a section header for better organization -print_section() { - echo - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - echo "🔍 $1" - echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" -} - -#===================================================================================== -# ABI ENCODING CHECKS -#===================================================================================== -print_section "ABI ENCODING CHECKS" - -# 1. Check for String::from_utf8 usage on ABI data in non-generated files -echo "📝 Checking for common String::from_utf8 misuse..." -grep_result=$(grep -r "String::from_utf8" "$COMPONENT_DIR/src" --include="*.rs" | grep -v "bindings.rs" | grep -v "test" | grep -v "# CORRECT" || true) -if [ ! -z "$grep_result" ]; then - if grep -r "String::from_utf8.*data" "$COMPONENT_DIR"/src/*.rs | grep -v "bindings.rs" > /dev/null; then - error_detail=$(grep -r "String::from_utf8.*data" "$COMPONENT_DIR"/src/*.rs | grep -v "bindings.rs") - add_error "Found String::from_utf8 used directly on ABI-encoded data. - This will ALWAYS fail with 'invalid utf-8 sequence' because ABI-encoded data is binary. - Use proper ABI decoding methods instead: - 1. For function calls with string params: functionCall::abi_decode() - 2. For string params: String::abi_decode() - $error_detail" - else - add_warning "Found String::from_utf8 usage. Ensure it's not being used on ABI-encoded data. - This can cause runtime errors if used with encoded data. You can ignore this warning if you are using correctly. - $grep_result" - fi -fi - -# 1b. Check for proper ABI decoding methods -echo "📝 Checking for proper ABI decoding methods..." 
-if grep -r "TriggerData::Raw" "$COMPONENT_DIR"/src/*.rs > /dev/null || - grep -r "cast abi-encode" "$COMPONENT_DIR" > /dev/null; then - - # Component deals with ABI-encoded input data - if ! grep -r "abi_decode" "$COMPONENT_DIR"/src/*.rs > /dev/null && - ! grep -r "::abi_decode" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - add_error "Component appears to handle ABI-encoded input but doesn't use abi_decode methods. - This will cause runtime errors when processing ABI-encoded data. - For ABI-encoded input, use proper decoding methods: - 1. ::abi_decode(&hex_data) - 2. ::abi_decode(&data) - 3. functionCall::abi_decode(&data)" - fi - - # Check for Solidity function definitions when receiving function calls - if grep -r "cast abi-encode \"f(string)" "$COMPONENT_DIR" > /dev/null && - ! grep -r "function.*external" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - add_error "Component receives ABI-encoded function calls but doesn't define Solidity functions. - This will cause runtime errors when trying to decode function calls. - Define appropriate Solidity functions to decode inputs, for example: - sol! { - function checkBalance(string address) external; - }" - fi -fi - -#===================================================================================== -# DATA HANDLING CHECKS -#===================================================================================== -print_section "DATA HANDLING CHECKS" - -# 2a. Check for proper Clone derivation on API structs used with network requests -echo "📝 Checking for Clone derivation on structs..." 
-# Look for structs used in HTTP responses -HTTP_USAGE=$(if grep -r "fetch_json\|http_request_get" "$COMPONENT_DIR"/src/*.rs > /dev/null 2>&1; then echo "1"; else echo "0"; fi) - -# Find structs with Deserialize but missing Clone -STRUCTS_WITH_DERIVE=$(grep -r -B 2 "struct" "$COMPONENT_DIR/src" | grep "derive" || true) -STRUCTS_WITH_DESERIALIZE=$(echo "$STRUCTS_WITH_DERIVE" | grep "Deserialize" || true) -STRUCTS_WITHOUT_CLONE=$(echo "$STRUCTS_WITH_DESERIALIZE" | grep -v "Clone" || true) - -if [ ! -z "$STRUCTS_WITHOUT_CLONE" ]; then - # Check if any struct without Clone is used more than once - STRUCT_USAGE_ERROR=false - - # Extract struct names from the output - while read -r line; do - # Extract struct name using sed - matches "struct Name {" - STRUCT_LINE=$(echo "$line" | grep -A 1 "derive" || true) - if [ ! -z "$STRUCT_LINE" ]; then - STRUCT_NAME=$(echo "$STRUCT_LINE" | grep "struct" | sed -E 's/.*struct\s+([A-Za-z0-9_]+).*/\1/') - - if [ ! -z "$STRUCT_NAME" ]; then - # Count usages of this struct (excluding declaration and imports) - USAGE_COUNT=$(grep -r "$STRUCT_NAME" "$COMPONENT_DIR"/src/*.rs | grep -v "struct $STRUCT_NAME" | grep -v "use.*$STRUCT_NAME" | wc -l) - - # If used multiple times or in JSON handling, it should have Clone - if [ "$USAGE_COUNT" -gt 2 ] || grep -q "serde_json.*$STRUCT_NAME" "$COMPONENT_DIR"/src/*.rs; then - STRUCT_USAGE_ERROR=true - break - fi - fi - fi - done <<< "$STRUCTS_WITHOUT_CLONE" - - # If HTTP request component or multiple usages detected, make it an error - if [ "$HTTP_USAGE" != "0" ] && [ "$STRUCT_USAGE_ERROR" = true ]; then - add_error "Found structs with Deserialize but missing Clone derivation that are used multiple times: - $STRUCTS_WITHOUT_CLONE - - Structs used multiple times with API responses MUST derive Clone to prevent ownership errors. 
- Fix: Add Clone to the derive list like this: - #[derive(Serialize, Deserialize, Debug, Clone)]" - else - add_warning "Found structs with Deserialize but missing Clone derivation: - $STRUCTS_WITHOUT_CLONE - - Consider adding Clone for consistency: - #[derive(Serialize, Deserialize, Debug, Clone)]" - fi -fi - -# 2b. Check for temporary clone pattern (&data.clone()) -echo "📝 Checking for incorrect &data.clone() pattern..." -TEMP_CLONE_PATTERN=$(grep -r "&.*\.clone()" "$COMPONENT_DIR"/src/*.rs || true) -if [ ! -z "$TEMP_CLONE_PATTERN" ]; then - add_error "Found dangerous &data.clone() pattern which creates temporary values that are immediately dropped. - This pattern causes ownership issues because the cloned data is immediately dropped. - Fix: Create a named variable to hold the cloned data instead: - WRONG: let result = std::str::from_utf8(&data.clone()); - RIGHT: let data_clone = data.clone(); - let result = std::str::from_utf8(&data_clone); - $TEMP_CLONE_PATTERN" -fi - -# 2c. Check for potential "move out of index" errors -echo "📝 Checking for potential 'move out of index' errors..." -MOVE_OUT_INDEX=$(grep -r "\[.*\]\..*" "$COMPONENT_DIR"/src/*.rs | grep -v "\.clone()" | grep -v "\.as_ref()" | grep -v "&" | grep -v "bindings.rs" || true) -if [ ! -z "$MOVE_OUT_INDEX" ]; then - add_error "Found potential 'move out of index' errors - accessing collection elements without cloning. - When accessing fields from elements in a collection, you should clone the field to avoid - moving out of the collection, which would make the collection unusable afterward. 
- WRONG: let field = collection[0].field; // This moves the field out of the collection - RIGHT: let field = collection[0].field.clone(); // This clones the field - $MOVE_OUT_INDEX" -fi - -#===================================================================================== -# ERROR HANDLING CHECKS -#===================================================================================== -print_section "ERROR HANDLING CHECKS" - -# 3a. Check for map_err on Option types - focus only on get_evm_chain_config specifically -echo "📝 Checking for map_err on Option types..." -MAP_ERR_CHAIN_CONFIG=$(grep -r "get_evm_chain_config" "$COMPONENT_DIR"/src/*.rs | grep "map_err" | grep -v "ok_or_else" 2>/dev/null || true) - -if [ ! -z "$MAP_ERR_CHAIN_CONFIG" ]; then - add_error "Found map_err used directly on get_evm_chain_config which returns Option, not Result. - Option types don't have map_err method - it's only available on Result types. - WRONG: get_evm_chain_config(\"ethereum\").map_err(|e| e.to_string())? - RIGHT: get_evm_chain_config(\"ethereum\").ok_or_else(|| \"Failed to get config\".to_string())? - $MAP_ERR_CHAIN_CONFIG" -fi - -#===================================================================================== -# IMPORT CHECKS -#===================================================================================== -print_section "IMPORT CHECKS" - -# 4a. Check for proper import of essential traits and types -echo "📝 Checking for essential imports..." -if grep -r "FromStr" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "use std::str::FromStr" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - FROMSTR_USAGE=$(grep -r "FromStr" "$COMPONENT_DIR"/src/*.rs | grep -v "use std::str::FromStr" || true) - add_error "Found FromStr usage but std::str::FromStr is not imported. - This will cause a compile error when using methods like from_str or parse(). - Fix: Add 'use std::str::FromStr;' to your imports. - $FROMSTR_USAGE" -fi - -# 4b. 
Check for min function usage without import -if grep -r "min(" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "use std::cmp::min" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - MIN_USAGE=$(grep -r "min(" "$COMPONENT_DIR"/src/*.rs | grep -v "use std::cmp::min" || true) - add_error "Found min function usage but std::cmp::min is not imported. - This will cause a compile error when using min(). - Fix: Add 'use std::cmp::min;' to your imports. - $MIN_USAGE" -fi - -# 4c. Check for TxKind import issues -if grep -r "alloy_rpc_types::eth::TxKind" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - TXKIND_USAGE=$(grep -r "alloy_rpc_types::eth::TxKind" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found incorrect TxKind import path. Use alloy_primitives::TxKind instead of alloy_rpc_types::eth::TxKind. - This is a critical error that will prevent component compilation. - Fix: 1. Add 'use alloy_primitives::{Address, TxKind, U256};' (or add TxKind to existing import) - 2. Replace 'alloy_rpc_types::eth::TxKind::Call' with 'TxKind::Call' - $TXKIND_USAGE" -fi - -# 4d. Check for TxKind usage without import -if grep -r "::Call" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "use.*TxKind" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - CALL_USAGE=$(grep -r "::Call" "$COMPONENT_DIR"/src/*.rs | grep -v "use.*TxKind" || true) - add_error "Found TxKind usage but TxKind is not properly imported. - Fix: Add 'use alloy_primitives::TxKind;' to your imports. - $CALL_USAGE" -fi - -# 4e. Check for block_on usage without the correct import - improved to handle grouped imports -echo "📝 Checking for block_on import..." 
-if grep -r "block_on" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - # Check both formats: direct import and grouped import - DIRECT_IMPORT=$(grep -r "use wstd::runtime::block_on" "$COMPONENT_DIR"/src/*.rs || true) - GROUPED_IMPORT=$(grep -r "use wstd::{.*runtime::block_on" "$COMPONENT_DIR"/src/*.rs || true) - RUNTIME_IMPORT=$(grep -r "use wstd::.*runtime" "$COMPONENT_DIR"/src/*.rs || true) - - if [ -z "$DIRECT_IMPORT" ] && [ -z "$GROUPED_IMPORT" ] && [ -z "$RUNTIME_IMPORT" ]; then - BLOCK_ON_USAGE=$(grep -r "block_on" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found block_on usage but wstd::runtime::block_on is not imported. - This will cause a compile error when using async functions. - Fix: Add 'use wstd::runtime::block_on;' to your imports. - $BLOCK_ON_USAGE" - fi -fi - -# 4f. Check for HTTP function imports -if grep -r "http_request_" "$COMPONENT_DIR"/src/*.rs > /dev/null || grep -r "fetch_json" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - # Check for both direct import and grouped import patterns - DIRECT_HTTP_IMPORT=$(grep -r "use wavs_wasi_utils::http::" "$COMPONENT_DIR"/src/*.rs || true) - GROUPED_HTTP_IMPORT=$(grep -r "use wavs_wasi_utils::{.*http::{.*fetch_json\|.*http_request_" "$COMPONENT_DIR"/src/*.rs || true) - - if [ -z "$DIRECT_HTTP_IMPORT" ] && [ -z "$GROUPED_HTTP_IMPORT" ]; then - HTTP_USAGE=$(grep -r "http_request_\|fetch_json" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found HTTP function usage but wavs_wasi_utils::http is not imported. - Fix: Add 'use wavs_wasi_utils::http::{fetch_json, http_request_get};' to your imports. - $HTTP_USAGE" - fi -fi - -# 4g. Check for SolCall trait missing when using abi_encode -if grep -r "abi_encode" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! 
grep -r "use.*SolCall" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - if grep -r "Call.*abi_encode" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - CALL_ABI_USAGE=$(grep -r "Call.*abi_encode" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found Call::abi_encode usage but SolCall trait is not imported. - Function calls require the SolCall trait for encoding. - Fix: Add 'use alloy_sol_types::{SolCall, SolValue};' to your imports. - $CALL_ABI_USAGE" - fi -fi - -# After the existing import checks, add: -echo "📝 Checking for missing dependencies..." -# Get all local module names (mod foo;) from src/*.rs -LOCAL_MODS=$(grep -h -E '^mod ' "$COMPONENT_DIR"/src/*.rs | sed -E 's/^mod ([a-zA-Z0-9_]+);/\1/' | sort | uniq) -# Add known local modules -LOCAL_MODS="$LOCAL_MODS trigger bindings" -# Get all imports from the code, extract just the crate names -IMPORTS=$(grep -h -r "^use" "$COMPONENT_DIR"/src/*.rs | \ - sed -E 's/^use[[:space:]]+//' | \ - sed -E 's/ as [^;]+//' | \ - sed -E 's/[{].*//' | \ - sed -E 's/;.*//' | \ - cut -d: -f1 | \ - awk -F'::' '{print $1}' | \ - grep -vE '^(crate|self|super|std|core|wavs_wasi_utils|wstd)$' | \ - sort | uniq) - -# Check each import against Cargo.toml dependencies -for import in $IMPORTS; do - # Skip empty lines - if [[ -z "$import" ]]; then - continue - fi - # Skip local modules - if echo "$LOCAL_MODS" | grep -wq "$import"; then - continue - fi - # Convert import name to Cargo.toml format (replace underscores with hyphens) - cargo_name=$(echo "$import" | tr '_' '-') - # Check if the import is in Cargo.toml (either directly or as a workspace dependency) - if ! grep -q "$cargo_name.*=.*{.*workspace.*=.*true" "$COMPONENT_DIR/Cargo.toml" && ! grep -q "$cargo_name.*=.*\"" "$COMPONENT_DIR/Cargo.toml"; then - add_error "Import '$import' is used but not found in Cargo.toml dependencies.\n Add it to your [dependencies] section in Cargo.toml and to [workspace.dependencies] in the root Cargo.toml." 
- fi -done - -#===================================================================================== -# COMPONENT STRUCTURE CHECKS -#===================================================================================== -print_section "COMPONENT STRUCTURE CHECKS" - -# 5a. Check for proper export! macro usage and syntax -echo "📝 Checking for proper component export..." -if ! grep -r "export!" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - add_error "export! macro not found. Components must use export! macro. - Fix: Add 'export!(YourComponent with_types_in bindings);' to your component." -fi - -# 5b. Check for correct export! macro syntax with with_types_in -if grep -r "export!" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "export!.*with_types_in bindings" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - EXPORT_USAGE=$(grep -r "export!" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Incorrect export! macro syntax. Use 'export!(YourComponent with_types_in bindings)' instead of just 'export!(YourComponent)'. - Fix: Update to 'export!(YourComponent with_types_in bindings);' - $EXPORT_USAGE" -fi - -# 5c. Check for TriggerAction structure usage issues -echo "📝 Checking for TriggerAction structure usage..." -if grep -r "trigger.trigger_data" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - TRIGGER_DATA_USAGE=$(grep -r "trigger.trigger_data" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Component accesses non-existent 'trigger_data' field on TriggerAction. Use 'trigger.data' instead. - $TRIGGER_DATA_USAGE" -fi - -# 5d. 
Check for incorrect match pattern on trigger.data (treating it as Option) -if grep -r -A 5 -B 2 "match trigger.data" "$COMPONENT_DIR"/src/*.rs 2>/dev/null | grep -q "Some(" && - grep -r -A 8 -B 2 "match trigger.data" "$COMPONENT_DIR"/src/*.rs 2>/dev/null | grep -q "None =>"; then - TRIGGER_MATCH=$(grep -r -A 5 -B 2 "match trigger.data" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Component incorrectly treats 'trigger.data' as an Option, but it's a TriggerData. - The field is not optional - don't match against Some/None patterns. - $TRIGGER_MATCH" -fi - -# 5e. Check for Guest trait implementation -if ! grep -r "impl Guest for" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - add_error "Guest trait implementation not found. Components must implement the Guest trait. - Fix: Add 'impl Guest for YourComponent { fn run(trigger: TriggerAction) -> Result, String> { ... } }'" -fi - -# 5f. Check for run function with correct signature - improved to accept variations in naming/qualification -if ! grep -r "fn run(.*TriggerAction.*) -> .*Result, String>" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - add_error "run function with correct result signature not found. - The run function must return std::result::Result, String>" -fi - -#===================================================================================== -# SECURITY CHECKS -#===================================================================================== -print_section "SECURITY CHECKS" - -# 6a. Check for hardcoded API keys -echo "📝 Checking for hardcoded API keys..." -API_KEYS=$(grep -r "key=.*[0-9a-zA-Z]\{8,\}" "$COMPONENT_DIR" --include="*.rs" || true) -if [ ! -z "$API_KEYS" ]; then - add_error "Found possible hardcoded API key. Use environment variables instead. - Fix: Use std::env::var(\"WAVS_ENV_YOUR_API_KEY\") to get API keys from environment variables. - $API_KEYS" -fi - -# 6b. 
Check for other potential hardcoded secrets -OTHER_SECRETS=$(grep -r "token=\|secret=\|password=" "$COMPONENT_DIR" --include="*.rs" | grep "[0-9a-zA-Z]\{8,\}" || true) -if [ ! -z "$OTHER_SECRETS" ]; then - add_error "Found possible hardcoded secret. Use environment variables instead. - Fix: Use std::env::var(\"WAVS_ENV_YOUR_SECRET\") to get secrets from environment variables. - $OTHER_SECRETS" -fi - -#===================================================================================== -# DEPENDENCIES CHECKS -#===================================================================================== -print_section "DEPENDENCIES CHECKS" - -# 7. Check for proper workspace dependency usage -echo "📝 Checking for proper workspace dependency usage..." -VERSION_NUMBERS=$(grep -r "version = \"[0-9]" "$COMPONENT_DIR/Cargo.toml" || true) -if [ ! -z "$VERSION_NUMBERS" ]; then - add_error "Found direct version numbers in Cargo.toml. Use workspace = true instead. - Fix: Replace version numbers with { workspace = true } for all dependencies. - WRONG: some-crate = \"0.1.0\" - RIGHT: some-crate = { workspace = true } - $VERSION_NUMBERS" -fi - -#===================================================================================== -# CODE QUALITY CHECKS -#===================================================================================== -print_section "CODE QUALITY CHECKS" - -# 8. Check for unused imports with cargo check -echo "📝 Checking for unused imports and code issues..." -cd "$SCRIPT_DIR/.." -COMPONENT_NAME_SIMPLE=$(basename "$COMPONENT_DIR") - -# Run cargo check and capture any errors (not just warnings) -CARGO_OUTPUT=$(cargo check -p "$COMPONENT_NAME_SIMPLE" 2>&1) -CARGO_ERRORS=$(echo "$CARGO_OUTPUT" | grep -i "error:" | grep -v "generated file bindings.rs" || true) - -if [ ! 
-z "$CARGO_ERRORS" ]; then - add_error "cargo check found compilation errors: - $CARGO_ERRORS" -fi - -# Show warnings but don't fail on them -CARGO_WARNINGS=$(echo "$CARGO_OUTPUT" | grep -i "warning:" | grep -v "profiles for the non root package" || true) -if [ ! -z "$CARGO_WARNINGS" ]; then - add_warning "cargo check found warnings that might indicate issues: - $CARGO_WARNINGS" -fi - -cd "$SCRIPT_DIR" - -#===================================================================================== -# SOLIDITY TYPES CHECKS -#===================================================================================== -print_section "SOLIDITY TYPES CHECKS" - -# 9a. Check for sol! macro usage without proper import -echo "📝 Checking for sol! macro imports..." -if grep -r "sol!" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - if ! grep -r "use alloy_sol_types::sol" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "use alloy_sol_macro::sol" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - SOL_USAGE=$(grep -r "sol!" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found sol! macro usage but neither alloy_sol_types::sol nor alloy_sol_macro::sol is imported. - Fix: Add 'use alloy_sol_types::sol;' to your imports. - $SOL_USAGE" - fi -fi - -# 9b. Check for solidity module structure -echo "📝 Checking for proper solidity module structure..." -if grep -r "sol::" "$COMPONENT_DIR"/src/*.rs > /dev/null && ! grep -r "mod solidity" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - SOL_NAMESPACE=$(grep -r "sol::" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found 'sol::' namespace usage without defining a 'solidity' module. - Fix: Create a proper module structure like: - mod solidity { - use alloy_sol_types::sol; - sol! { /* your solidity types */ } - } - $SOL_NAMESPACE" -fi - -# 9c. Check for string literals assigned to String type fields in structs -echo "📝 Checking for string literal to String conversions..." 
-# Look for patterns like 'field: "string literal",' in struct initializations -# Only check lib.rs to avoid auto-generated bindings.rs -if [ -f "$COMPONENT_DIR/src/lib.rs" ]; then - STRING_FIELDS=$(grep -A 20 "pub struct" "$COMPONENT_DIR/src/lib.rs" | grep -E "^\s*pub\s+[a-zA-Z0-9_]+:\s+String," | sed -E 's/^\s*pub\s+([a-zA-Z0-9_]+):\s+String,.*/\1/' || true) - - if [ ! -z "$STRING_FIELDS" ]; then - # For each string field, check for literals without to_string() - for FIELD in $STRING_FIELDS; do - # Skip if field name is empty or contains special characters - if [[ "$FIELD" =~ ^[a-zA-Z0-9_]+$ ]]; then - # Look for patterns like 'field: "literal",' without to_string() - STRING_LITERAL_USAGE=$(grep -r "$FIELD: \"" "$COMPONENT_DIR"/src/lib.rs | grep -v "\.to_string()" || true) - - if [ ! -z "$STRING_LITERAL_USAGE" ]; then - add_error "Found string literals assigned directly to String type fields without .to_string() conversion: - $STRING_LITERAL_USAGE - - This will cause a type mismatch error because &str cannot be assigned to String. - Fix: Always convert string literals to String type using .to_string(): - WRONG: field: \"literal string\", - RIGHT: field: \"literal string\".to_string()," - break - fi - fi - done - fi -fi - -#===================================================================================== -# STRING SAFETY CHECKS -#===================================================================================== -print_section "STRING SAFETY CHECKS" - -# 10a. Check for unbounded string.repeat operations -echo "📝 Checking for string capacity overflow risks..." - -# First, collect all .repeat() calls - simpler approach to catch all possible cases -REPEAT_CALLS=$(grep -r "\.repeat(" "$COMPONENT_DIR"/src/*.rs || true) - -if [ ! 
-z "$REPEAT_CALLS" ]; then - # Look for any .repeat() calls with potentially unsafe variables - RISKY_REPEAT_PATTERNS="decimals\|padding\|len\|size\|count\|width\|height\|indent\|offset\|spaces\|zeros\|chars\|digits" - - # Check for specific safety patterns - SAFETY_PATTERNS="std::cmp::min\|::min(\|min(\|// SAFE:" - - # Check if any .repeat call doesn't use a safety bound - UNSAFE_REPEATS=$(echo "$REPEAT_CALLS" | grep -i "$RISKY_REPEAT_PATTERNS" | grep -v "$SAFETY_PATTERNS" || true) - - if [ ! -z "$UNSAFE_REPEATS" ]; then - add_error "Found potentially unbounded string.repeat operations: -$UNSAFE_REPEATS - -This can cause capacity overflow errors. Options to fix: - 1. Add a direct safety check: \".repeat(std::cmp::min(variable, 100))\" - 2. Use a bounded variable: \"let safe_value = std::cmp::min(variable, MAX_SIZE); .repeat(safe_value)\" - 3. Add a safety comment if manually verified: \"// SAFE: bounded by check above\"" - fi -fi - -#===================================================================================== -# NETWORK REQUEST CHECKS -#===================================================================================== -print_section "NETWORK REQUEST CHECKS" - -# 11a. Check for proper block_on usage with async functions -echo "📝 Checking for proper async handling..." -if grep -r "async fn" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - if ! grep -r "block_on" "$COMPONENT_DIR"/src/*.rs > /dev/null; then - ASYNC_USAGE=$(grep -r "async fn" "$COMPONENT_DIR"/src/*.rs || true) - add_error "Found async functions but no block_on usage. 
- Async functions must be called with block_on in component run functions: - block_on(async { make_request().await }) - $ASYNC_USAGE" - fi -fi - -#===================================================================================== -# FINAL SUCCESS MESSAGE -#===================================================================================== -print_section "VALIDATION SUMMARY" - -# Check if there are any errors or warnings -ERROR_COUNT=${#errors[@]} -WARNING_COUNT=${#warnings[@]} - -if [ $ERROR_COUNT -gt 0 ]; then - echo "❌ Component validation failed with $ERROR_COUNT errors and $WARNING_COUNT warnings." - echo - echo "⚠️ YOU MUST FIX ALL ERRORS BEFORE RUNNING 'make wasi-build'." - echo " Failure to fix these issues will result in build or runtime errors." - exit 1 -else - if [ $WARNING_COUNT -gt 0 ]; then - echo "⚠️ Component validation passed with $WARNING_COUNT warnings." - echo " Consider fixing these warnings to improve your component's reliability." - else - echo "✅ Component validation checks complete! No errors or warnings found." - fi - - echo "🚀 Component is ready for building. Run the following command to build:" - echo " cd ../.. 
&& make wasi-build" -fi - -# After all static checks, add: -echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" -echo "🔍 CARGO CHECK (compilation test)" -echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" -cargo check --manifest-path "$(pwd)/../components/$COMPONENT_NAME/Cargo.toml" --target wasm32-wasip1 From 10cd1bc39f07564d5e52a4a5297244a4042661bd Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 11:47:59 -0500 Subject: [PATCH 2/6] other --- .vscode/settings.json | 82 ------------------------------------------- tools/upgrade.sh | 40 --------------------- 2 files changed, 122 deletions(-) delete mode 100644 .vscode/settings.json delete mode 100755 tools/upgrade.sh diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 6d0693fd..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "terminal.integrated.profiles.osx": { - "Claude Code": { - // docker compose -f compose.claude.yml run --rm --remove-orphans -it claude-code-sandbox bash -c 'claude; exec' - "path": "docker", - "args": [ - "compose", - "-f", - ".claude/docker-compose.claude.yml", - "run", - "--rm", - "--remove-orphans", - "-it", - "claude-code-sandbox", - "bash", - "-c", - "claude; exec" - ], - "icon": "robot", - "overrideName": true - }, - "Claude Code (unrestricted)": { - // docker compose -f compose.claude.yml run --rm --remove-orphans -it claude-code-sandbox bash -c 'claude --dangerously-skip-permissions; exec' - "path": "docker", - "args": [ - "compose", - "-f", - ".claude/docker-compose.claude.yml", - "run", - "--rm", - "--remove-orphans", - "-it", - "claude-code-sandbox", - "bash", - "-c", - "claude --dangerously-skip-permissions; exec" - ], - "icon": "robot", - "overrideName": true - } - }, - "terminal.integrated.profiles.linux": { - "Claude Code": { - // docker compose -f compose.claude.yml run --rm --remove-orphans -it claude-code-sandbox bash -c 'claude; exec' - "path": "docker", - 
"args": [ - "compose", - "-f", - ".claude/docker-compose.claude.yml", - "run", - "--rm", - "--remove-orphans", - "-it", - "claude-code-sandbox", - "bash", - "-c", - "claude; exec" - ], - "icon": "robot", - "overrideName": true - }, - "Claude Code (unrestricted)": { - // docker compose -f compose.claude.yml run --rm --remove-orphans -it claude-code-sandbox bash -c 'claude --dangerously-skip-permissions; exec' - "path": "docker", - "args": [ - "compose", - "-f", - ".claude/docker-compose.claude.yml", - "run", - "--rm", - "--remove-orphans", - "-it", - "claude-code-sandbox", - "bash", - "-c", - "claude --dangerously-skip-permissions; exec" - ], - "icon": "robot", - "overrideName": true - } - } -} diff --git a/tools/upgrade.sh b/tools/upgrade.sh deleted file mode 100755 index de992807..00000000 --- a/tools/upgrade.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -set -e - -# Take first argument as the version to upgrade to -VERSION=$1 - -if [ -z "$VERSION" ]; then - echo "Usage: $0 " - exit 1 -fi - -# conditional sudo, just for docker -if groups | grep -q docker; then - SUDO=""; -else - SUDO="sudo"; -fi - -# pull this version to ensure we have it -if ! 
${SUDO} docker pull ghcr.io/lay3rlabs/wavs:${VERSION}; then - echo "Invalid WAVS version, cannot pull ghcr.io/lay3rlabs/wavs:${VERSION}" - exit 1 -fi - -# Update Makefile -sed -E -i "s/ghcr.io\/lay3rlabs\/wavs:[^ ]+/ghcr.io\/lay3rlabs\/wavs:${VERSION}/g" Makefile - -# Update docker-compose.yml -sed -E -i "s/ghcr.io\/lay3rlabs\/wavs:[^\"]+/ghcr.io\/lay3rlabs\/wavs:${VERSION}/g" docker-compose.yml - -# Update Cargo.toml (for crates dependencies) -sed -E -i "s/wavs-wasi-utils = \"[^\"]+/wavs-wasi-utils = \"${VERSION}/g" Cargo.toml - -# Update [package.metadata.component] in components/*/Cargo.toml (for wit) -sed -E -i "s/wavs:worker\/layer-trigger-world@[^\"]+/wavs:worker\/layer-trigger-world@${VERSION}/g" components/*/Cargo.toml - -# Rebuild with cargo component build in order to update bindings and Cargo.lock -rm components/*/src/bindings.rs -make wasi-build From 8cecf88191cb0017f31a8d91427c5835794e1f51 Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 12:00:39 -0500 Subject: [PATCH 3/6] taskfiles --- .env.example | 24 ++++--- .gitignore | 1 + .gitmodules | 0 Taskfile.yml | 150 ++++++++++++++++++++++++++++++++++++++++ package.json | 2 +- taskfile/build.yml | 34 ++++++++++ taskfile/config.yml | 68 +++++++++++++++++++ taskfile/deploy.yml | 112 ++++++++++++++++++++++++++++++ taskfile/docker.yml | 32 +++++++++ taskfile/env.yml | 154 ++++++++++++++++++++++++++++++++++++++++++ taskfile/operator.yml | 102 ++++++++++++++++++++++++++++ taskfile/services.yml | 56 +++++++++++++++ taskfile/wasi.yml | 83 +++++++++++++++++++++++ 13 files changed, 808 insertions(+), 10 deletions(-) delete mode 100644 .gitmodules create mode 100644 Taskfile.yml create mode 100644 taskfile/build.yml create mode 100644 taskfile/config.yml create mode 100644 taskfile/deploy.yml create mode 100644 taskfile/docker.yml create mode 100644 taskfile/env.yml create mode 100644 taskfile/operator.yml create mode 100644 taskfile/services.yml create mode 100644 taskfile/wasi.yml diff --git 
a/.env.example b/.env.example index f76c49c7..1a4de5b5 100644 --- a/.env.example +++ b/.env.example @@ -1,16 +1,22 @@ DEPLOY_ENV=LOCAL -METADATA_URI=https://raw.githubusercontent.com/Lay3rLabs/wavs-foundry-template/refs/heads/main/metadata.json -LOCAL_ETHEREUM_RPC_URL=http://localhost:8545 -CHAIN_ID=17000 -TESTNET_RPC_URL=https://1rpc.io/holesky +CHAIN_ID=31337 # 31337 for local, 11155111 for Sepolia +LOCAL_ETHEREUM_RPC_URL=http://localhost:8545 +# RPC_URL=https://ethereum-sepolia-rpc.publicnode.com/ +# TESTNET_RPC_URL=https://ethereum-sepolia-rpc.publicnode.com/ # this key requires funds. used as the admin / deployer of contracts and core contracts. -FUNDED_KEY= +FUNDED_KEY=0x323ef3a895cad9fcb625aa859953bba2812a64b39f2fd0b857f4757f0e896858 -LST_CONTRACT_ADDRESS=0x3F1c547b21f65e10480dE3ad8E19fAAC46C95034 -LST_STRATEGY_ADDRESS=0x7D704507b76571a51d9caE8AdDAbBFd0ba0e63d3 +# WASI package namespace - required for non-LOCAL deployments (e.g., wavs for https://wa.dev/wavs) +WASI_NAMESPACE=wavs # https://app.pinata.cloud/developers/api-keys, JWT token -PINATA_API_KEY= -ETHERSCAN_API_KEY=foobar +WAVS_ENV_PINATA_API_URL= +WAVS_ENV_PINATA_API_KEY= + +# https://etherscan.io/apidashboard / https://docs.etherscan.io/getting-started/viewing-api-usage-statistics +WAVS_ENV_ETHERSCAN_API_KEY= + +# twitterapi.io API TOKEN +WAVS_ENV_TWITTERAPI_TOKEN= diff --git a/.gitignore b/.gitignore index 6a3e5378..83ac1842 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ node_modules/ # WAVS .env +!compiled/aggregator.wasm compiled/*.wasm .docker/*.json .docker/*.log diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index e69de29b..00000000 diff --git a/Taskfile.yml b/Taskfile.yml new file mode 100644 index 00000000..3073a55d --- /dev/null +++ b/Taskfile.yml @@ -0,0 +1,150 @@ +# https://taskfile.dev/installation/ +# - npm install -g @go-task/cli +version: "3" + +silent: true + +dotenv: [".env"] + +includes: + build: ./taskfile/build.yml + wasi: ./taskfile/wasi.yml + docker: 
./taskfile/docker.yml + deploy: ./taskfile/deploy.yml + services: ./taskfile/services.yml + operator: ./taskfile/operator.yml + env: + taskfile: ./taskfile/env.yml + flatten: true + +vars: + # Docker configuration + DOCKER_IMAGE: '{{.DOCKER_IMAGE | default "ghcr.io/lay3rlabs/wavs:1.4.1"}}' + MIDDLEWARE_DOCKER_IMAGE: '{{.MIDDLEWARE_DOCKER_IMAGE | default "ghcr.io/lay3rlabs/wavs-middleware:0.5.0-beta.10"}}' + + # Check if user is in docker group to determine if sudo is needed + DOCKER_SUDO: + sh: | + if groups | grep -q docker; then echo ''; else echo 'sudo'; fi + + # Default endpoints + RPC_URL: '{{.RPC_URL | default "http://127.0.0.1:8545"}}' + WAVS_ENDPOINT: '{{.WAVS_ENDPOINT | default "http://127.0.0.1:8000"}}' + AGGREGATOR_URL: '{{.AGGREGATOR_URL | default "http://localhost:8001"}}' + IPFS_ENDPOINT: '{{.IPFS_ENDPOINT | default "http://127.0.0.1:5001"}}' + IPFS_GATEWAY: '{{.IPFS_GATEWAY | default "http://127.0.0.1:8080/ipfs/"}}' + +tasks: + default: + cmds: + - task --list-all + + help: + desc: "Show available tasks" + cmds: + - task --list-all + + setup: + desc: "Install initial dependencies" + cmds: + - | + echo "📦 Installing npm dependencies..." + npm install + echo "📦 Installing forge dependencies..." 
+ forge install + echo "✅ Dependencies installed" + + start-all-local: + desc: "Start all local services (anvil, IPFS, WARG, Jaeger, prometheus)" + deps: [setup-env] + cmds: + - task: services:start-all + + deploy-full: + desc: "Run complete WAVS deployment pipeline" + cmds: + - | + export AGGREGATOR_URL={{.AGGREGATOR_URL}} + bash ./script/deploy-script.sh + + clean: + desc: "Clean up project files" + cmds: + - forge clean + - cargo clean + - rm -rf cache out broadcast + - task: docker:clean + + test: + desc: "Run all tests" + cmds: + - forge test + + fmt: + desc: "Format Solidity and Rust code" + cmds: + - forge fmt + - cargo fmt + + lint:check: + desc: "Check linting and formatting" + cmds: + - npm run lint:check + + lint:fix: + desc: "Fix linting and formatting issues" + cmds: + - npm run lint:fix + + setup-env: + desc: "Create .env file from example if it doesn't exist" + cmds: + - | + if [ ! -f .env ]; then + if [ -f .env.example ]; then + echo "Creating .env file from .env.example..." + cp .env.example .env + echo ".env file created successfully!" + fi + fi + + check-requirements: + desc: "Verify system requirements are installed" + cmds: + - | + echo "🔍 Validating system requirements..." + # Check Node.js + if ! command -v node >/dev/null 2>&1; then + echo "❌ Node.js not found. Install with: curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.3/install.sh | bash && nvm install --lts" + exit 1 + fi + NODE_VERSION=$(node --version) + MAJOR_VERSION=$(echo $NODE_VERSION | sed 's/^v\([0-9]*\)\..*/\1/') + if [ $MAJOR_VERSION -lt 21 ]; then + echo "❌ Node.js version $NODE_VERSION is less than required v21" + echo "💡 Upgrade with: nvm install --lts" + exit 1 + fi + # Check jq + if ! command -v jq >/dev/null 2>&1; then + echo "❌ jq not found. Install with: brew install jq (macOS) or apt install jq (Linux)" + exit 1 + fi + # Check cargo + if ! command -v cargo >/dev/null 2>&1; then + echo "❌ cargo not found. 
Install with: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh" + exit 1 + fi + # Check docker + if ! command -v docker >/dev/null 2>&1; then + echo "❌ docker not found. Visit: https://docs.docker.com/get-docker/" + exit 1 + fi + echo "✅ All requirements satisfied" + + dos2unix: + desc: "Convert all script files to Unix line endings" + cmds: + - | + find script/ -type f -name "*.sh" -exec dos2unix {} \; + find script/ -type f -name "*.env*" -exec dos2unix {} \; diff --git a/package.json b/package.json index bcc9bd92..b854a0c9 100644 --- a/package.json +++ b/package.json @@ -27,7 +27,7 @@ "@defi-wonderland/natspec-smells": "1.1.6", "@openzeppelin/contracts": "^5.2.0", "forge-std": "github:foundry-rs/forge-std#v1.9.6", - "@wavs/solidity": "0.4.0", + "@wavs/solidity": "0.5.0-beta.10", "lint-staged": ">=10", "solhint-community": "4.0.0", "sort-package-json": "2.10.0" diff --git a/taskfile/build.yml b/taskfile/build.yml new file mode 100644 index 00000000..ec901d55 --- /dev/null +++ b/taskfile/build.yml @@ -0,0 +1,34 @@ +version: "3" + +tasks: + all: + desc: "Build everything (Solidity + WASI components)" + cmds: + - task: forge + - task: wasi + + forge: + desc: "Build Solidity contracts" + cmds: + - forge build + + wasi: + desc: "Build WASI components (all or specific one)" + vars: + WASI_BUILD_DIR: '{{.WASI_BUILD_DIR | default ""}}' + cmds: + - | + warg reset || echo "warg reset failed (warg server not started), continuing..." 
+ RECIPE="wasi-build" + MAKEFILE_DIRS=$(find components/* -maxdepth 1 -name "Makefile" -o -name "makefile") + + for makefile_path in $MAKEFILE_DIRS; do + if grep -q "^${RECIPE}:" "$makefile_path" 2>/dev/null; then + if [ "{{.WASI_BUILD_DIR}}" == "" ] || [[ "$makefile_path" == *"{{.WASI_BUILD_DIR}}"* ]]; then + parent_dir=$(dirname "$makefile_path") + make -s -C "$parent_dir" $RECIPE + fi + else + echo "Recipe '$RECIPE' not found in $makefile_path" + fi + done diff --git a/taskfile/config.yml b/taskfile/config.yml new file mode 100644 index 00000000..c0272e5b --- /dev/null +++ b/taskfile/config.yml @@ -0,0 +1,68 @@ +version: "3" + +tasks: + wallet-address: + desc: "Get the wallet address" + cmds: + - 'echo "{{.WALLET_ADDRESS}}"' + silent: true + + funded-key: + desc: "Get the funded private key" + cmds: + - 'echo "{{.FUNDED_KEY}}"' + silent: true + + service-manager-address: + desc: "Get the WAVS Service Manager address" + cmds: + - 'echo "{{.WAVS_SERVICE_MANAGER_ADDRESS}}"' + silent: true + + service-id: + desc: "Get the service ID" + cmds: + - 'echo "{{.SERVICE_ID}}"' + silent: true + +vars: + # Repository general + REPO_ROOT: + sh: git rev-parse --show-toplevel + + # Wallet configuration (from .env via Taskfile dotenv) + FUNDED_KEY: "{{.FUNDED_KEY}}" + WALLET_ADDRESS: + sh: test -n "{{.FUNDED_KEY}}" && cast wallet address "{{.FUNDED_KEY}}" 2>/dev/null || echo "" + + # Docker images + WAVS_DOCKER_IMAGE: "ghcr.io/lay3rlabs/wavs:1.4.1" + MIDDLEWARE_DOCKER_IMAGE: "ghcr.io/lay3rlabs/wavs-middleware:0.5.0-beta.10" + + # Service endpoints + IPFS_ENDPOINT: "http://localhost:5001" + IPFS_GATEWAY: "http://localhost:8080/ipfs/" + RPC_URL: "http://127.0.0.1:8545" + WAVS_ENDPOINT: "http://127.0.0.1:8000" + AGGREGATOR_URL: "http://localhost:8001" + + # Deployment summary path + DEPLOY_SUMMARY: ".docker/deployment_summary.json" + + SERVICE_INDEX: '{{.SERVICE_INDEX | default "0"}}' + SERVICE_ID: + sh: curl -s http://localhost:8000/services | jq -r 
".service_ids[{{.SERVICE_INDEX}}]" + WAVS_SERVICE_MANAGER_ADDRESS: + sh: | + if [ -n "${WAVS_SERVICE_MANAGER_ADDRESS}" ]; then + echo "${WAVS_SERVICE_MANAGER_ADDRESS}" + else + addr=$(jq -r '.addresses.POAStakeRegistry' .nodes/poa_deploy.json 2>/dev/null || echo "") + if [ -n "$addr" ] && [ "$addr" != "null" ]; then + echo "$addr" + else + # writes to stderr + echo "⚠️ WAVS Service Manager address not found (looked in \$WAVS_SERVICE_MANAGER_ADDRESS env and .nodes/poa_deploy.json file)" 1>&2; + echo "" + fi + fi diff --git a/taskfile/deploy.yml b/taskfile/deploy.yml new file mode 100644 index 00000000..cda6b6a0 --- /dev/null +++ b/taskfile/deploy.yml @@ -0,0 +1,112 @@ +version: "3" + +tasks: + full: + desc: "Complete WAVS deployment pipeline" + vars: + PRIVATE_KEY: '{{.PRIVATE_KEY | default "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"}}' + cmds: + - | + echo "🚀 Starting complete WAVS deployment..." + export RPC_URL=$(task get-rpc) + export AGGREGATOR_URL={{.AGGREGATOR_URL}} + bash ./script/deploy-script.sh + + ipfs: + desc: "Upload service config to IPFS" + vars: + SERVICE_FILE: '{{.SERVICE_FILE | default ".docker/service.json"}}' + PINATA_API_KEY: "{{.PINATA_API_KEY}}" + preconditions: + - test -f {{.SERVICE_FILE}} + cmds: + - | + if [ "$(task get-deploy-status)" = "LOCAL" ]; then + curl -X POST "http://127.0.0.1:5001/api/v0/add?pin=true" \ + -H "Content-Type: multipart/form-data" \ + -F file=@{{.SERVICE_FILE}} | jq -r .Hash + else + if [ -z "{{.PINATA_API_KEY}}" ]; then + echo "Error: PINATA_API_KEY is not set. 
Please set it to your Pinata API key" + echo "Get one at: https://app.pinata.cloud/developers/api-keys" + exit 1 + fi + curl -X POST --url https://uploads.pinata.cloud/v3/files \ + --header "Authorization: Bearer {{.PINATA_API_KEY}}" \ + --header 'Content-Type: multipart/form-data' \ + --form file=@{{.SERVICE_FILE}} \ + --form network=public \ + --form name=service-$(date +"%b-%d-%Y").json | jq -r .data.cid + fi + + component: + desc: "Upload WASI component to WAVS endpoint" + vars: + COMPONENT_FILENAME: '{{.COMPONENT_FILENAME | default "wavs_eas_attest.wasm"}}' + WAVS_ENDPOINT: "{{.WAVS_ENDPOINT}}" + preconditions: + - test -n "{{.COMPONENT_FILENAME}}" + - test -f "./compiled/{{.COMPONENT_FILENAME}}" + cmds: + - | + echo "📤 Uploading component: {{.COMPONENT_FILENAME}}..." + wget --post-file=./compiled/{{.COMPONENT_FILENAME}} \ + --header="Content-Type: application/wasm" \ + -O - {{.WAVS_ENDPOINT}}/components | jq -r .digest + echo "✅ Component uploaded successfully" + + service: + desc: "Deploy WAVS service from URL" + vars: + SERVICE_URL: "{{.SERVICE_URL}}" + WAVS_ENDPOINT: "{{.WAVS_ENDPOINT}}" + IPFS_GATEWAY: "{{.IPFS_GATEWAY}}" + preconditions: + - test -n "{{.SERVICE_URL}}" + cmds: + - | + if [ -n "{{.WAVS_ENDPOINT}}" ]; then + echo "🔍 Checking WAVS service at {{.WAVS_ENDPOINT}}..." + if [ "$(curl -s -o /dev/null -w "%{http_code}" {{.WAVS_ENDPOINT}}/info)" != "200" ]; then + echo "❌ WAVS service not reachable at {{.WAVS_ENDPOINT}}" + echo "💡 Make sure the service is running" + exit 1 + fi + echo "✅ WAVS service is running" + fi + echo "🚀 Deploying service from: {{.SERVICE_URL}}..." 
+ {{.DOCKER_SUDO}} docker run --rm --network host \ + $(test -f .env && echo "--env-file ./.env") \ + -v $(pwd):/data {{.DOCKER_IMAGE}} wavs-cli \ + deploy-service --service-url {{.SERVICE_URL}} \ + --log-level=debug --data /data/.docker --home /data \ + {{if .WAVS_ENDPOINT}}--wavs-endpoint {{.WAVS_ENDPOINT}}{{end}} \ + {{if .IPFS_GATEWAY}}--ipfs-gateway {{.IPFS_GATEWAY}}{{end}} + echo "✅ Service deployed successfully" + + single-operator-poa-local: + desc: "Deploy a single operator with POA" + cmds: + - | + if [ "$(task get-deploy-status)" != "LOCAL" ]; then + echo "Not in LOCAL mode, skipping single operator PoA. Register manually in ServiceManager (README_TESTNET.md)." + exit 0 + fi + + echo "Deploying single operator with POA..." + + # Register operator with initial weight using owner operation + PRIVATE_KEY=`task config:funded-key` OPERATOR_NUM=1 OPERATOR_WEIGHT=1000 task operator:register + + # Update signing key for the operator + OPERATOR_NUM=1 task operator:update-signing-key + + # Verify the registration + OPERATOR_NUM=1 task operator:verify + + if [ "$(task get-deploy-status)" = "LOCAL" ]; then + echo "Mining 1 block on local network..." 
+ cast rpc anvil_mine --rpc-url $(task get-rpc) 2&> /dev/null + fi + + echo "✅ Single operator with POA deployed successfully" diff --git a/taskfile/docker.yml b/taskfile/docker.yml new file mode 100644 index 00000000..d7c1b5b5 --- /dev/null +++ b/taskfile/docker.yml @@ -0,0 +1,32 @@ +version: "3" + +tasks: + clean: + desc: "Remove unused docker containers" + ignore_error: true + cmds: + - | + {{.DOCKER_SUDO}} docker rm -v $({{.DOCKER_SUDO}} docker ps -a --filter status=exited -q) 2>/dev/null || true + + wavs-cli: + desc: "Run wavs-cli in Docker" + vars: + ARGS: '{{.CLI_ARGS}}' + cmds: + - | + {{.DOCKER_SUDO}} docker run --rm --network host \ + $(test -f .env && echo "--env-file ./.env") \ + -v $(pwd):/data {{.DOCKER_IMAGE}} wavs-cli {{.ARGS}} + + middleware: + desc: "Run wavs-middleware in Docker" + vars: + COMMAND: '{{.COMMAND}}' + cmds: + - | + {{.DOCKER_SUDO}} docker run --rm --network host --env-file .env \ + $(if {{.WAVS_SERVICE_MANAGER_ADDRESS}}; then echo "-e WAVS_SERVICE_MANAGER_ADDRESS={{.WAVS_SERVICE_MANAGER_ADDRESS}}"; fi) \ + $(if {{.OPERATOR_KEY}}; then echo "-e OPERATOR_KEY={{.OPERATOR_KEY}}"; fi) \ + $(if {{.WAVS_SIGNING_KEY}}; then echo "-e WAVS_SIGNING_KEY={{.WAVS_SIGNING_KEY}}"; fi) \ + $(if {{.WAVS_DELEGATE_AMOUNT}}; then echo "-e WAVS_DELEGATE_AMOUNT={{.WAVS_DELEGATE_AMOUNT}}"; fi) \ + -v ./.nodes:/root/.nodes {{.MIDDLEWARE_DOCKER_IMAGE}} {{.COMMAND}} diff --git a/taskfile/env.yml b/taskfile/env.yml new file mode 100644 index 00000000..fa922eaf --- /dev/null +++ b/taskfile/env.yml @@ -0,0 +1,154 @@ +version: "3" + +includes: + config: ./config.yml + +vars: + DEPLOY_SUMMARY: '.docker/deployment_summary.json' + +tasks: + get-rpc: + desc: "Get RPC URL" + env: + DEPLOY_ENV: + sh: task get-deploy-status + cmds: + - | + # Get deployment environment + if [ ! 
-f .env ]; then + cp .env.example .env + fi + + # Get RPC URL based on environment + if [ "$DEPLOY_ENV" = "LOCAL" ]; then + grep "^LOCAL_ETHEREUM_RPC_URL=" .env | cut -d '=' -f2- + elif [ "$DEPLOY_ENV" = "TESTNET" ]; then + grep "^TESTNET_RPC_URL=" .env | cut -d '=' -f2- + else + echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" >&2 + exit 1 + fi + + get-chain-id: + desc: "Get chain ID" + cmds: + - | + if [ ! -f .env ]; then + cp .env.example .env + fi + grep "^CHAIN_ID=" .env | cut -d '=' -f2 | tr -d '\r\n' + + get-deploy-status: + desc: "Get deployment environment status" + cmds: + - | + # Get deployment environment + if [ ! -f .env ]; then + cp .env.example .env + fi + DEPLOY_ENV=$(grep "^DEPLOY_ENV=" .env | cut -d '=' -f2 | tr '[:lower:]' '[:upper:]' | tr -d '\r\n') + echo "$DEPLOY_ENV" + + get-registry: + desc: "Get WASI registry based on deployment environment" + cmds: + - | + DEPLOY_ENV=$(task get-deploy-status) + if [ "$DEPLOY_ENV" = "LOCAL" ]; then + echo "localhost:8090" + elif [ "$DEPLOY_ENV" = "TESTNET" ]; then + echo "wa.dev" + else + echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" >&2 + exit 1 + fi + + get-wasi-namespace: + desc: "Get WASI package namespace" + vars: + REGISTRY: '{{.REGISTRY}}' + cmds: + - | + # Auto-get registry if not provided + REGISTRY="{{.REGISTRY}}" + if [ -z "$REGISTRY" ]; then + REGISTRY=$(task get-registry) + fi + + # Get deployment environment + if [ ! 
-f .env ]; then + cp .env.example .env + fi + + DEPLOY_ENV=$(task get-deploy-status) + if [ "$DEPLOY_ENV" = "LOCAL" ]; then + echo "example" + else + # Check for WASI_NAMESPACE in .env file + WASI_NAMESPACE=$(grep "^WASI_NAMESPACE=" .env 2>/dev/null | cut -d '=' -f2 | tr -d '\r\n') + if [ -z "$WASI_NAMESPACE" ]; then + echo "Error: WASI_NAMESPACE must be set in .env file for non-LOCAL deployments" >&2 + echo "Please add WASI_NAMESPACE=your_namespace to your .env file" >&2 + exit 1 + fi + echo "${WASI_NAMESPACE}" + fi + + get-ipfs-gateway: + desc: "Get IPFS gateway based on deployment environment" + cmds: + - | + DEPLOY_ENV=$(task get-deploy-status) + if [ "$DEPLOY_ENV" = "LOCAL" ]; then + echo "http://127.0.0.1:8080/ipfs/" + elif [ "$DEPLOY_ENV" = "TESTNET" ]; then + echo "https://gateway.pinata.cloud/ipfs/" + else + echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" >&2 + exit 1 + fi + + setup-avs-signing: + desc: "Setup AVS signing keys for service" + vars: + HD_INDEX: '{{.HD_INDEX | default "1"}}' + DEFAULT_ENV_FILE: '{{.DEFAULT_ENV_FILE | default "infra/wavs-1/.env"}}' + cmds: + - | + # SERVICE_ID=`curl -s http://localhost:8000/services | jq -r ".services[{{.SERVICE_INDEX}}].id"` + # if [ -z "$SERVICE_ID" ] || [ "$SERVICE_ID" == "null" ]; then + # echo "Error: SERVICE_ID is null or not found for index {{.SERVICE_INDEX}}." >&2 + # exit 1 + # fi + + # wavs 0.5.2 changed this where there are no longer service ids to get from the app. + # hardcoding for now :shrug: idk.. + HD_INDEX={{.HD_INDEX}} + + source {{.DEFAULT_ENV_FILE}} + export OPERATOR_PRIVATE_KEY=`cast wallet private-key --mnemonic "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index 0` + export AVS_SIGNING_ADDRESS=`cast wallet address --mnemonic-path "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index ${HD_INDEX}` + + echo "HD_INDEX=${HD_INDEX}" + echo "SERVICE_ID=${SERVICE_ID}" + echo "AVS_SIGNING_ADDRESS=${AVS_SIGNING_ADDRESS}" + + AVS_SIGNING_FILE=".docker/avs_signing.json" + if [ ! 
-f ${AVS_SIGNING_FILE} ]; then + echo "{}" > ${AVS_SIGNING_FILE} + fi + jq --arg key "$AVS_SIGNING_ADDRESS" \ + --arg hd_index "$HD_INDEX" \ + --arg service_id "$SERVICE_ID" \ + '.[$key] = {hd_index: $hd_index, service_id: $service_id}' \ + ${AVS_SIGNING_FILE} > .docker/avs_signing.tmp && mv .docker/avs_signing.tmp ${AVS_SIGNING_FILE} + + show-deploy-summary: + desc: "Display all deployment addresses" + preconditions: + - test -f {{.DEPLOY_SUMMARY}} + cmds: + - | + echo "📋 Deployment Summary" + echo "====================" + jq . {{.DEPLOY_SUMMARY}} diff --git a/taskfile/operator.yml b/taskfile/operator.yml new file mode 100644 index 00000000..d0da09c3 --- /dev/null +++ b/taskfile/operator.yml @@ -0,0 +1,102 @@ +version: "3" + +silent: true + +vars: + RPC_URL: '{{.RPC_URL | default "http://localhost:8545"}}' + OPERATOR_NUM: '{{.OPERATOR_NUM}}' + OPERATOR_WEIGHT: '{{.OPERATOR_WEIGHT | default "1000"}}' + WAVS_ENV_FILE: 'infra/wavs-{{.OPERATOR_NUM}}/.env' + + # Common shell script snippets + VALIDATE_INPUTS: | + if [ -z "{{.OPERATOR_NUM}}" ]; then + echo "❌ Error: OPERATOR_NUM is required" + echo "Usage: task operator: OPERATOR_NUM=1" + exit 1 + fi + if [ ! 
-f "{{.WAVS_ENV_FILE}}" ]; then + echo "❌ Error: Environment file {{.WAVS_ENV_FILE}} not found" + exit 1 + fi + echo "📝 Loading operator {{.OPERATOR_NUM}} configuration from {{.WAVS_ENV_FILE}}" + + LOAD_ENV_AND_KEYS: | + source {{.WAVS_ENV_FILE}} + OPERATOR_KEY=$(cast wallet private-key --mnemonic "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index 0) + OPERATOR_ADDRESS=$(cast wallet address ${OPERATOR_KEY}) + OPERATOR_MNEMONIC="$WAVS_SUBMISSION_MNEMONIC" + WAVS_SIGNING_KEY=$(cast wallet address --mnemonic "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index 1) + +tasks: + register: + desc: "Register an operator with WAVS Service Manager" + cmds: + - | + {{.VALIDATE_INPUTS}} + {{.LOAD_ENV_AND_KEYS}} + + echo "🔑 Operator Address: ${OPERATOR_ADDRESS}" + + # Fund operator if on local network + if [[ "{{.RPC_URL}}" == *"localhost"* ]] || [[ "{{.RPC_URL}}" == *"127.0.0.1"* ]]; then + echo "💰 Funding operator on local network..." + cast rpc anvil_setBalance ${OPERATOR_ADDRESS} $(cast to-hex 100000000000000000) --rpc-url {{.RPC_URL}} + fi + + echo "📝 Whitelisting (registering) operator in WAVS Service Manager..." + cast send {{.WAVS_SERVICE_MANAGER_ADDRESS}} "registerOperator(address,uint256)" ${OPERATOR_ADDRESS} {{.OPERATOR_WEIGHT}} \ + --private-key ${PRIVATE_KEY} \ + --rpc-url {{.RPC_URL}} + + echo "⚖️ Checking operator weight..." + cast call {{.WAVS_SERVICE_MANAGER_ADDRESS}} "getOperatorWeight(address)" ${OPERATOR_ADDRESS} \ + --rpc-url {{.RPC_URL}} + + echo "✅ Registration complete!" 
+ + update-signing-key: + desc: "Update operator's signing key with WAVS Service Manager" + cmds: + - | + {{.VALIDATE_INPUTS}} + {{.LOAD_ENV_AND_KEYS}} + + echo "🔑 Operator Address: ${OPERATOR_ADDRESS}" + echo "🔑 New Signing Key: ${WAVS_SIGNING_KEY}" + + encoded_operator_address=$(cast abi-encode "f(address)" "$OPERATOR_ADDRESS") + signing_message=$(cast keccak "$encoded_operator_address") + signing_signature=$(cast wallet sign --no-hash --mnemonic "$OPERATOR_MNEMONIC" --mnemonic-index 1 "$signing_message") + echo "Signing signature: $signing_signature" + + echo "🔐 Updating signing key..." + cast send {{.WAVS_SERVICE_MANAGER_ADDRESS}} "updateOperatorSigningKey(address,bytes)" ${WAVS_SIGNING_KEY} ${signing_signature} \ + --private-key ${OPERATOR_KEY} \ + --rpc-url {{.RPC_URL}} + + echo "✅ Signing key update complete!" + + verify: + desc: "Verify operator registration status" + cmds: + - | + {{.VALIDATE_INPUTS}} + {{.LOAD_ENV_AND_KEYS}} + + echo "🔍 Verifying operator {{.OPERATOR_NUM}} registration..." 
+ + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "Operator Address: ${OPERATOR_ADDRESS}" + + echo -n "Operator Signing Key: " + cast call {{.WAVS_SERVICE_MANAGER_ADDRESS}} "getLatestOperatorSigningKey(address)" ${OPERATOR_ADDRESS} \ + --rpc-url {{.RPC_URL}} + + echo -n "Signing Key -> Operator: " + cast call {{.WAVS_SERVICE_MANAGER_ADDRESS}} "getLatestOperatorForSigningKey(address)" ${WAVS_SIGNING_KEY} \ + --rpc-url {{.RPC_URL}} + + echo -n "Operator Weight: " + cast call {{.WAVS_SERVICE_MANAGER_ADDRESS}} "getOperatorWeight(address)" ${OPERATOR_ADDRESS} \ + --rpc-url {{.RPC_URL}} diff --git a/taskfile/services.yml b/taskfile/services.yml new file mode 100644 index 00000000..5861a0c7 --- /dev/null +++ b/taskfile/services.yml @@ -0,0 +1,56 @@ +version: "3" + +tasks: + start-all: + desc: "Start all local services (anvil, IPFS, WARG, Jaeger, prometheus)" + cmds: + - | + set -e + + # if [ -f .env ] && grep -q '^TESTNET_RPC_URL=' .env; then + # TESTNET_RPC_URL=$(grep -E '^TESTNET_RPC_URL=' .env | cut -d '=' -f2- | tr -d '"') + # else + # TESTNET_RPC_URL="https://ethereum-sepolia-rpc.publicnode.com" + # echo "No TESTNET_RPC_URL found in .env, using default ${TESTNET_RPC_URL}" + # fi + + PORT=8545 + # MIDDLEWARE_IMAGE=ghcr.io/lay3rlabs/wavs-middleware:0.5.0-beta.10 + FORK_RPC_URL=${FORK_RPC_URL:-"${TESTNET_RPC_URL}"} + DEPLOY_ENV=$(task get-deploy-status) + + ## == Base Anvil Testnet Fork == + if [ "$DEPLOY_ENV" = "TESTNET" ]; then + echo "Running in testnet mode, nothing to do" + exit 0 + fi + + if [ "$DEPLOY_ENV" = "LOCAL" ]; then + # anvil --fork-url ${FORK_RPC_URL} --port ${PORT} & + # removed the fork-url since this moved off of eigen and does not need (constantly hitting a Error: Failed to send transaction Context: - server returned an error response: error code -32003: Insufficient funds for gas * price + value) + anvil --port $PORT & + trap "killall anvil && echo -e '\nKilled anvil'" EXIT + while ! 
cast block-number --rpc-url http://localhost:${PORT} > /dev/null 2>&1 + do + sleep 0.25 + done + + FILES="-f docker-compose.yml" # -f telemetry/docker-compose.yml + docker compose ${FILES} pull + docker compose ${FILES} up --force-recreate -d + trap "docker compose ${FILES} down --remove-orphans && docker kill wavs-1 wavs-aggregator-1 > /dev/null 2>&1 && echo -e '\nKilled IPFS + Local WARG, and wavs instances'" EXIT + + echo "Started..." + wait + fi + + stop-all: + desc: "Stop all local services" + cmds: + - | + echo "Stopping all services..." + FILES="-f docker-compose.yml -f telemetry/docker-compose.yml" + docker compose ${FILES} down --remove-orphans || true + docker kill wavs-1 wavs-aggregator-1 > /dev/null 2>&1 || true + pkill -f anvil || true + echo "All services stopped" diff --git a/taskfile/wasi.yml b/taskfile/wasi.yml new file mode 100644 index 00000000..f7874717 --- /dev/null +++ b/taskfile/wasi.yml @@ -0,0 +1,83 @@ +version: "3" + +tasks: + exec: + desc: "Execute WASI component locally" + vars: + COMPONENT_FILENAME: '{{.COMPONENT_FILENAME | default "wavs_eas_attest.wasm"}}' + INPUT_DATA: '{{.INPUT_DATA | default ""}}' + WAVS_CMD: '{{.DOCKER_SUDO}} docker run --rm --network host $(test -f .env && echo "--env-file ./.env") -v $(pwd):/data {{.DOCKER_IMAGE}} wavs-cli' + cmds: + - | + {{.WAVS_CMD}} exec --log-level=info --data /data/.docker --home /data \ + --component "/data/compiled/{{.COMPONENT_FILENAME}}" \ + --input $(cast abi-encode "f(string)" "{{.INPUT_DATA}}") + + exec-fixed: + desc: "Execute WASI component with fixed input (for Go/TS components)" + vars: + COMPONENT_FILENAME: '{{.COMPONENT_FILENAME | default "wavs_eas_attest.wasm"}}' + INPUT_DATA: '{{.INPUT_DATA | default ""}}' + WAVS_CMD: '{{.DOCKER_SUDO}} docker run --rm --network host $(test -f .env && echo "--env-file ./.env") -v $(pwd):/data {{.DOCKER_IMAGE}} wavs-cli' + cmds: + - | + {{.WAVS_CMD}} exec --log-level=info --data /data/.docker --home /data \ + --component 
"/data/compiled/{{.COMPONENT_FILENAME}}" \ + --input `cast format-bytes32-string {{.INPUT_DATA}}` + + validate: + desc: "Validate a WASI component against best practices" + vars: + COMPONENT: '{{.COMPONENT}}' + preconditions: + - test -n "{{.COMPONENT}}" + - test -d "./components/{{.COMPONENT}}" + - test -d "./test_utils" + cmds: + - cd test_utils && ./validate_component.sh {{.COMPONENT}} + + upload-to-registry: + desc: "Upload WASI component to registry" + vars: + PKG_NAME: '{{.PKG_NAME}}' + PKG_VERSION: '{{.PKG_VERSION}}' + COMPONENT_FILENAME: '{{.COMPONENT_FILENAME}}' + preconditions: + - test -n "{{.PKG_NAME}}" + - test -n "{{.PKG_VERSION}}" + - test -n "{{.COMPONENT_FILENAME}}" + - test -f "./compiled/{{.COMPONENT_FILENAME}}" + cmds: + - | + export REGISTRY=`task get-registry` + if [ -z "$REGISTRY" ]; then + echo "REGISTRY is not set. Please set the REGISTRY environment variable." && exit 1 + fi + export PKG_NAMESPACE=`task get-wasi-namespace REGISTRY=${REGISTRY}` + if [ -z "$PKG_NAMESPACE" ]; then + echo "PKG_NAMESPACE is not set. Please set the PKG_NAMESPACE environment variable." && exit 1 + fi + + cd $(git rev-parse --show-toplevel) || exit + + PROTOCOL="https" + if [[ "$REGISTRY" == *"localhost"* ]] || [[ "$REGISTRY" == *"127.0.0.1"* ]]; then + PROTOCOL="http" + fi + echo "Publishing to registry (${PROTOCOL}://${REGISTRY})..." + + output=$(warg publish release --registry ${PROTOCOL}://${REGISTRY} --name ${PKG_NAMESPACE}:{{.PKG_NAME}} --version {{.PKG_VERSION}} ./compiled/{{.COMPONENT_FILENAME}}) + echo "output: ${output}" + exit_code=$? + + # Check for specific error conditions in the output + if [[ $exit_code -ne 0 ]]; then + if [[ "$output" =~ "failed to prove inclusion" ]]; then + echo "Package uploaded to local registry successfully..." 
+ elif [[ "$output" =~ "error sending request for url" ]]; then + echo "NOTE: Check to make sure you are running the registry locally" + echo "${output}" + else + echo "Unknown error occurred ${output}" + fi + fi From 3155f2e96f5656b6b4dfb284acf62d3215283a80 Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 13:07:00 -0500 Subject: [PATCH 4/6] working v1.0.0 with golang --- Cargo-component.lock | 8 +- Cargo.lock | 387 +- Cargo.toml | 30 +- Makefile | 61 +- README.md | 43 +- compiled/README.md | 7 + components/evm-price-oracle/Cargo.toml | 2 +- components/evm-price-oracle/Makefile | 1 + components/evm-price-oracle/config.json | 14 - components/evm-price-oracle/src/bindings.rs | 12398 ++++++++++++------ components/evm-price-oracle/src/lib.rs | 60 +- components/evm-price-oracle/src/solidity.rs | 13 + components/evm-price-oracle/src/trigger.rs | 35 +- config/components.json | 27 + docker-compose.yml | 2 +- foundry.toml | 20 +- package-lock.json | 669 +- script/Common.s.sol | 2 +- script/avs-signing-key.sh | 22 - script/build-service.sh | 270 +- script/build_components.sh | 27 - script/create-aggregator.sh | 27 +- script/create-deployer.sh | 60 +- script/create-operator.sh | 28 +- script/deploy-contracts.sh | 33 - script/deploy-script.sh | 285 +- script/get-deploy-status.sh | 18 - script/get-ipfs-gateway.sh | 16 - script/get-registry.sh | 16 - script/get-rpc.sh | 16 - script/get-wasi-namespace.sh | 22 - script/start_all.sh | 44 - script/upload-components-background.sh | 209 + script/upload-to-wasi-registry.sh | 48 - src/common/CmdRunner.sol | 29 - src/contracts/WavsSubmit.sol | 6 +- src/contracts/WavsTrigger.sol | 2 +- src/interfaces/ITypes.sol | 2 +- src/interfaces/IWavsTrigger.sol | 2 +- {script => src/script}/ShowResult.s.sol | 2 +- {script => src/script}/Trigger.s.sol | 2 +- test/unit/WavsTrigger.t.sol | 2 +- wavs.toml | 107 +- 43 files changed, 10255 insertions(+), 4819 deletions(-) create mode 100644 compiled/README.md delete mode 100644 
components/evm-price-oracle/config.json create mode 100644 components/evm-price-oracle/src/solidity.rs create mode 100644 config/components.json delete mode 100644 script/avs-signing-key.sh delete mode 100755 script/build_components.sh delete mode 100644 script/deploy-contracts.sh delete mode 100644 script/get-deploy-status.sh delete mode 100644 script/get-ipfs-gateway.sh delete mode 100644 script/get-registry.sh delete mode 100644 script/get-rpc.sh delete mode 100644 script/get-wasi-namespace.sh delete mode 100644 script/start_all.sh create mode 100755 script/upload-components-background.sh delete mode 100644 script/upload-to-wasi-registry.sh delete mode 100644 src/common/CmdRunner.sol rename {script => src/script}/ShowResult.s.sol (98%) rename {script => src/script}/Trigger.s.sol (96%) diff --git a/Cargo-component.lock b/Cargo-component.lock index 02fdaeeb..f727c5ed 100644 --- a/Cargo-component.lock +++ b/Cargo-component.lock @@ -3,9 +3,9 @@ version = 1 [[package]] -name = "wavs:worker" +name = "wavs:operator" [[package.version]] -requirement = "^0.4.0" -version = "0.4.0" -digest = "sha256:3530635218173131d0af9f114f61d97f08f080b3e25f1daacb15dc6dafa302ec" +requirement = "=1.2.0" +version = "1.2.0" +digest = "sha256:268da3de372803eb95bc93731fc5db190d7cd3d7eff5704d2e9ac26a88a2a34c" diff --git a/Cargo.lock b/Cargo.lock index 04acd446..7a17c268 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "addr2line" @@ -36,15 +36,16 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32c3f3bc4f2a6b725970cd354e78e9738ea1e8961a91898f57bf6317970b1915" +checksum = "59094911f05dbff1cf5b29046a00ef26452eccc8d47136d50a47c0cf22f00c85" dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", "alloy-trie", + "alloy-tx-macros", "auto_impl", "c-kzg", "derive_more", @@ -54,15 +55,16 @@ dependencies = [ "rand 0.8.5", "secp256k1", "serde", + "serde_json", "serde_with", "thiserror", ] [[package]] name = "alloy-consensus-any" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dda014fb5591b8d8d24cab30f52690117d238e52254c6fb40658e91ea2ccd6c3" +checksum = "903cb8f728107ca27c816546f15be38c688df3c381d7bd1a4a9f215effc1ddb4" dependencies = [ "alloy-consensus", "alloy-eips", @@ -110,9 +112,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7b2f7010581f29bcace81776cf2f0e022008d05a7d326884763f16f3044620" +checksum = "ac7f1c9a1ccc7f3e03c36976455751a6166a4f0d2d2c530c3f87dfe7d0cdc836" dependencies = [ "alloy-eip2124", "alloy-eip2930", @@ -125,14 +127,16 @@ dependencies = [ "derive_more", "either", "serde", + "serde_with", "sha2", + "thiserror", ] [[package]] name = "alloy-json-abi" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0068ae277f5ee3153a95eaea8ff10e188ed8ccde9b7f9926305415a2c0ab2442" +checksum = "a2acb6637a9c0e1cdf8971e0ced8f3fa34c04c5e9dccf6bb184f6a64fe0e37d8" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -142,12 +146,13 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.15.11" +version = "1.0.37" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca1e31b50f4ed9a83689ae97263d366b15b935a67c4acb5dd46d5b1c3b27e8e6" +checksum = "65f763621707fa09cece30b73ecc607eb43fd7a72451fe3b46f645b905086926" dependencies = [ "alloy-primitives", "alloy-sol-types", + "http", "serde", "serde_json", "thiserror", @@ -156,9 +161,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879afc0f4a528908c8fe6935b2ab0bc07f77221a989186f71583f7592831689e" +checksum = "2f59a869fa4b4c3a7f08b1c8cb79aec61c29febe6e24a24fe0fcfded8a9b5703" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -182,9 +187,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec185bac9d32df79c1132558a450d48f6db0bfb5adef417dbb1a0258153f879b" +checksum = "46e9374c667c95c41177602ebe6f6a2edd455193844f011d973d374b65501b38" dependencies = [ "alloy-consensus", "alloy-eips", @@ -195,17 +200,17 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a12fe11d0b8118e551c29e1a67ccb6d01cc07ef08086df30f07487146de6fa1" +checksum = "5b77f7d5e60ad8ae6bd2200b8097919712a07a6db622a4b201e7ead6166f02e5" dependencies = [ "alloy-rlp", "bytes", "cfg-if", "const-hex", "derive_more", - "foldhash", - "hashbrown 0.15.3", + "foldhash 0.2.0", + "hashbrown 0.16.0", "indexmap 2.9.0", "itoa", "k256", @@ -222,9 +227,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2d918534afe9cc050eabd8309c107dafd161aa77357782eca4f218bef08a660" +checksum = "77818b7348bd5486491a5297579dbfe5f706a81f8e1f5976393025f1e22a7c7d" dependencies = [ 
"alloy-chains", "alloy-consensus", @@ -283,15 +288,14 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15e30dcada47c04820b64f63de2423506c5c74f9ab59b115277ef5ad595a6fc" +checksum = "2430d5623e428dd012c6c2156ae40b7fe638d6fca255e3244e0fba51fa698e93" dependencies = [ "alloy-json-rpc", "alloy-primitives", "alloy-transport", "alloy-transport-http", - "async-stream", "futures", "pin-project", "reqwest", @@ -301,16 +305,15 @@ dependencies = [ "tokio-stream", "tower", "tracing", - "tracing-futures", "url", "wasmtimer", ] [[package]] name = "alloy-rpc-types-any" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5a8f1efd77116915dad61092f9ef9295accd0b0b251062390d9c4e81599344" +checksum = "07429a1099cd17227abcddb91b5e38c960aaeb02a6967467f5bb561fbe716ac6" dependencies = [ "alloy-consensus-any", "alloy-rpc-types-eth", @@ -319,9 +322,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc1323310d87f9d950fb3ff58d943fdf832f5e10e6f902f405c0eaa954ffbaf1" +checksum = "db46b0901ee16bbb68d986003c66dcb74a12f9d9b3c44f8e85d51974f2458f0f" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -334,14 +337,15 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", + "serde_with", "thiserror", ] [[package]] name = "alloy-serde" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05ace2ef3da874544c3ffacfd73261cdb1405d8631765deb991436a53ec6069" +checksum = "5413814be7a22fbc81e0f04a2401fcc3eb25e56fd53b04683e8acecc6e1fe01b" dependencies = [ "alloy-primitives", "serde", @@ -350,9 +354,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.15.11" +version = "1.0.37" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fdabad99ad3c71384867374c60bcd311fc1bb90ea87f5f9c779fd8c7ec36aa" +checksum = "53410a18a61916e2c073a6519499514e027b01e77eeaf96acd1df7cf96ef6bb2" dependencies = [ "alloy-primitives", "async-trait", @@ -365,9 +369,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d3ef8e0d622453d969ba3cded54cf6800efdc85cb929fe22c5bdf8335666757" +checksum = "78c84c3637bee9b5c4a4d2b93360ee16553d299c3b932712353caf1cea76d0e6" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", @@ -379,9 +383,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-expander" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0e84bd0693c69a8fbe3ec0008465e029c6293494df7cb07580bf4a33eff52e1" +checksum = "a882aa4e1790063362434b9b40d358942b188477ac1c44cfb8a52816ffc0cc17" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", @@ -398,9 +402,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro-input" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3de663412dadf9b64f4f92f507f78deebcc92339d12cf15f88ded65d41c7935" +checksum = "18e5772107f9bb265d8d8c86e0733937bb20d0857ea5425b1b6ddf51a9804042" dependencies = [ "alloy-json-abi", "const-hex", @@ -416,9 +420,9 @@ dependencies = [ [[package]] name = "alloy-sol-type-parser" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "251273c5aa1abb590852f795c938730fa641832fc8fa77b5478ed1bf11b6097e" +checksum = "e188b939aa4793edfaaa099cb1be4e620036a775b4bdf24fdc56f1cd6fd45890" dependencies = [ "serde", "winnow", @@ -426,25 +430,25 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "1.1.0" +version = "1.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5460a975434ae594fe2b91586253c1beb404353b78f0a55bf124abcd79557b15" +checksum = "c3c8a9a909872097caffc05df134e5ef2253a1cdb56d3a9cf0052a042ac763f9" dependencies = [ "alloy-json-abi", "alloy-primitives", "alloy-sol-macro", - "const-hex", "serde", ] [[package]] name = "alloy-transport" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6964d85cd986cfc015b96887b89beed9e06d0d015b75ee2b7bfbd64341aab874" +checksum = "d94ee404368a3d9910dfe61b203e888c6b0e151a50e147f95da8baff9f9c7763" dependencies = [ "alloy-json-rpc", "alloy-primitives", + "auto_impl", "base64", "derive_more", "futures", @@ -462,9 +466,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "0.15.11" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef7c5ea7bda4497abe4ea92dcb8c76e9f052c178f3c82aa6976bcb264675f73c" +checksum = "a2f8a6338d594f6c6481292215ee8f2fd7b986c80aba23f3f44e761a8658de78" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -477,9 +481,9 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.8.1" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "983d99aa81f586cef9dae38443245e585840fcf0fc58b09aee0b1f27aed1d500" +checksum = "e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -491,6 +495,19 @@ dependencies = [ "tracing", ] +[[package]] +name = "alloy-tx-macros" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64c09ec565a90ed8390d82aa08cd3b22e492321b96cb4a3d4f58414683c9e2f" +dependencies = [ + "alloy-primitives", + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -550,6 +567,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ark-ff" 
+version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" +dependencies = [ + "ark-ff-asm 0.5.0", + "ark-ff-macros 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "educe", + "itertools 0.13.0", + "num-bigint", + "num-traits", + "paste", + "zeroize", +] + [[package]] name = "ark-ff-asm" version = "0.3.0" @@ -570,6 +607,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-asm" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" +dependencies = [ + "quote", + "syn 2.0.101", +] + [[package]] name = "ark-ff-macros" version = "0.3.0" @@ -595,6 +642,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "ark-serialize" version = "0.3.0" @@ -616,6 +676,18 @@ dependencies = [ "num-bigint", ] +[[package]] +name = "ark-serialize" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" +dependencies = [ + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "num-bigint", +] + [[package]] name = "ark-std" version = "0.3.0" @@ -636,6 +708,16 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "ark-std" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + [[package]] name = "arrayvec" version = "0.7.6" @@ -667,6 +749,12 @@ dependencies = [ 
"syn 2.0.101", ] +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + [[package]] name = "async-trait" version = "0.1.88" @@ -851,9 +939,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "chrono" @@ -987,8 +1075,18 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] @@ -1005,13 +1103,39 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "serde", + "strsim", + "syn 2.0.101", +] + [[package]] name = "darling_macro" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core", + "darling_core 0.20.11", + "quote", + "syn 2.0.101", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", "quote", "syn 2.0.101", ] @@ -1135,6 +1259,18 @@ dependencies = [ "spki", ] +[[package]] +name = "educe" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" +dependencies = [ + "enum-ordinalize", + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "either" version = "1.15.0" @@ -1164,6 +1300,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "enum-ordinalize" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea0dcfa4e54eeb516fe454635a95753ddd39acda650ce703031c6973e315dd5" +dependencies = [ + "enum-ordinalize-derive", +] + +[[package]] +name = "enum-ordinalize-derive" +version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "equivalent" version = "1.0.2" @@ -1182,7 +1338,7 @@ dependencies = [ [[package]] name = "evm-price-oracle" -version = "0.4.0" +version = "1.0.0" dependencies = [ "alloy-sol-macro", "alloy-sol-types", @@ -1190,7 +1346,7 @@ dependencies = [ "serde", "serde_json", "wavs-wasi-utils", - "wit-bindgen-rt 0.42.1", + "wit-bindgen-rt 0.44.0", "wstd", ] @@ -1256,6 +1412,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1458,7 +1620,16 @@ checksum = 
"84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.1.5", +] + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +dependencies = [ + "foldhash 0.2.0", "serde", ] @@ -1790,6 +1961,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.14.0" @@ -2030,13 +2210,14 @@ dependencies = [ [[package]] name = "nybbles" -version = "0.3.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8983bb634df7248924ee0c4c3a749609b5abcb082c28fffe3254b3eb3602b307" +checksum = "2c4b5ecbd0beec843101bffe848217f770e8b8da81d8355b7d6e226f2199b3dc" dependencies = [ "alloy-rlp", - "const-hex", + "cfg-if", "proptest", + "ruint", "serde", "smallvec", ] @@ -2492,13 +2673,14 @@ dependencies = [ [[package]] name = "ruint" -version = "1.14.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78a46eb779843b2c4f21fac5773e25d6d5b7c8f0922876c91541790d2ca27eef" +checksum = "a68df0380e5c9d20ce49534f292a36a7514ae21350726efe1865bdb1fa91d278" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", "ark-ff 0.4.2", + "ark-ff 0.5.0", "bytes", "fastrlp 0.3.1", "fastrlp 0.4.0", @@ -2512,7 +2694,7 @@ dependencies = [ "rand 0.9.1", "rlp", "ruint-macro", - "serde", + "serde_core", "valuable", "zeroize", ] @@ -2714,18 +2896,28 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -2780,7 +2972,7 @@ version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro2", "quote", "syn 2.0.101", @@ -2951,9 +3143,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d0f0d4760f4c2a0823063b2c70e97aa2ad185f57be195172ccc0e23c4b787c4" +checksum = "2375c17f6067adc651d8c2c51658019cef32edfff4a982adaf1d7fd1c039f08b" dependencies = [ "paste", "proc-macro2", @@ -3216,18 +3408,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "futures", - "futures-task", - "pin-project", - "tracing", -] - [[package]] name = "try-lock" version = "0.2.5" @@ -3344,6 +3524,15 @@ dependencies = [ "wit-bindgen-rt 0.39.0", ] +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -3431,9 +3620,9 @@ dependencies = [ [[package]] name = "wavs-wasi-utils" -version = "0.4.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e8eac2bf7511c34b1efb62175ba84479d5f039b194a64bd1a089cfb65b0381" +checksum = "bcecf1034f84a086b020d9a300d7dd38e4071aa9662371351eb61952a38ddd69" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -3450,7 +3639,6 @@ dependencies = [ "serde_json", "tower-service", "wasi 0.14.2+wasi-0.2.4", - "wit-bindgen-rt 0.42.1", "wstd", ] @@ -3698,6 +3886,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +dependencies = [ + "bitflags", +] + [[package]] name = "wit-bindgen-rt" version = "0.39.0" @@ -3709,13 +3906,11 @@ dependencies = [ [[package]] name = "wit-bindgen-rt" -version = "0.42.1" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051105bab12bc78e161f8dfb3596e772dd6a01ebf9c4840988e00347e744966a" +checksum = "653c85dd7aee6fe6f4bded0d242406deadae9819029ce6f7d258c920c384358a" dependencies = [ "bitflags", - "futures", - "once_cell", ] [[package]] @@ -3726,26 +3921,26 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "wstd" -version = "0.5.3" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6515b13373b9dfbbe62e4426972253a15b74d93b1e2e95f92bbb2801c239a1d3" +checksum = "d0736607b57fcb58dd3148cf34d6a6ca63ba041fde8a12ab3f2c48ddf6d11877" dependencies = [ - "futures-core", + "async-task", "http", "itoa", "pin-project-lite", "serde", 
"serde_json", "slab", - "wasi 0.14.2+wasi-0.2.4", + "wasip2", "wstd-macro", ] [[package]] name = "wstd-macro" -version = "0.5.3" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d1758ed5cdf081802c60bc000a9cb90db8bcdf140fa8a7251a22b32af37d7b" +checksum = "cb142608f932022fa7d155d8ed99649d02c56a50532e71913a5a03c7c4e288d3" dependencies = [ "quote", "syn 2.0.101", diff --git a/Cargo.toml b/Cargo.toml index 158b821a..5f19d4aa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,32 +4,40 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.4.0" +version = "1.0.0" license = "MIT" authors = ["Lay3r Labs Team"] repository = "https://github.com/Lay3rLabs/wavs" -rust-version = "1.80.0" +rust-version = "1.87.0" [workspace.dependencies] # WASI -wit-bindgen-rt = { version = "0.42.1", features = ["bitflags"] } -wit-bindgen = "0.42.1" -wstd = "0.5.3" -wasi = "0.14.1" -wavs-wasi-utils = "0.4.0" +wit-bindgen-rt = { version = "0.44.0", features = ["bitflags"] } +wit-bindgen = "0.44.0" +wstd = "0.5.4" +wasi = "0.14.2" +wavs-wasi-utils = "=1.0.0" # Other +schemars = "1.0.4" serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.140" anyhow = "1.0.98" +async-trait = "0.1.88" +inventory = "0.3" +futures = "0.3.31" + ## Alloy alloy-sol-macro = { version = "1.0.0", features = ["json"] } alloy-sol-types = "1.0.0" -alloy-network = "0.15.10" +alloy-dyn-abi = "1.0.0" +alloy-json-abi = "1.0.0" alloy-primitives = "1.0.0" -alloy-provider = { version = "0.15.10", default-features = false, features = [ +alloy-network = "1.0.0" +alloy-provider = { version = "1.0.0", default-features = false, features = [ "rpc-api", ] } -alloy-rpc-types = "0.15.10" -alloy-contract = "0.15.10" +alloy-rpc-types = "1.0.0" +alloy-contract = "1.0.0" +alloy-transport-http = { version = "1.0.0", default-features = false } diff --git a/Makefile b/Makefile index 824f7edb..619e71af 100644 --- a/Makefile +++ b/Makefile @@ -5,20 +5,19 @@ SUDO := $(shell 
if groups | grep -q docker; then echo ''; else echo 'sudo'; fi) # Define common variables CARGO=cargo -INPUT_DATA?=1 +INPUT_DATA?=`` COMPONENT_FILENAME?=evm_price_oracle.wasm CREDENTIAL?="" -DOCKER_IMAGE?=ghcr.io/lay3rlabs/wavs:35c96a4 -MIDDLEWARE_DOCKER_IMAGE?=ghcr.io/lay3rlabs/wavs-middleware:0.4.1 +DOCKER_IMAGE?=ghcr.io/lay3rlabs/wavs:1.4.1 +MIDDLEWARE_DOCKER_IMAGE?=ghcr.io/lay3rlabs/wavs-middleware:0.5.0-beta.10 IPFS_ENDPOINT?=http://127.0.0.1:5001 RPC_URL?=http://127.0.0.1:8545 SERVICE_FILE?=.docker/service.json -SERVICE_SUBMISSION_ADDR?=`jq -r .deployedTo .docker/submit.json` -SERVICE_TRIGGER_ADDR?=`jq -r .deployedTo .docker/trigger.json` WASI_BUILD_DIR ?= "" ENV_FILE?=.env WAVS_CMD ?= $(SUDO) docker run --rm --network host $$(test -f ${ENV_FILE} && echo "--env-file ./${ENV_FILE}") -v $$(pwd):/data ${DOCKER_IMAGE} wavs-cli WAVS_ENDPOINT?="http://127.0.0.1:8000" +WAVS_SERVICE_MANAGER_ADDRESS?=`task config:service-manager-address` -include ${ENV_FILE} # Default target is build @@ -30,7 +29,8 @@ build: _build_forge wasi-build ## wasi-build: building WAVS wasi components | WASI_BUILD_DIR wasi-build: @echo "🔨 Building WASI components..." 
- @./script/build_components.sh $(WASI_BUILD_DIR) + @warg reset + @task build:wasi WASI_BUILD_DIR=$(WASI_BUILD_DIR) @echo "✅ WASI build complete" ## wasi-exec: executing the WAVS wasi component(s) with ABI function | COMPONENT_FILENAME, INPUT_DATA @@ -98,14 +98,6 @@ setup: check-requirements start-all-local: clean-docker setup-env @sh ./script/start_all.sh -## get-trigger-from-deploy: getting the trigger address from the script deploy -get-trigger-from-deploy: - @jq -r '.deployedTo' "./.docker/trigger.json" - -## get-submit-from-deploy: getting the submit address from the script deploy -get-submit-from-deploy: - @jq -r '.deployedTo' "./.docker/submit.json" - ## wavs-cli: running wavs-cli in docker wavs-cli: @$(WAVS_CMD) $(filter-out $@,$(MAKECMDGOALS)) @@ -134,31 +126,34 @@ deploy-service: fi @if [ -n "${WAVS_ENDPOINT}" ]; then \ echo "🔍 Checking WAVS service at ${WAVS_ENDPOINT}..."; \ - if [ "$$(curl -s -o /dev/null -w "%{http_code}" ${WAVS_ENDPOINT}/app)" != "200" ]; then \ - echo "❌ WAVS service not reachable at ${WAVS_ENDPOINT}"; \ - echo "💡 Re-try running in 1 second, if not then validate the wavs service is online / started."; \ - exit 1; \ - fi; \ - echo "✅ WAVS service is running"; \ + attempt=1; \ + max_attempts=10; \ + while [ $$attempt -le $$max_attempts ]; do \ + if [ "$$(curl -s -o /dev/null -w "%{http_code}" ${WAVS_ENDPOINT}/info)" = "200" ]; then \ + echo "✅ WAVS service is running"; \ + break; \ + else \ + echo "❌ WAVS service not reachable at ${WAVS_ENDPOINT} (attempt $$attempt/$$max_attempts)"; \ + if [ $$attempt -lt $$max_attempts ]; then \ + echo "⏳ Retrying in 5 seconds..."; \ + sleep 5; \ + attempt=$$((attempt + 1)); \ + else \ + echo "❌ Failed after $$max_attempts attempts. Please validate the WAVS service is online/started."; \ + exit 1; \ + fi; \ + fi; \ + done; \ fi @echo "🚀 Deploying service from: ${SERVICE_URL}..." 
@$(WAVS_CMD) deploy-service --service-url ${SERVICE_URL} --log-level=debug --data /data/.docker --home /data $(if $(WAVS_ENDPOINT),--wavs-endpoint $(WAVS_ENDPOINT),) $(if $(IPFS_GATEWAY),--ipfs-gateway $(IPFS_GATEWAY),) @echo "✅ Service deployed successfully" -## get-trigger: get the trigger id | SERVICE_TRIGGER_ADDR, RPC_URL -get-trigger: - @forge script ./script/ShowResult.s.sol ${SERVICE_TRIGGER_ADDR} --sig 'trigger(string)' --rpc-url $(RPC_URL) --broadcast - -TRIGGER_ID?=1 -## show-result: showing the result | SERVICE_SUBMISSION_ADDR, TRIGGER_ID, RPC_URL -show-result: - @forge script ./script/ShowResult.s.sol ${SERVICE_SUBMISSION_ADDR} ${TRIGGER_ID} --sig 'data(string,uint64)' --rpc-url $(RPC_URL) --broadcast - - PINATA_API_KEY?="" ## upload-to-ipfs: uploading the a service config to IPFS | SERVICE_FILE, [PINATA_API_KEY] upload-to-ipfs: - @if [ `sh script/get-deploy-status.sh` = "LOCAL" ]; then \ + @DEPLOY_STATUS="$$(task get-deploy-status)"; \ + if [ "$$DEPLOY_STATUS" = "LOCAL" ]; then \ curl -X POST "http://127.0.0.1:5001/api/v0/add?pin=true" -H "Content-Type: multipart/form-data" -F file=@${SERVICE_FILE} | jq -r .Hash; \ else \ if [ -z "${PINATA_API_KEY}" ]; then \ @@ -173,7 +168,9 @@ PAST_BLOCKS?=500 wavs-middleware: @docker run --rm --network host --env-file ${ENV_FILE} \ $(if ${WAVS_SERVICE_MANAGER_ADDRESS},-e WAVS_SERVICE_MANAGER_ADDRESS=${WAVS_SERVICE_MANAGER_ADDRESS}) \ - $(if ${PAST_BLOCKS},-e PAST_BLOCKS=${PAST_BLOCKS}) \ + $(if ${OPERATOR_KEY},-e OPERATOR_KEY=${OPERATOR_KEY}) \ + $(if ${WAVS_SIGNING_KEY},-e WAVS_SIGNING_KEY=${WAVS_SIGNING_KEY}) \ + $(if ${WAVS_DELEGATE_AMOUNT},-e WAVS_DELEGATE_AMOUNT=${WAVS_DELEGATE_AMOUNT}) \ -v ./.nodes:/root/.nodes ${MIDDLEWARE_DOCKER_IMAGE} ${COMMAND} ## update-submodules: update the git submodules diff --git a/README.md b/README.md index 902f5422..0801a0aa 100644 --- a/README.md +++ b/README.md @@ -144,13 +144,13 @@ Install the required packages to build the Solidity contracts. 
This project supp ```bash # Install packages (npm & submodules) -make setup +task setup # Build the contracts -forge build +task build:forge # Run the solidity tests -forge test +task test ``` ## Build WASI components @@ -169,7 +169,7 @@ Now build the WASI components into the `compiled` output directory. ```bash # Remove `WASI_BUILD_DIR` to build all components. -WASI_BUILD_DIR=components/evm-price-oracle make wasi-build +WASI_BUILD_DIR=components/evm-price-oracle task build:wasi ``` ## Testing the Price Feed Component Locally @@ -178,11 +178,11 @@ How to test the component locally for business logic validation before on-chain ```bash # Rust & Typescript components -INPUT_DATA="1" COMPONENT_FILENAME=evm_price_oracle.wasm make wasi-exec -INPUT_DATA="1" COMPONENT_FILENAME=js_evm_price_oracle.wasm make wasi-exec +INPUT_DATA="1" COMPONENT_FILENAME=evm_price_oracle.wasm task wasi:exec +INPUT_DATA="1" COMPONENT_FILENAME=js_evm_price_oracle.wasm task wasi:exec # Golang -INPUT_DATA="1" COMPONENT_FILENAME=golang_evm_price_oracle.wasm make wasi-exec-fixed +INPUT_DATA="1" COMPONENT_FILENAME=golang_evm_price_oracle.wasm task wasi:exec-fixed ``` Expected output: @@ -237,7 +237,7 @@ cp .env.example .env # update the .env for either LOCAL or TESTNET # Starts anvil + IPFS, WARG, Jaeger, and prometheus. -make start-all-local +task start-all-local ``` ## WAVS Deployment Script @@ -266,10 +266,19 @@ This script automates the complete WAVS deployment process in a single command: A fully operational WAVS service that monitors blockchain events, executes WebAssembly components, and submits verified results on-chain. 
```bash -export RPC_URL=`bash ./script/get-rpc.sh` +# export RPC_URL=`bash ./script/get-rpc.sh` +# export AGGREGATOR_URL=http://127.0.0.1:8001 + +# task deploy:full +export RPC_URL=$(task get-rpc) export AGGREGATOR_URL=http://127.0.0.1:8001 -bash ./script/deploy-script.sh +# TODO: this is merged with deploy-script now +# bash ./script/deploy-contracts.sh + +# deploys contracts & components +# If you do not set the PoA operator you will get `ServiceManagerValidateAnyRevert("0x3dda1739")` +bash ./script/deploy-script.sh && task deploy:single-operator-poa-local ``` @@ -285,15 +294,16 @@ export INPUT_DATA=`cast abi-encode "addTrigger(string)" "1"` # export INPUT_DATA="1" # Get the trigger address from previous Deploy forge script -export SERVICE_TRIGGER_ADDR=`make get-trigger-from-deploy` +export SERVICE_TRIGGER_ADDR=`jq -r '.evmpriceoracle_trigger.deployedTo' .docker/deployment_summary.json` # Execute on the trigger contract, WAVS will pick this up and submit the result # on chain via the operators. # uses FUNDED_KEY as the executor (local: anvil account) -source .env -export RPC_URL=`sh ./script/get-rpc.sh` +# source .env +export RPC_URL=`task get-rpc` +export FUNDED_KEY=`task config:funded-key` -forge script ./script/Trigger.s.sol ${SERVICE_TRIGGER_ADDR} ${INPUT_DATA} --sig 'run(string,string)' --rpc-url ${RPC_URL} --broadcast +forge script ./src/script/Trigger.s.sol ${SERVICE_TRIGGER_ADDR} ${INPUT_DATA} --sig 'run(string,string)' --rpc-url ${RPC_URL} --broadcast --private-key ${FUNDED_KEY} ``` ## Show the result @@ -301,11 +311,12 @@ forge script ./script/Trigger.s.sol ${SERVICE_TRIGGER_ADDR} ${INPUT_DATA} --sig Query the latest submission contract id from the previous request made. 
```bash docci-delay-per-cmd=2 docci-output-contains="1" -RPC_URL=${RPC_URL} make get-trigger +RPC_URL=${RPC_URL} forge script ./src/script/ShowResult.s.sol ${SERVICE_TRIGGER_ADDR} --sig 'trigger(string)' --rpc-url ${RPC_URL} ``` ```bash docci-delay-per-cmd=2 docci-output-contains="BTC" -TRIGGER_ID=1 RPC_URL=${RPC_URL} make show-result +export SERVICE_SUBMIT_ADDR=`jq -r '.evmpriceoracle_submit.deployedTo' .docker/deployment_summary.json` +RPC_URL=${RPC_URL} forge script ./src/script/ShowResult.s.sol ${SERVICE_SUBMIT_ADDR} 4 --sig 'data(string,uint64)' --rpc-url ${RPC_URL} ``` ## AI Coding Agents diff --git a/compiled/README.md b/compiled/README.md new file mode 100644 index 00000000..3c332e33 --- /dev/null +++ b/compiled/README.md @@ -0,0 +1,7 @@ +# Aggregator + +`Tue Sep 30 10:44:26 2025` in EN0VA. + +from github.com/Lay3rlabs/wavs + +Has gas price support with Etherscan API key & reorg support. diff --git a/components/evm-price-oracle/Cargo.toml b/components/evm-price-oracle/Cargo.toml index 036866a7..d62bcfb6 100644 --- a/components/evm-price-oracle/Cargo.toml +++ b/components/evm-price-oracle/Cargo.toml @@ -28,4 +28,4 @@ lto = true [package.metadata.component] package = "component:evm-price-oracle" -target = "wavs:worker/layer-trigger-world@0.4.0" +target = "wavs:operator@=1.2.0" diff --git a/components/evm-price-oracle/Makefile b/components/evm-price-oracle/Makefile index 73239b92..f2597c39 100644 --- a/components/evm-price-oracle/Makefile +++ b/components/evm-price-oracle/Makefile @@ -1,5 +1,6 @@ OUTPUT_DIR?=../../compiled +# WASI_BUILD_DIR=components/evm-price-oracle make wasi-build ## wasi-build: building the WAVS wasi component(s) wasi-build: diff --git a/components/evm-price-oracle/config.json b/components/evm-price-oracle/config.json deleted file mode 100644 index e9de6430..00000000 --- a/components/evm-price-oracle/config.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "__tool": "github.com/reecepbcups/docci", - "paths": [ - "README.md", - "docs/tutorial" - ], 
- "env_vars": {}, - "pre_cmds": [], - "log_level": "ERROR", - "cleanup_cmds": [ - "killall anvil || true", - "docker compose rm --stop --force --volumes || true" - ] -} diff --git a/components/evm-price-oracle/src/bindings.rs b/components/evm-price-oracle/src/bindings.rs index d62d859c..085beeea 100644 --- a/components/evm-price-oracle/src/bindings.rs +++ b/components/evm-price-oracle/src/bindings.rs @@ -1,13 +1,12 @@ // Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! // Options used: // * runtime_path: "wit_bindgen_rt" -pub type TriggerAction = wavs::worker::layer_types::TriggerAction; -pub type WasmResponse = wavs::worker::layer_types::WasmResponse; +pub type TriggerAction = wavs::operator::input::TriggerAction; +pub type WasmResponse = wavs::operator::output::WasmResponse; #[doc(hidden)] #[allow(non_snake_case)] pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { - #[cfg(target_arch = "wasm32")] - _rt::run_ctors_once(); + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); let l0 = *arg0.add(0).cast::<*mut u8>(); let l1 = *arg0.add(::core::mem::size_of::<*const u8>()).cast::(); let len2 = l1; @@ -17,25 +16,37 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { let len5 = l4; let bytes5 = _rt::Vec::from_raw_parts(l3.cast(), len5, len5); let l6 = i32::from(*arg0.add(4 * ::core::mem::size_of::<*const u8>()).cast::()); - use wavs::worker::layer_types::TriggerSource as V41; + use wavs::types::service::Trigger as V41; let v41 = match l6 { 0 => { let e41 = { - let l7 = *arg0.add(8 + 4 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l8 = *arg0.add(8 + 5 * ::core::mem::size_of::<*const u8>()).cast::(); + let l7 = *arg0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l8 = *arg0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len9 = l8; - let l10 = *arg0.add(8 + 6 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l11 = *arg0.add(8 + 7 * ::core::mem::size_of::<*const 
u8>()).cast::(); + let l10 = *arg0 + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l11 = *arg0 + .add(8 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len12 = l11; let bytes12 = _rt::Vec::from_raw_parts(l10.cast(), len12, len12); - let l13 = *arg0.add(8 + 8 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l14 = *arg0.add(8 + 9 * ::core::mem::size_of::<*const u8>()).cast::(); + let l13 = *arg0 + .add(8 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l14 = *arg0 + .add(8 + 9 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len15 = l14; - wavs::worker::layer_types::TriggerSourceEvmContractEvent { - address: wavs::worker::layer_types::EvmAddress { + wavs::types::service::TriggerEvmContractEvent { + address: wavs::types::chain::EvmAddress { raw_bytes: _rt::Vec::from_raw_parts(l7.cast(), len9, len9), }, - chain_name: _rt::string_lift(bytes12), + chain: _rt::string_lift(bytes12), event_hash: _rt::Vec::from_raw_parts(l13.cast(), len15, len15), } }; @@ -43,25 +54,39 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { } 1 => { let e41 = { - let l16 = *arg0.add(8 + 4 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l17 = *arg0.add(8 + 5 * ::core::mem::size_of::<*const u8>()).cast::(); + let l16 = *arg0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l17 = *arg0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len18 = l17; let bytes18 = _rt::Vec::from_raw_parts(l16.cast(), len18, len18); - let l19 = *arg0.add(8 + 6 * ::core::mem::size_of::<*const u8>()).cast::(); - let l20 = *arg0.add(8 + 7 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l21 = *arg0.add(8 + 8 * ::core::mem::size_of::<*const u8>()).cast::(); + let l19 = *arg0 + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l20 = *arg0 + .add(8 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l21 = *arg0 
+ .add(8 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len22 = l21; let bytes22 = _rt::Vec::from_raw_parts(l20.cast(), len22, len22); - let l23 = *arg0.add(8 + 9 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l24 = *arg0.add(8 + 10 * ::core::mem::size_of::<*const u8>()).cast::(); + let l23 = *arg0 + .add(8 + 9 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l24 = *arg0 + .add(8 + 10 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len25 = l24; let bytes25 = _rt::Vec::from_raw_parts(l23.cast(), len25, len25); - wavs::worker::layer_types::TriggerSourceCosmosContractEvent { - address: wavs::worker::layer_types::CosmosAddress { + wavs::types::service::TriggerCosmosContractEvent { + address: wavs::types::chain::CosmosAddress { bech32_addr: _rt::string_lift(bytes18), prefix_len: l19 as u32, }, - chain_name: _rt::string_lift(bytes22), + chain: _rt::string_lift(bytes22), event_type: _rt::string_lift(bytes25), } }; @@ -69,17 +94,25 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { } 2 => { let e41 = { - let l26 = *arg0.add(8 + 4 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l27 = *arg0.add(8 + 5 * ::core::mem::size_of::<*const u8>()).cast::(); + let l26 = *arg0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l27 = *arg0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len28 = l27; let bytes28 = _rt::Vec::from_raw_parts(l26.cast(), len28, len28); - let l29 = *arg0.add(8 + 6 * ::core::mem::size_of::<*const u8>()).cast::(); - let l30 = - i32::from(*arg0.add(16 + 6 * ::core::mem::size_of::<*const u8>()).cast::()); - let l32 = - i32::from(*arg0.add(32 + 6 * ::core::mem::size_of::<*const u8>()).cast::()); - wavs::worker::layer_types::BlockIntervalSource { - chain_name: _rt::string_lift(bytes28), + let l29 = *arg0 + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l30 = i32::from( + *arg0.add(16 + 6 * 
::core::mem::size_of::<*const u8>()).cast::(), + ); + let l32 = i32::from( + *arg0.add(32 + 6 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + wavs::types::service::TriggerBlockInterval { + chain: _rt::string_lift(bytes28), n_blocks: l29 as u32, start_block: match l30 { 0 => None, @@ -113,15 +146,21 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { } 3 => { let e41 = { - let l34 = *arg0.add(8 + 4 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l35 = *arg0.add(8 + 5 * ::core::mem::size_of::<*const u8>()).cast::(); + let l34 = *arg0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l35 = *arg0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len36 = l35; let bytes36 = _rt::Vec::from_raw_parts(l34.cast(), len36, len36); - let l37 = - i32::from(*arg0.add(8 + 6 * ::core::mem::size_of::<*const u8>()).cast::()); - let l39 = - i32::from(*arg0.add(24 + 6 * ::core::mem::size_of::<*const u8>()).cast::()); - wavs::worker::layer_types::TriggerSourceCron { + let l37 = i32::from( + *arg0.add(8 + 6 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + let l39 = i32::from( + *arg0.add(24 + 6 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + wavs::types::service::TriggerCron { schedule: _rt::string_lift(bytes36), start_time: match l37 { 0 => None, @@ -130,7 +169,9 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { let l38 = *arg0 .add(16 + 6 * ::core::mem::size_of::<*const u8>()) .cast::(); - wavs::worker::layer_types::Timestamp { nanos: l38 as u64 } + wavs::types::core::Timestamp { + nanos: l38 as u64, + } }; Some(e) } @@ -143,7 +184,9 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { let l40 = *arg0 .add(32 + 6 * ::core::mem::size_of::<*const u8>()) .cast::(); - wavs::worker::layer_types::Timestamp { nanos: l40 as u64 } + wavs::types::core::Timestamp { + nanos: l40 as u64, + } }; Some(e) } @@ -158,21 +201,34 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 
{ V41::Manual } }; - let l42 = i32::from(*arg0.add(48 + 6 * ::core::mem::size_of::<*const u8>()).cast::()); - use wavs::worker::layer_types::TriggerData as V87; - let v87 = match l42 { + let l42 = i32::from( + *arg0.add(48 + 6 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use wavs::types::events::TriggerData as V98; + let v98 = match l42 { 0 => { - let e87 = { - let l43 = *arg0.add(56 + 6 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l44 = *arg0.add(56 + 7 * ::core::mem::size_of::<*const u8>()).cast::(); + let e98 = { + let l43 = *arg0 + .add(56 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l44 = *arg0 + .add(56 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len45 = l44; - let l46 = *arg0.add(56 + 8 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l47 = *arg0.add(56 + 9 * ::core::mem::size_of::<*const u8>()).cast::(); + let bytes45 = _rt::Vec::from_raw_parts(l43.cast(), len45, len45); + let l46 = *arg0 + .add(56 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l47 = *arg0 + .add(56 + 9 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len48 = l47; - let bytes48 = _rt::Vec::from_raw_parts(l46.cast(), len48, len48); - let l49 = - *arg0.add(56 + 10 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l50 = *arg0.add(56 + 11 * ::core::mem::size_of::<*const u8>()).cast::(); + let l49 = *arg0 + .add(56 + 10 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l50 = *arg0 + .add(56 + 11 * ::core::mem::size_of::<*const u8>()) + .cast::(); let base54 = l49; let len54 = l50; let mut result54 = _rt::Vec::with_capacity(len54); @@ -180,7 +236,9 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { let base = base54.add(i * (2 * ::core::mem::size_of::<*const u8>())); let e54 = { let l51 = *base.add(0).cast::<*mut u8>(); - let l52 = *base.add(::core::mem::size_of::<*const u8>()).cast::(); + let l52 = *base + 
.add(::core::mem::size_of::<*const u8>()) + .cast::(); let len53 = l52; _rt::Vec::from_raw_parts(l51.cast(), len53, len53) }; @@ -191,176 +249,266 @@ pub unsafe fn _export_run_cabi(arg0: *mut u8) -> *mut u8 { len54 * (2 * ::core::mem::size_of::<*const u8>()), ::core::mem::size_of::<*const u8>(), ); - let l55 = - *arg0.add(56 + 12 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l56 = *arg0.add(56 + 13 * ::core::mem::size_of::<*const u8>()).cast::(); + let l55 = *arg0 + .add(56 + 12 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l56 = *arg0 + .add(56 + 13 * ::core::mem::size_of::<*const u8>()) + .cast::(); let len57 = l56; - let l58 = *arg0.add(56 + 14 * ::core::mem::size_of::<*const u8>()).cast::(); - wavs::worker::layer_types::TriggerDataEvmContractEvent { - contract_address: wavs::worker::layer_types::EvmAddress { - raw_bytes: _rt::Vec::from_raw_parts(l43.cast(), len45, len45), - }, - chain_name: _rt::string_lift(bytes48), - log: wavs::worker::layer_types::EvmEventLogData { - topics: result54, - data: _rt::Vec::from_raw_parts(l55.cast(), len57, len57), + let l58 = *arg0 + .add(56 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l59 = *arg0 + .add(56 + 15 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len60 = l59; + let l61 = *arg0 + .add(56 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l62 = *arg0 + .add(64 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l63 = *arg0 + .add(72 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l64 = *arg0 + .add(72 + 17 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len65 = l64; + let l66 = i32::from( + *arg0.add(72 + 18 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + let l68 = *arg0 + .add(88 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(); + wavs::types::events::TriggerDataEvmContractEvent { + chain: _rt::string_lift(bytes45), + log: wavs::types::chain::EvmEventLog { + address: 
wavs::types::chain::EvmAddress { + raw_bytes: _rt::Vec::from_raw_parts(l46.cast(), len48, len48), + }, + data: wavs::types::chain::EvmEventLogData { + topics: result54, + data: _rt::Vec::from_raw_parts(l55.cast(), len57, len57), + }, + tx_hash: _rt::Vec::from_raw_parts(l58.cast(), len60, len60), + block_number: l61 as u64, + log_index: l62 as u64, + block_hash: _rt::Vec::from_raw_parts(l63.cast(), len65, len65), + block_timestamp: match l66 { + 0 => None, + 1 => { + let e = { + let l67 = *arg0 + .add(80 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l67 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + tx_index: l68 as u64, }, - block_height: l58 as u64, } }; - V87::EvmContractEvent(e87) + V98::EvmContractEvent(e98) } 1 => { - let e87 = { - let l59 = *arg0.add(56 + 6 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l60 = *arg0.add(56 + 7 * ::core::mem::size_of::<*const u8>()).cast::(); - let len61 = l60; - let bytes61 = _rt::Vec::from_raw_parts(l59.cast(), len61, len61); - let l62 = *arg0.add(56 + 8 * ::core::mem::size_of::<*const u8>()).cast::(); - let l63 = *arg0.add(56 + 9 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l64 = *arg0.add(56 + 10 * ::core::mem::size_of::<*const u8>()).cast::(); - let len65 = l64; - let bytes65 = _rt::Vec::from_raw_parts(l63.cast(), len65, len65); - let l66 = - *arg0.add(56 + 11 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l67 = *arg0.add(56 + 12 * ::core::mem::size_of::<*const u8>()).cast::(); - let len68 = l67; - let bytes68 = _rt::Vec::from_raw_parts(l66.cast(), len68, len68); - let l69 = - *arg0.add(56 + 13 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l70 = *arg0.add(56 + 14 * ::core::mem::size_of::<*const u8>()).cast::(); - let base77 = l69; - let len77 = l70; - let mut result77 = _rt::Vec::with_capacity(len77); - for i in 0..len77 { - let base = base77.add(i * (4 * ::core::mem::size_of::<*const u8>())); - let e77 = { - let 
l71 = *base.add(0).cast::<*mut u8>(); - let l72 = *base.add(::core::mem::size_of::<*const u8>()).cast::(); - let len73 = l72; - let bytes73 = _rt::Vec::from_raw_parts(l71.cast(), len73, len73); - let l74 = - *base.add(2 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l75 = - *base.add(3 * ::core::mem::size_of::<*const u8>()).cast::(); - let len76 = l75; - let bytes76 = _rt::Vec::from_raw_parts(l74.cast(), len76, len76); - (_rt::string_lift(bytes73), _rt::string_lift(bytes76)) + let e98 = { + let l69 = *arg0 + .add(56 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l70 = *arg0 + .add(56 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len71 = l70; + let bytes71 = _rt::Vec::from_raw_parts(l69.cast(), len71, len71); + let l72 = *arg0 + .add(56 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l73 = *arg0 + .add(56 + 9 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l74 = *arg0 + .add(56 + 10 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len75 = l74; + let bytes75 = _rt::Vec::from_raw_parts(l73.cast(), len75, len75); + let l76 = *arg0 + .add(56 + 11 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l77 = *arg0 + .add(56 + 12 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len78 = l77; + let bytes78 = _rt::Vec::from_raw_parts(l76.cast(), len78, len78); + let l79 = *arg0 + .add(56 + 13 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l80 = *arg0 + .add(56 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base87 = l79; + let len87 = l80; + let mut result87 = _rt::Vec::with_capacity(len87); + for i in 0..len87 { + let base = base87.add(i * (4 * ::core::mem::size_of::<*const u8>())); + let e87 = { + let l81 = *base.add(0).cast::<*mut u8>(); + let l82 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len83 = l82; + let bytes83 = _rt::Vec::from_raw_parts(l81.cast(), len83, len83); + let l84 = *base + 
.add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l85 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len86 = l85; + let bytes86 = _rt::Vec::from_raw_parts(l84.cast(), len86, len86); + (_rt::string_lift(bytes83), _rt::string_lift(bytes86)) }; - result77.push(e77); + result87.push(e87); } _rt::cabi_dealloc( - base77, - len77 * (4 * ::core::mem::size_of::<*const u8>()), + base87, + len87 * (4 * ::core::mem::size_of::<*const u8>()), ::core::mem::size_of::<*const u8>(), ); - let l78 = *arg0.add(64 + 14 * ::core::mem::size_of::<*const u8>()).cast::(); - wavs::worker::layer_types::TriggerDataCosmosContractEvent { - contract_address: wavs::worker::layer_types::CosmosAddress { - bech32_addr: _rt::string_lift(bytes61), - prefix_len: l62 as u32, + let l88 = *arg0 + .add(64 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l89 = *arg0 + .add(72 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::(); + wavs::types::events::TriggerDataCosmosContractEvent { + contract_address: wavs::types::chain::CosmosAddress { + bech32_addr: _rt::string_lift(bytes71), + prefix_len: l72 as u32, }, - chain_name: _rt::string_lift(bytes65), - event: wavs::worker::layer_types::CosmosEvent { - ty: _rt::string_lift(bytes68), - attributes: result77, + chain: _rt::string_lift(bytes75), + event: wavs::types::chain::CosmosEvent { + ty: _rt::string_lift(bytes78), + attributes: result87, }, - block_height: l78 as u64, + event_index: l88 as u64, + block_height: l89 as u64, } }; - V87::CosmosContractEvent(e87) + V98::CosmosContractEvent(e98) } 2 => { - let e87 = { - let l79 = *arg0.add(56 + 6 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l80 = *arg0.add(56 + 7 * ::core::mem::size_of::<*const u8>()).cast::(); - let len81 = l80; - let bytes81 = _rt::Vec::from_raw_parts(l79.cast(), len81, len81); - let l82 = *arg0.add(56 + 8 * ::core::mem::size_of::<*const u8>()).cast::(); - wavs::worker::layer_types::BlockIntervalData { - 
chain_name: _rt::string_lift(bytes81), - block_height: l82 as u64, + let e98 = { + let l90 = *arg0 + .add(56 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l91 = *arg0 + .add(56 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len92 = l91; + let bytes92 = _rt::Vec::from_raw_parts(l90.cast(), len92, len92); + let l93 = *arg0 + .add(56 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + wavs::types::events::TriggerDataBlockInterval { + chain: _rt::string_lift(bytes92), + block_height: l93 as u64, } }; - V87::BlockInterval(e87) + V98::BlockInterval(e98) } 3 => { - let e87 = { - let l83 = *arg0.add(56 + 6 * ::core::mem::size_of::<*const u8>()).cast::(); - wavs::worker::layer_types::TriggerDataCron { - trigger_time: wavs::worker::layer_types::Timestamp { nanos: l83 as u64 }, + let e98 = { + let l94 = *arg0 + .add(56 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + wavs::types::events::TriggerDataCron { + trigger_time: wavs::types::core::Timestamp { + nanos: l94 as u64, + }, } }; - V87::Cron(e87) + V98::Cron(e98) } n => { debug_assert_eq!(n, 4, "invalid enum discriminant"); - let e87 = { - let l84 = *arg0.add(56 + 6 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l85 = *arg0.add(56 + 7 * ::core::mem::size_of::<*const u8>()).cast::(); - let len86 = l85; - _rt::Vec::from_raw_parts(l84.cast(), len86, len86) + let e98 = { + let l95 = *arg0 + .add(56 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l96 = *arg0 + .add(56 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len97 = l96; + _rt::Vec::from_raw_parts(l95.cast(), len97, len97) }; - V87::Raw(e87) + V98::Raw(e98) } }; - let result88 = T::run(wavs::worker::layer_types::TriggerAction { - config: wavs::worker::layer_types::TriggerConfig { + let result99 = T::run(wavs::operator::input::TriggerAction { + config: wavs::operator::input::TriggerConfig { service_id: _rt::string_lift(bytes2), workflow_id: _rt::string_lift(bytes5), 
- trigger_source: v41, + trigger: v41, }, - data: v87, + data: v98, }); - _rt::cabi_dealloc(arg0, 72 + 14 * ::core::mem::size_of::<*const u8>(), 8); - let ptr89 = (&raw mut _RET_AREA.0).cast::(); - match result88 { + _rt::cabi_dealloc(arg0, 96 + 18 * ::core::mem::size_of::<*const u8>(), 8); + let ptr100 = (&raw mut _RET_AREA.0).cast::(); + match result99 { Ok(e) => { - *ptr89.add(0).cast::() = (0i32) as u8; + *ptr100.add(0).cast::() = (0i32) as u8; match e { Some(e) => { - *ptr89.add(8).cast::() = (1i32) as u8; - let wavs::worker::layer_types::WasmResponse { - payload: payload90, - ordering: ordering90, + *ptr100.add(8).cast::() = (1i32) as u8; + let wavs::operator::output::WasmResponse { + payload: payload101, + ordering: ordering101, } = e; - let vec91 = (payload90).into_boxed_slice(); - let ptr91 = vec91.as_ptr().cast::(); - let len91 = vec91.len(); - ::core::mem::forget(vec91); - *ptr89.add(16 + 1 * ::core::mem::size_of::<*const u8>()).cast::() = - len91; - *ptr89.add(16).cast::<*mut u8>() = ptr91.cast_mut(); - match ordering90 { + let vec102 = (payload101).into_boxed_slice(); + let ptr102 = vec102.as_ptr().cast::(); + let len102 = vec102.len(); + ::core::mem::forget(vec102); + *ptr100 + .add(16 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::() = len102; + *ptr100.add(16).cast::<*mut u8>() = ptr102.cast_mut(); + match ordering101 { Some(e) => { - *ptr89.add(16 + 2 * ::core::mem::size_of::<*const u8>()).cast::() = - (1i32) as u8; - *ptr89 + *ptr100 + .add(16 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (1i32) as u8; + *ptr100 .add(24 + 2 * ::core::mem::size_of::<*const u8>()) .cast::() = _rt::as_i64(e); } None => { - *ptr89.add(16 + 2 * ::core::mem::size_of::<*const u8>()).cast::() = - (0i32) as u8; + *ptr100 + .add(16 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (0i32) as u8; } }; } None => { - *ptr89.add(8).cast::() = (0i32) as u8; + *ptr100.add(8).cast::() = (0i32) as u8; } }; } Err(e) => { - *ptr89.add(0).cast::() = (1i32) as u8; 
- let vec92 = (e.into_bytes()).into_boxed_slice(); - let ptr92 = vec92.as_ptr().cast::(); - let len92 = vec92.len(); - ::core::mem::forget(vec92); - *ptr89.add(8 + 1 * ::core::mem::size_of::<*const u8>()).cast::() = len92; - *ptr89.add(8).cast::<*mut u8>() = ptr92.cast_mut(); + *ptr100.add(0).cast::() = (1i32) as u8; + let vec103 = (e.into_bytes()).into_boxed_slice(); + let ptr103 = vec103.as_ptr().cast::(); + let len103 = vec103.len(); + ::core::mem::forget(vec103); + *ptr100.add(8 + 1 * ::core::mem::size_of::<*const u8>()).cast::() = len103; + *ptr100.add(8).cast::<*mut u8>() = ptr103.cast_mut(); } }; - ptr89 + ptr100 } #[doc(hidden)] #[allow(non_snake_case)] @@ -373,8 +521,9 @@ pub unsafe fn __post_return_run(arg0: *mut u8) { 0 => {} _ => { let l2 = *arg0.add(16).cast::<*mut u8>(); - let l3 = - *arg0.add(16 + 1 * ::core::mem::size_of::<*const u8>()).cast::(); + let l3 = *arg0 + .add(16 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); let base4 = l2; let len4 = l3; _rt::cabi_dealloc(base4, len4 * 1, 1); @@ -383,7 +532,9 @@ pub unsafe fn __post_return_run(arg0: *mut u8) { } _ => { let l5 = *arg0.add(8).cast::<*mut u8>(); - let l6 = *arg0.add(8 + 1 * ::core::mem::size_of::<*const u8>()).cast::(); + let l6 = *arg0 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); _rt::cabi_dealloc(l5, l6, 1); } } @@ -392,7 +543,7 @@ pub trait Guest { fn run(trigger_action: TriggerAction) -> Result, _rt::String>; } #[doc(hidden)] -macro_rules! __export_world_layer_trigger_world_cabi { +macro_rules! __export_world_wavs_world_cabi { ($ty:ident with_types_in $($path_to_types:tt)*) => { const _ : () = { #[unsafe (export_name = "run")] unsafe extern "C" fn export_run(arg0 : * mut u8,) -> * mut u8 { unsafe { $($path_to_types)*:: @@ -402,11 +553,14 @@ macro_rules! 
__export_world_layer_trigger_world_cabi { }; } #[doc(hidden)] -pub(crate) use __export_world_layer_trigger_world_cabi; +pub(crate) use __export_world_wavs_world_cabi; #[repr(align(8))] -struct _RetArea([::core::mem::MaybeUninit; 32 + 2 * ::core::mem::size_of::<*const u8>()]); -static mut _RET_AREA: _RetArea = - _RetArea([::core::mem::MaybeUninit::uninit(); 32 + 2 * ::core::mem::size_of::<*const u8>()]); +struct _RetArea( + [::core::mem::MaybeUninit; 32 + 2 * ::core::mem::size_of::<*const u8>()], +); +static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 32 + 2 * ::core::mem::size_of::<*const u8>()], +); #[rustfmt::skip] #[allow(dead_code, clippy::all)] pub mod wasi { @@ -11198,169 +11352,68 @@ pub mod wasi { } } } - pub mod random { + pub mod keyvalue { #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod random { + pub mod store { #[used] #[doc(hidden)] static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; use super::super::super::_rt; - #[allow(unused_unsafe, clippy::all)] - pub fn get_random_bytes(len: u64) -> _rt::Vec { - unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea( - [::core::mem::MaybeUninit< - u8, - >; 2 * ::core::mem::size_of::<*const u8>()], - ); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2 - * ::core::mem::size_of::<*const u8>()], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:random/random@0.2.0")] - unsafe extern "C" { - #[link_name = "get-random-bytes"] - fn wit_import1(_: i64, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i64, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1(_rt::as_i64(&len), ptr0) }; - let l2 = *ptr0.add(0).cast::<*mut u8>(); - let l3 = *ptr0 - .add(::core::mem::size_of::<*const u8>()) 
- .cast::(); - let len4 = l3; - let result5 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); - result5 - } + #[derive(Clone)] + pub enum Error { + NoSuchStore, + AccessDenied, + Other(_rt::String), } - #[allow(unused_unsafe, clippy::all)] - pub fn get_random_u64() -> u64 { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:random/random@0.2.0")] - unsafe extern "C" { - #[link_name = "get-random-u64"] - fn wit_import0() -> i64; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0() -> i64 { - unreachable!() + impl ::core::fmt::Debug for Error { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + Error::NoSuchStore => { + f.debug_tuple("Error::NoSuchStore").finish() + } + Error::AccessDenied => { + f.debug_tuple("Error::AccessDenied").finish() + } + Error::Other(e) => { + f.debug_tuple("Error::Other").field(e).finish() + } } - let ret = unsafe { wit_import0() }; - ret as u64 } } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod insecure { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - #[allow(unused_unsafe, clippy::all)] - pub fn get_insecure_random_bytes(len: u64) -> _rt::Vec { - unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea( - [::core::mem::MaybeUninit< - u8, - >; 2 * ::core::mem::size_of::<*const u8>()], - ); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2 - * ::core::mem::size_of::<*const u8>()], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:random/insecure@0.2.0")] - unsafe extern "C" { - #[link_name = "get-insecure-random-bytes"] - fn wit_import1(_: i64, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe 
extern "C" fn wit_import1(_: i64, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1(_rt::as_i64(&len), ptr0) }; - let l2 = *ptr0.add(0).cast::<*mut u8>(); - let l3 = *ptr0 - .add(::core::mem::size_of::<*const u8>()) - .cast::(); - let len4 = l3; - let result5 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); - result5 + impl ::core::fmt::Display for Error { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + write!(f, "{:?}", self) } } - #[allow(unused_unsafe, clippy::all)] - pub fn get_insecure_random_u64() -> u64 { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:random/insecure@0.2.0")] - unsafe extern "C" { - #[link_name = "get-insecure-random-u64"] - fn wit_import0() -> i64; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0() -> i64 { - unreachable!() - } - let ret = unsafe { wit_import0() }; - ret as u64 - } + impl std::error::Error for Error {} + #[derive(Clone)] + pub struct KeyResponse { + pub keys: _rt::Vec<_rt::String>, + pub cursor: Option<_rt::String>, } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod insecure_seed { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - #[allow(unused_unsafe, clippy::all)] - pub fn insecure_seed() -> (u64, u64) { - unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); - let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 16]); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:random/insecure-seed@0.2.0")] - unsafe extern "C" { - #[link_name = "insecure-seed"] - fn wit_import1(_: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: *mut u8) { - unreachable!() - } - unsafe { wit_import1(ptr0) }; - let l2 = *ptr0.add(0).cast::(); - let l3 = *ptr0.add(8).cast::(); - let 
result4 = (l2 as u64, l3 as u64); - result4 + impl ::core::fmt::Debug for KeyResponse { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("KeyResponse") + .field("keys", &self.keys) + .field("cursor", &self.cursor) + .finish() } } - } - } - pub mod sockets { - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod network { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; #[derive(Debug)] #[repr(transparent)] - pub struct Network { - handle: _rt::Resource, + pub struct Bucket { + handle: _rt::Resource, } - impl Network { + impl Bucket { #[doc(hidden)] pub unsafe fn from_handle(handle: u32) -> Self { Self { @@ -11376,385 +11429,664 @@ pub mod wasi { _rt::Resource::handle(&self.handle) } } - unsafe impl _rt::WasmResource for Network { + unsafe impl _rt::WasmResource for Bucket { #[inline] unsafe fn drop(_handle: u32) { #[cfg(not(target_arch = "wasm32"))] unreachable!(); #[cfg(target_arch = "wasm32")] { - #[link(wasm_import_module = "wasi:sockets/network@0.2.0")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] unsafe extern "C" { - #[link_name = "[resource-drop]network"] + #[link_name = "[resource-drop]bucket"] fn drop(_: u32); } unsafe { drop(_handle) }; } } } - #[repr(u8)] - #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] - pub enum ErrorCode { - Unknown, - AccessDenied, - NotSupported, - InvalidArgument, - OutOfMemory, - Timeout, - ConcurrencyConflict, - NotInProgress, - WouldBlock, - InvalidState, - NewSocketLimit, - AddressNotBindable, - AddressInUse, - RemoteUnreachable, - ConnectionRefused, - ConnectionReset, - ConnectionAborted, - DatagramTooLarge, - NameUnresolvable, - TemporaryResolverFailure, - PermanentResolverFailure, - } - impl ErrorCode { - pub fn name(&self) -> &'static str { - match self { - ErrorCode::Unknown => "unknown", - 
ErrorCode::AccessDenied => "access-denied", - ErrorCode::NotSupported => "not-supported", - ErrorCode::InvalidArgument => "invalid-argument", - ErrorCode::OutOfMemory => "out-of-memory", - ErrorCode::Timeout => "timeout", - ErrorCode::ConcurrencyConflict => "concurrency-conflict", - ErrorCode::NotInProgress => "not-in-progress", - ErrorCode::WouldBlock => "would-block", - ErrorCode::InvalidState => "invalid-state", - ErrorCode::NewSocketLimit => "new-socket-limit", - ErrorCode::AddressNotBindable => "address-not-bindable", - ErrorCode::AddressInUse => "address-in-use", - ErrorCode::RemoteUnreachable => "remote-unreachable", - ErrorCode::ConnectionRefused => "connection-refused", - ErrorCode::ConnectionReset => "connection-reset", - ErrorCode::ConnectionAborted => "connection-aborted", - ErrorCode::DatagramTooLarge => "datagram-too-large", - ErrorCode::NameUnresolvable => "name-unresolvable", - ErrorCode::TemporaryResolverFailure => { - "temporary-resolver-failure" - } - ErrorCode::PermanentResolverFailure => { - "permanent-resolver-failure" - } - } - } - pub fn message(&self) -> &'static str { - match self { - ErrorCode::Unknown => "", - ErrorCode::AccessDenied => "", - ErrorCode::NotSupported => "", - ErrorCode::InvalidArgument => "", - ErrorCode::OutOfMemory => "", - ErrorCode::Timeout => "", - ErrorCode::ConcurrencyConflict => "", - ErrorCode::NotInProgress => "", - ErrorCode::WouldBlock => "", - ErrorCode::InvalidState => "", - ErrorCode::NewSocketLimit => "", - ErrorCode::AddressNotBindable => "", - ErrorCode::AddressInUse => "", - ErrorCode::RemoteUnreachable => "", - ErrorCode::ConnectionRefused => "", - ErrorCode::ConnectionReset => "", - ErrorCode::ConnectionAborted => "", - ErrorCode::DatagramTooLarge => "", - ErrorCode::NameUnresolvable => "", - ErrorCode::TemporaryResolverFailure => "", - ErrorCode::PermanentResolverFailure => "", - } - } - } - impl ::core::fmt::Debug for ErrorCode { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> 
::core::fmt::Result { - f.debug_struct("ErrorCode") - .field("code", &(*self as i32)) - .field("name", &self.name()) - .field("message", &self.message()) - .finish() - } - } - impl ::core::fmt::Display for ErrorCode { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - write!(f, "{} (error {})", self.name(), * self as i32) - } - } - impl std::error::Error for ErrorCode {} - impl ErrorCode { - #[doc(hidden)] - pub unsafe fn _lift(val: u8) -> ErrorCode { - if !cfg!(debug_assertions) { - return ::core::mem::transmute(val); - } - match val { - 0 => ErrorCode::Unknown, - 1 => ErrorCode::AccessDenied, - 2 => ErrorCode::NotSupported, - 3 => ErrorCode::InvalidArgument, - 4 => ErrorCode::OutOfMemory, - 5 => ErrorCode::Timeout, - 6 => ErrorCode::ConcurrencyConflict, - 7 => ErrorCode::NotInProgress, - 8 => ErrorCode::WouldBlock, - 9 => ErrorCode::InvalidState, - 10 => ErrorCode::NewSocketLimit, - 11 => ErrorCode::AddressNotBindable, - 12 => ErrorCode::AddressInUse, - 13 => ErrorCode::RemoteUnreachable, - 14 => ErrorCode::ConnectionRefused, - 15 => ErrorCode::ConnectionReset, - 16 => ErrorCode::ConnectionAborted, - 17 => ErrorCode::DatagramTooLarge, - 18 => ErrorCode::NameUnresolvable, - 19 => ErrorCode::TemporaryResolverFailure, - 20 => ErrorCode::PermanentResolverFailure, - _ => panic!("invalid enum discriminant"), - } - } - } - #[repr(u8)] - #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] - pub enum IpAddressFamily { - Ipv4, - Ipv6, - } - impl ::core::fmt::Debug for IpAddressFamily { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - IpAddressFamily::Ipv4 => { - f.debug_tuple("IpAddressFamily::Ipv4").finish() + impl Bucket { + #[allow(unused_unsafe, clippy::all)] + pub fn get(&self, key: &str) -> Result>, Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + 
[::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "[method]bucket.get"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); } - IpAddressFamily::Ipv6 => { - f.debug_tuple("IpAddressFamily::Ipv6").finish() + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() } + unsafe { + wit_import2( + (self).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, + ) + }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result13 = match l3 { + 0 => { + let e = { + let l4 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + match l4 { + 0 => None, + 1 => { + let e = { + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l6 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len7 = l6; + _rt::Vec::from_raw_parts(l5.cast(), len7, len7) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + } + }; + Ok(e) + } + 1 => { + let e = { + let l8 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v12 = match l8 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e12 = { + let l9 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l10 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len11 = l10; + let bytes11 = _rt::Vec::from_raw_parts( + l9.cast(), + len11, + len11, + ); + _rt::string_lift(bytes11) + }; + Error::Other(e12) + } 
+ }; + v12 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result13 } } } - impl IpAddressFamily { - #[doc(hidden)] - pub unsafe fn _lift(val: u8) -> IpAddressFamily { - if !cfg!(debug_assertions) { - return ::core::mem::transmute(val); - } - match val { - 0 => IpAddressFamily::Ipv4, - 1 => IpAddressFamily::Ipv6, - _ => panic!("invalid enum discriminant"), + impl Bucket { + #[allow(unused_unsafe, clippy::all)] + pub fn set(&self, key: &str, value: &[u8]) -> Result<(), Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let vec1 = value; + let ptr1 = vec1.as_ptr().cast::(); + let len1 = vec1.len(); + let ptr2 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "[method]bucket.set"] + fn wit_import3( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + _: usize, + _: *mut u8, + ); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import3( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import3( + (self).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1.cast_mut(), + len1, + ptr2, + ) + }; + let l4 = i32::from(*ptr2.add(0).cast::()); + let result10 = match l4 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l5 = i32::from( + *ptr2.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v9 = match l5 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let 
l6 = *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr2 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + Error::Other(e9) + } + }; + v9 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result10 } } } - pub type Ipv4Address = (u8, u8, u8, u8); - pub type Ipv6Address = (u16, u16, u16, u16, u16, u16, u16, u16); - #[derive(Clone, Copy)] - pub enum IpAddress { - Ipv4(Ipv4Address), - Ipv6(Ipv6Address), - } - impl ::core::fmt::Debug for IpAddress { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - IpAddress::Ipv4(e) => { - f.debug_tuple("IpAddress::Ipv4").field(e).finish() + impl Bucket { + #[allow(unused_unsafe, clippy::all)] + pub fn delete(&self, key: &str) -> Result<(), Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "[method]bucket.delete"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); } - IpAddress::Ipv6(e) => { - f.debug_tuple("IpAddress::Ipv6").field(e).finish() + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() } + unsafe { + wit_import2( + (self).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, + ) + }; + let l3 = 
i32::from(*ptr1.add(0).cast::()); + let result9 = match l3 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v8 = match l4 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e8 = { + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l6 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len7 = l6; + let bytes7 = _rt::Vec::from_raw_parts( + l5.cast(), + len7, + len7, + ); + _rt::string_lift(bytes7) + }; + Error::Other(e8) + } + }; + v8 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result9 } } } - #[repr(C)] - #[derive(Clone, Copy)] - pub struct Ipv4SocketAddress { - pub port: u16, - pub address: Ipv4Address, - } - impl ::core::fmt::Debug for Ipv4SocketAddress { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("Ipv4SocketAddress") - .field("port", &self.port) - .field("address", &self.address) - .finish() - } - } - #[repr(C)] - #[derive(Clone, Copy)] - pub struct Ipv6SocketAddress { - pub port: u16, - pub flow_info: u32, - pub address: Ipv6Address, - pub scope_id: u32, - } - impl ::core::fmt::Debug for Ipv6SocketAddress { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("Ipv6SocketAddress") - .field("port", &self.port) - .field("flow-info", &self.flow_info) - .field("address", &self.address) - .field("scope-id", &self.scope_id) - .finish() - } - } - #[derive(Clone, Copy)] - pub enum IpSocketAddress { - Ipv4(Ipv4SocketAddress), - Ipv6(Ipv6SocketAddress), - } - impl ::core::fmt::Debug for IpSocketAddress { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - IpSocketAddress::Ipv4(e) => { - f.debug_tuple("IpSocketAddress::Ipv4").field(e).finish() + impl Bucket 
{ + #[allow(unused_unsafe, clippy::all)] + pub fn exists(&self, key: &str) -> Result { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "[method]bucket.exists"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); } - IpSocketAddress::Ipv6(e) => { - f.debug_tuple("IpSocketAddress::Ipv6").field(e).finish() + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() } + unsafe { + wit_import2( + (self).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, + ) + }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result10 = match l3 { + 0 => { + let e = { + let l4 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + _rt::bool_lift(l4 as u8) + }; + Ok(e) + } + 1 => { + let e = { + let l5 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v9 = match l5 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + Error::Other(e9) + } + }; + v9 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + 
result10 } } } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod instance_network { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - pub type Network = super::super::super::wasi::sockets::network::Network; - #[allow(unused_unsafe, clippy::all)] - pub fn instance_network() -> Network { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/instance-network@0.2.0")] - unsafe extern "C" { - #[link_name = "instance-network"] - fn wit_import0() -> i32; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0() -> i32 { - unreachable!() - } - let ret = unsafe { wit_import0() }; - unsafe { - super::super::super::wasi::sockets::network::Network::from_handle( - ret as u32, - ) - } - } - } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod udp { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - pub type Pollable = super::super::super::wasi::io::poll::Pollable; - pub type Network = super::super::super::wasi::sockets::network::Network; - pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; - pub type IpSocketAddress = super::super::super::wasi::sockets::network::IpSocketAddress; - pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; - #[derive(Clone)] - pub struct IncomingDatagram { - pub data: _rt::Vec, - pub remote_address: IpSocketAddress, - } - impl ::core::fmt::Debug for IncomingDatagram { - fn fmt( + impl Bucket { + #[allow(unused_unsafe, clippy::all)] + pub fn list_keys( &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("IncomingDatagram") - .field("data", &self.data) - .field("remote-address", &self.remote_address) - .finish() - } - } - #[derive(Clone)] - pub 
struct OutgoingDatagram { - pub data: _rt::Vec, - pub remote_address: Option, - } - impl ::core::fmt::Debug for OutgoingDatagram { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("OutgoingDatagram") - .field("data", &self.data) - .field("remote-address", &self.remote_address) - .finish() - } - } - #[derive(Debug)] - #[repr(transparent)] - pub struct UdpSocket { - handle: _rt::Resource, - } - impl UdpSocket { - #[doc(hidden)] - pub unsafe fn from_handle(handle: u32) -> Self { - Self { - handle: unsafe { _rt::Resource::from_handle(handle) }, + cursor: Option<&str>, + ) -> Result { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 6 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 6 + * ::core::mem::size_of::<*const u8>()], + ); + let (result1_0, result1_1, result1_2) = match cursor { + Some(e) => { + let vec0 = e; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + (1i32, ptr0.cast_mut(), len0) + } + None => (0i32, ::core::ptr::null_mut(), 0usize), + }; + let ptr2 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "[method]bucket.list-keys"] + fn wit_import3( + _: i32, + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import3( + _: i32, + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import3( + (self).handle() as i32, + result1_0, + result1_1, + result1_2, + ptr2, + ) + }; + let l4 = i32::from(*ptr2.add(0).cast::()); + let result20 = match l4 { + 0 => { + let e = { + let l5 = *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l6 = *ptr2 + 
.add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base10 = l5; + let len10 = l6; + let mut result10 = _rt::Vec::with_capacity(len10); + for i in 0..len10 { + let base = base10 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e10 = { + let l7 = *base.add(0).cast::<*mut u8>(); + let l8 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len9 = l8; + let bytes9 = _rt::Vec::from_raw_parts( + l7.cast(), + len9, + len9, + ); + _rt::string_lift(bytes9) + }; + result10.push(e10); + } + _rt::cabi_dealloc( + base10, + len10 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l11 = i32::from( + *ptr2 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + KeyResponse { + keys: result10, + cursor: match l11 { + 0 => None, + 1 => { + let e = { + let l12 = *ptr2 + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l13 = *ptr2 + .add(5 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len14 = l13; + let bytes14 = _rt::Vec::from_raw_parts( + l12.cast(), + len14, + len14, + ); + _rt::string_lift(bytes14) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + } + }; + Ok(e) + } + 1 => { + let e = { + let l15 = i32::from( + *ptr2.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v19 = match l15 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e19 = { + let l16 = *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l17 = *ptr2 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len18 = l17; + let bytes18 = _rt::Vec::from_raw_parts( + l16.cast(), + len18, + len18, + ); + _rt::string_lift(bytes18) + }; + Error::Other(e19) + } + }; + v19 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result20 } } - #[doc(hidden)] - pub fn take_handle(&self) -> u32 { - _rt::Resource::take_handle(&self.handle) - } 
- #[doc(hidden)] - pub fn handle(&self) -> u32 { - _rt::Resource::handle(&self.handle) - } } - unsafe impl _rt::WasmResource for UdpSocket { - #[inline] - unsafe fn drop(_handle: u32) { - #[cfg(not(target_arch = "wasm32"))] - unreachable!(); + #[allow(unused_unsafe, clippy::all)] + pub fn open(identifier: &str) -> Result { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = identifier; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - { - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] - unsafe extern "C" { - #[link_name = "[resource-drop]udp-socket"] - fn drop(_: u32); - } - unsafe { drop(_handle) }; + #[link(wasm_import_module = "wasi:keyvalue/store@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "open"] + fn wit_import2(_: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unreachable!() } + unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result10 = match l3 { + 0 => { + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + unsafe { Bucket::from_handle(l4 as u32) } + }; + Ok(e) + } + 1 => { + let e = { + let l5 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v9 = match l5 { + 0 => Error::NoSuchStore, + 1 => Error::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr1 + .add(3 * 
::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + Error::Other(e9) + } + }; + v9 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result10 } } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod atomics { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Bucket = super::super::super::wasi::keyvalue::store::Bucket; + pub type Error = super::super::super::wasi::keyvalue::store::Error; #[derive(Debug)] #[repr(transparent)] - pub struct IncomingDatagramStream { - handle: _rt::Resource, + pub struct Cas { + handle: _rt::Resource, } - impl IncomingDatagramStream { + impl Cas { #[doc(hidden)] pub unsafe fn from_handle(handle: u32) -> Self { Self { @@ -11770,206 +12102,137 @@ pub mod wasi { _rt::Resource::handle(&self.handle) } } - unsafe impl _rt::WasmResource for IncomingDatagramStream { + unsafe impl _rt::WasmResource for Cas { #[inline] unsafe fn drop(_handle: u32) { #[cfg(not(target_arch = "wasm32"))] unreachable!(); #[cfg(target_arch = "wasm32")] { - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link( + wasm_import_module = "wasi:keyvalue/atomics@0.2.0-draft2" + )] unsafe extern "C" { - #[link_name = "[resource-drop]incoming-datagram-stream"] + #[link_name = "[resource-drop]cas"] fn drop(_: u32); } unsafe { drop(_handle) }; } } } - #[derive(Debug)] - #[repr(transparent)] - pub struct OutgoingDatagramStream { - handle: _rt::Resource, + pub enum CasError { + StoreError(Error), + CasFailed(Cas), } - impl OutgoingDatagramStream { - #[doc(hidden)] - pub unsafe fn from_handle(handle: u32) -> Self { - Self { - handle: unsafe { _rt::Resource::from_handle(handle) }, + impl ::core::fmt::Debug for CasError { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> 
::core::fmt::Result { + match self { + CasError::StoreError(e) => { + f.debug_tuple("CasError::StoreError").field(e).finish() + } + CasError::CasFailed(e) => { + f.debug_tuple("CasError::CasFailed").field(e).finish() + } } } - #[doc(hidden)] - pub fn take_handle(&self) -> u32 { - _rt::Resource::take_handle(&self.handle) - } - #[doc(hidden)] - pub fn handle(&self) -> u32 { - _rt::Resource::handle(&self.handle) - } } - unsafe impl _rt::WasmResource for OutgoingDatagramStream { - #[inline] - unsafe fn drop(_handle: u32) { - #[cfg(not(target_arch = "wasm32"))] - unreachable!(); - #[cfg(target_arch = "wasm32")] - { - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] - unsafe extern "C" { - #[link_name = "[resource-drop]outgoing-datagram-stream"] - fn drop(_: u32); - } - unsafe { drop(_handle) }; - } + impl ::core::fmt::Display for CasError { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + write!(f, "{:?}", self) } } - impl UdpSocket { + impl std::error::Error for CasError {} + impl Cas { #[allow(unused_unsafe, clippy::all)] - pub fn start_bind( - &self, - network: &Network, - local_address: IpSocketAddress, - ) -> Result<(), ErrorCode> { + pub fn new(bucket: &Bucket, key: &str) -> Result { unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], ); - use super::super::super::wasi::sockets::network::IpSocketAddress as V4; - let ( - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ) = match local_address { - V4::Ipv4(e) => { - let 
super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: port0, - address: address0, - } = e; - let (t1_0, t1_1, t1_2, t1_3) = address0; - ( - 0i32, - _rt::as_i32(port0), - _rt::as_i32(t1_0), - _rt::as_i32(t1_1), - _rt::as_i32(t1_2), - _rt::as_i32(t1_3), - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - ) - } - V4::Ipv6(e) => { - let super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: port2, - flow_info: flow_info2, - address: address2, - scope_id: scope_id2, - } = e; - let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; - ( - 1i32, - _rt::as_i32(port2), - _rt::as_i32(flow_info2), - _rt::as_i32(t3_0), - _rt::as_i32(t3_1), - _rt::as_i32(t3_2), - _rt::as_i32(t3_3), - _rt::as_i32(t3_4), - _rt::as_i32(t3_5), - _rt::as_i32(t3_6), - _rt::as_i32(t3_7), - _rt::as_i32(scope_id2), - ) - } - }; - let ptr6 = ret_area.0.as_mut_ptr().cast::(); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link( + wasm_import_module = "wasi:keyvalue/atomics@0.2.0-draft2" + )] unsafe extern "C" { - #[link_name = "[method]udp-socket.start-bind"] - fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ); + #[link_name = "[static]cas.new"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, + unsafe extern "C" fn wit_import2( _: i32, _: *mut u8, + _: usize, + _: *mut u8, ) { unreachable!() } unsafe { - wit_import7( - (self).handle() as i32, - (network).handle() as i32, - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, 
- result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ptr6, + wit_import2( + (bucket).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, ) }; - let l8 = i32::from(*ptr6.add(0).cast::()); - let result10 = match l8 { + let l3 = i32::from(*ptr1.add(0).cast::()); + let result10 = match l3 { 0 => { - let e = (); + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + unsafe { Cas::from_handle(l4 as u32) } + }; Ok(e) } 1 => { let e = { - let l9 = i32::from(*ptr6.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l9 as u8, - ) + let l5 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V9; + let v9 = match l5 { + 0 => V9::NoSuchStore, + 1 => V9::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + V9::Other(e9) + } + }; + v9 }; Err(e) } @@ -11979,20 +12242,28 @@ pub mod wasi { } } } - impl UdpSocket { + impl Cas { #[allow(unused_unsafe, clippy::all)] - pub fn finish_bind(&self) -> Result<(), ErrorCode> { + pub fn current(&self) -> Result>, Error> { unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - 
#[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link( + wasm_import_module = "wasi:keyvalue/atomics@0.2.0-draft2" + )] unsafe extern "C" { - #[link_name = "[method]udp-socket.finish-bind"] + #[link_name = "[method]cas.current"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -12001,557 +12272,3021 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { + let result12 = match l2 { 0 => { - let e = (); + let e = { + let l3 = i32::from( + *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + match l3 { + 0 => None, + 1 => { + let e = { + let l4 = *ptr0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *ptr0 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len6 = l5; + _rt::Vec::from_raw_parts(l4.cast(), len6, len6) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + } + }; Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) + let l7 = i32::from( + *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V11; + let v11 = match l7 { + 0 => V11::NoSuchStore, + 1 => V11::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e11 = { + let l8 = *ptr0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l9 = *ptr0 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len10 = l9; + let bytes10 = _rt::Vec::from_raw_parts( + l8.cast(), + len10, + len10, + ); + _rt::string_lift(bytes10) + }; + V11::Other(e11) + } + }; + v11 }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 + result12 } } } - impl UdpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn stream( - &self, - remote_address: Option, - ) -> Result< - (IncomingDatagramStream, 
OutgoingDatagramStream), - ErrorCode, - > { + #[allow(unused_unsafe, clippy::all)] + pub fn increment( + bucket: &Bucket, + key: &str, + delta: i64, + ) -> Result { + unsafe { + #[repr(align(8))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 16 + 2 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 16 + + 2 * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/atomics@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "increment"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: i64, + _: *mut u8, + ) { + unreachable!() + } unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 12]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 12], - ); - let ( - result6_0, - result6_1, - result6_2, - result6_3, - result6_4, - result6_5, - result6_6, - result6_7, - result6_8, - result6_9, - result6_10, - result6_11, - result6_12, - ) = match remote_address { - Some(e) => { - use super::super::super::wasi::sockets::network::IpSocketAddress as V4; - let ( - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ) = match e { - V4::Ipv4(e) => { - let super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: port0, - address: address0, - } = e; - let (t1_0, t1_1, t1_2, t1_3) = address0; - ( - 0i32, - _rt::as_i32(port0), - _rt::as_i32(t1_0), - _rt::as_i32(t1_1), - _rt::as_i32(t1_2), - _rt::as_i32(t1_3), - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - ) + wit_import2( + (bucket).handle() as i32, + ptr0.cast_mut(), + len0, + 
_rt::as_i64(&delta), + ptr1, + ) + }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result10 = match l3 { + 0 => { + let e = { + let l4 = *ptr1.add(8).cast::(); + l4 + }; + Ok(e) + } + 1 => { + let e = { + let l5 = i32::from(*ptr1.add(8).cast::()); + use super::super::super::wasi::keyvalue::store::Error as V9; + let v9 = match l5 { + 0 => V9::NoSuchStore, + 1 => V9::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr1 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr1 + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + V9::Other(e9) } - V4::Ipv6(e) => { - let super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: port2, - flow_info: flow_info2, - address: address2, - scope_id: scope_id2, - } = e; - let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; - ( - 1i32, - _rt::as_i32(port2), - _rt::as_i32(flow_info2), - _rt::as_i32(t3_0), - _rt::as_i32(t3_1), - _rt::as_i32(t3_2), - _rt::as_i32(t3_3), - _rt::as_i32(t3_4), - _rt::as_i32(t3_5), - _rt::as_i32(t3_6), - _rt::as_i32(t3_7), - _rt::as_i32(scope_id2), - ) + }; + v9 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result10 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn swap(cas: Cas, value: &[u8]) -> Result<(), CasError> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 5 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 5 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = value; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] 
+ #[link(wasm_import_module = "wasi:keyvalue/atomics@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "swap"] + fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import2( + (&cas).take_handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, + ) + }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result12 = match l3 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from( + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + let v11 = match l4 { + 0 => { + let e11 = { + let l5 = i32::from( + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V9; + let v9 = match l5 { + 0 => V9::NoSuchStore, + 1 => V9::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr1 + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + V9::Other(e9) + } + }; + v9 + }; + CasError::StoreError(e11) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e11 = { + let l10 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + unsafe { Cas::from_handle(l10 as u32) } + }; + CasError::CasFailed(e11) } }; - ( - 1i32, - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ) + v11 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result12 + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod batch { + #[used] + #[doc(hidden)] + static 
__FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Bucket = super::super::super::wasi::keyvalue::store::Bucket; + pub type Error = super::super::super::wasi::keyvalue::store::Error; + #[allow(unused_unsafe, clippy::all)] + pub fn get_many( + bucket: &Bucket, + keys: &[_rt::String], + ) -> Result<_rt::Vec)>>, Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec1 = keys; + let len1 = vec1.len(); + let layout1 = _rt::alloc::Layout::from_size_align_unchecked( + vec1.len() * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let result1 = if layout1.size() != 0 { + let ptr = _rt::alloc::alloc(layout1).cast::(); + if ptr.is_null() { + _rt::alloc::handle_alloc_error(layout1); + } + ptr + } else { + ::core::ptr::null_mut() + }; + for (i, e) in vec1.into_iter().enumerate() { + let base = result1 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + { + let vec0 = e; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + *base + .add(::core::mem::size_of::<*const u8>()) + .cast::() = len0; + *base.add(0).cast::<*mut u8>() = ptr0.cast_mut(); + } + } + let ptr2 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/batch@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "get-many"] + fn wit_import3(_: i32, _: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import3( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import3((bucket).handle() as i32, result1, len1, ptr2) + }; + 
let l4 = i32::from(*ptr2.add(0).cast::()); + let result20 = match l4 { + 0 => { + let e = { + let l5 = *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l6 = *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base14 = l5; + let len14 = l6; + let mut result14 = _rt::Vec::with_capacity(len14); + for i in 0..len14 { + let base = base14 + .add(i * (5 * ::core::mem::size_of::<*const u8>())); + let e14 = { + let l7 = i32::from(*base.add(0).cast::()); + match l7 { + 0 => None, + 1 => { + let e = { + let l8 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l9 = *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len10 = l9; + let bytes10 = _rt::Vec::from_raw_parts( + l8.cast(), + len10, + len10, + ); + let l11 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l12 = *base + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len13 = l12; + ( + _rt::string_lift(bytes10), + _rt::Vec::from_raw_parts(l11.cast(), len13, len13), + ) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + } + }; + result14.push(e14); + } + _rt::cabi_dealloc( + base14, + len14 * (5 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result14 + }; + Ok(e) + } + 1 => { + let e = { + let l15 = i32::from( + *ptr2.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V19; + let v19 = match l15 { + 0 => V19::NoSuchStore, + 1 => V19::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e19 = { + let l16 = *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l17 = *ptr2 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len18 = l17; + let bytes18 = _rt::Vec::from_raw_parts( + l16.cast(), + len18, + len18, + ); + _rt::string_lift(bytes18) + }; + V19::Other(e19) + } + }; + v19 + }; + 
Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + if layout1.size() != 0 { + _rt::alloc::dealloc(result1.cast(), layout1); + } + result20 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn set_many( + bucket: &Bucket, + key_values: &[(_rt::String, _rt::Vec)], + ) -> Result<(), Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec3 = key_values; + let len3 = vec3.len(); + let layout3 = _rt::alloc::Layout::from_size_align_unchecked( + vec3.len() * (4 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let result3 = if layout3.size() != 0 { + let ptr = _rt::alloc::alloc(layout3).cast::(); + if ptr.is_null() { + _rt::alloc::handle_alloc_error(layout3); + } + ptr + } else { + ::core::ptr::null_mut() + }; + for (i, e) in vec3.into_iter().enumerate() { + let base = result3 + .add(i * (4 * ::core::mem::size_of::<*const u8>())); + { + let (t0_0, t0_1) = e; + let vec1 = t0_0; + let ptr1 = vec1.as_ptr().cast::(); + let len1 = vec1.len(); + *base + .add(::core::mem::size_of::<*const u8>()) + .cast::() = len1; + *base.add(0).cast::<*mut u8>() = ptr1.cast_mut(); + let vec2 = t0_1; + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::() = len2; + *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr2.cast_mut(); + } + } + let ptr4 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/batch@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "set-many"] + fn wit_import5(_: i32, _: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe 
extern "C" fn wit_import5( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import5((bucket).handle() as i32, result3, len3, ptr4) + }; + let l6 = i32::from(*ptr4.add(0).cast::()); + let result12 = match l6 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l7 = i32::from( + *ptr4.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V11; + let v11 = match l7 { + 0 => V11::NoSuchStore, + 1 => V11::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e11 = { + let l8 = *ptr4 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l9 = *ptr4 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len10 = l9; + let bytes10 = _rt::Vec::from_raw_parts( + l8.cast(), + len10, + len10, + ); + _rt::string_lift(bytes10) + }; + V11::Other(e11) + } + }; + v11 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + if layout3.size() != 0 { + _rt::alloc::dealloc(result3.cast(), layout3); + } + result12 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn delete_many( + bucket: &Bucket, + keys: &[_rt::String], + ) -> Result<(), Error> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 4 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 4 + * ::core::mem::size_of::<*const u8>()], + ); + let vec1 = keys; + let len1 = vec1.len(); + let layout1 = _rt::alloc::Layout::from_size_align_unchecked( + vec1.len() * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let result1 = if layout1.size() != 0 { + let ptr = _rt::alloc::alloc(layout1).cast::(); + if ptr.is_null() { + _rt::alloc::handle_alloc_error(layout1); + } + ptr + } else { + ::core::ptr::null_mut() + }; 
+ for (i, e) in vec1.into_iter().enumerate() { + let base = result1 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + { + let vec0 = e; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + *base + .add(::core::mem::size_of::<*const u8>()) + .cast::() = len0; + *base.add(0).cast::<*mut u8>() = ptr0.cast_mut(); + } + } + let ptr2 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:keyvalue/batch@0.2.0-draft2")] + unsafe extern "C" { + #[link_name = "delete-many"] + fn wit_import3(_: i32, _: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import3( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import3((bucket).handle() as i32, result1, len1, ptr2) + }; + let l4 = i32::from(*ptr2.add(0).cast::()); + let result10 = match l4 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l5 = i32::from( + *ptr2.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::super::super::wasi::keyvalue::store::Error as V9; + let v9 = match l5 { + 0 => V9::NoSuchStore, + 1 => V9::AccessDenied, + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e9 = { + let l6 = *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr2 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts( + l6.cast(), + len8, + len8, + ); + _rt::string_lift(bytes8) + }; + V9::Other(e9) + } + }; + v9 + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + if layout1.size() != 0 { + _rt::alloc::dealloc(result1.cast(), layout1); + } + result10 + } + } + } + } + pub mod random { + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod random { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use 
super::super::super::_rt; + #[allow(unused_unsafe, clippy::all)] + pub fn get_random_bytes(len: u64) -> _rt::Vec { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 2 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2 + * ::core::mem::size_of::<*const u8>()], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:random/random@0.2.0")] + unsafe extern "C" { + #[link_name = "get-random-bytes"] + fn wit_import1(_: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i64, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1(_rt::as_i64(&len), ptr0) }; + let l2 = *ptr0.add(0).cast::<*mut u8>(); + let l3 = *ptr0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len4 = l3; + let result5 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); + result5 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn get_random_u64() -> u64 { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:random/random@0.2.0")] + unsafe extern "C" { + #[link_name = "get-random-u64"] + fn wit_import0() -> i64; + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0() -> i64 { + unreachable!() + } + let ret = unsafe { wit_import0() }; + ret as u64 + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod insecure { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + #[allow(unused_unsafe, clippy::all)] + pub fn get_insecure_random_bytes(len: u64) -> _rt::Vec { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] 
+ struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 2 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2 + * ::core::mem::size_of::<*const u8>()], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:random/insecure@0.2.0")] + unsafe extern "C" { + #[link_name = "get-insecure-random-bytes"] + fn wit_import1(_: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i64, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1(_rt::as_i64(&len), ptr0) }; + let l2 = *ptr0.add(0).cast::<*mut u8>(); + let l3 = *ptr0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len4 = l3; + let result5 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); + result5 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn get_insecure_random_u64() -> u64 { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:random/insecure@0.2.0")] + unsafe extern "C" { + #[link_name = "get-insecure-random-u64"] + fn wit_import0() -> i64; + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0() -> i64 { + unreachable!() + } + let ret = unsafe { wit_import0() }; + ret as u64 + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod insecure_seed { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + #[allow(unused_unsafe, clippy::all)] + pub fn insecure_seed() -> (u64, u64) { + unsafe { + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 16]); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:random/insecure-seed@0.2.0")] + unsafe extern "C" { + #[link_name = "insecure-seed"] + fn wit_import1(_: *mut u8); + } 
+ #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: *mut u8) { + unreachable!() + } + unsafe { wit_import1(ptr0) }; + let l2 = *ptr0.add(0).cast::(); + let l3 = *ptr0.add(8).cast::(); + let result4 = (l2 as u64, l3 as u64); + result4 + } + } + } + } + pub mod sockets { + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod network { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + #[derive(Debug)] + #[repr(transparent)] + pub struct Network { + handle: _rt::Resource, + } + impl Network { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for Network { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/network@0.2.0")] + unsafe extern "C" { + #[link_name = "[resource-drop]network"] + fn drop(_: u32); + } + unsafe { drop(_handle) }; + } + } + } + #[repr(u8)] + #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] + pub enum ErrorCode { + Unknown, + AccessDenied, + NotSupported, + InvalidArgument, + OutOfMemory, + Timeout, + ConcurrencyConflict, + NotInProgress, + WouldBlock, + InvalidState, + NewSocketLimit, + AddressNotBindable, + AddressInUse, + RemoteUnreachable, + ConnectionRefused, + ConnectionReset, + ConnectionAborted, + DatagramTooLarge, + NameUnresolvable, + TemporaryResolverFailure, + PermanentResolverFailure, + } + impl ErrorCode { + pub fn name(&self) -> &'static str { + match self { + ErrorCode::Unknown => "unknown", + ErrorCode::AccessDenied => 
"access-denied", + ErrorCode::NotSupported => "not-supported", + ErrorCode::InvalidArgument => "invalid-argument", + ErrorCode::OutOfMemory => "out-of-memory", + ErrorCode::Timeout => "timeout", + ErrorCode::ConcurrencyConflict => "concurrency-conflict", + ErrorCode::NotInProgress => "not-in-progress", + ErrorCode::WouldBlock => "would-block", + ErrorCode::InvalidState => "invalid-state", + ErrorCode::NewSocketLimit => "new-socket-limit", + ErrorCode::AddressNotBindable => "address-not-bindable", + ErrorCode::AddressInUse => "address-in-use", + ErrorCode::RemoteUnreachable => "remote-unreachable", + ErrorCode::ConnectionRefused => "connection-refused", + ErrorCode::ConnectionReset => "connection-reset", + ErrorCode::ConnectionAborted => "connection-aborted", + ErrorCode::DatagramTooLarge => "datagram-too-large", + ErrorCode::NameUnresolvable => "name-unresolvable", + ErrorCode::TemporaryResolverFailure => { + "temporary-resolver-failure" + } + ErrorCode::PermanentResolverFailure => { + "permanent-resolver-failure" + } + } + } + pub fn message(&self) -> &'static str { + match self { + ErrorCode::Unknown => "", + ErrorCode::AccessDenied => "", + ErrorCode::NotSupported => "", + ErrorCode::InvalidArgument => "", + ErrorCode::OutOfMemory => "", + ErrorCode::Timeout => "", + ErrorCode::ConcurrencyConflict => "", + ErrorCode::NotInProgress => "", + ErrorCode::WouldBlock => "", + ErrorCode::InvalidState => "", + ErrorCode::NewSocketLimit => "", + ErrorCode::AddressNotBindable => "", + ErrorCode::AddressInUse => "", + ErrorCode::RemoteUnreachable => "", + ErrorCode::ConnectionRefused => "", + ErrorCode::ConnectionReset => "", + ErrorCode::ConnectionAborted => "", + ErrorCode::DatagramTooLarge => "", + ErrorCode::NameUnresolvable => "", + ErrorCode::TemporaryResolverFailure => "", + ErrorCode::PermanentResolverFailure => "", + } + } + } + impl ::core::fmt::Debug for ErrorCode { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + 
f.debug_struct("ErrorCode") + .field("code", &(*self as i32)) + .field("name", &self.name()) + .field("message", &self.message()) + .finish() + } + } + impl ::core::fmt::Display for ErrorCode { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + write!(f, "{} (error {})", self.name(), * self as i32) + } + } + impl std::error::Error for ErrorCode {} + impl ErrorCode { + #[doc(hidden)] + pub unsafe fn _lift(val: u8) -> ErrorCode { + if !cfg!(debug_assertions) { + return ::core::mem::transmute(val); + } + match val { + 0 => ErrorCode::Unknown, + 1 => ErrorCode::AccessDenied, + 2 => ErrorCode::NotSupported, + 3 => ErrorCode::InvalidArgument, + 4 => ErrorCode::OutOfMemory, + 5 => ErrorCode::Timeout, + 6 => ErrorCode::ConcurrencyConflict, + 7 => ErrorCode::NotInProgress, + 8 => ErrorCode::WouldBlock, + 9 => ErrorCode::InvalidState, + 10 => ErrorCode::NewSocketLimit, + 11 => ErrorCode::AddressNotBindable, + 12 => ErrorCode::AddressInUse, + 13 => ErrorCode::RemoteUnreachable, + 14 => ErrorCode::ConnectionRefused, + 15 => ErrorCode::ConnectionReset, + 16 => ErrorCode::ConnectionAborted, + 17 => ErrorCode::DatagramTooLarge, + 18 => ErrorCode::NameUnresolvable, + 19 => ErrorCode::TemporaryResolverFailure, + 20 => ErrorCode::PermanentResolverFailure, + _ => panic!("invalid enum discriminant"), + } + } + } + #[repr(u8)] + #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] + pub enum IpAddressFamily { + Ipv4, + Ipv6, + } + impl ::core::fmt::Debug for IpAddressFamily { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + IpAddressFamily::Ipv4 => { + f.debug_tuple("IpAddressFamily::Ipv4").finish() + } + IpAddressFamily::Ipv6 => { + f.debug_tuple("IpAddressFamily::Ipv6").finish() + } + } + } + } + impl IpAddressFamily { + #[doc(hidden)] + pub unsafe fn _lift(val: u8) -> IpAddressFamily { + if !cfg!(debug_assertions) { + return ::core::mem::transmute(val); + } + match val { + 0 => 
IpAddressFamily::Ipv4, + 1 => IpAddressFamily::Ipv6, + _ => panic!("invalid enum discriminant"), + } + } + } + pub type Ipv4Address = (u8, u8, u8, u8); + pub type Ipv6Address = (u16, u16, u16, u16, u16, u16, u16, u16); + #[derive(Clone, Copy)] + pub enum IpAddress { + Ipv4(Ipv4Address), + Ipv6(Ipv6Address), + } + impl ::core::fmt::Debug for IpAddress { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + IpAddress::Ipv4(e) => { + f.debug_tuple("IpAddress::Ipv4").field(e).finish() + } + IpAddress::Ipv6(e) => { + f.debug_tuple("IpAddress::Ipv6").field(e).finish() + } + } + } + } + #[repr(C)] + #[derive(Clone, Copy)] + pub struct Ipv4SocketAddress { + pub port: u16, + pub address: Ipv4Address, + } + impl ::core::fmt::Debug for Ipv4SocketAddress { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Ipv4SocketAddress") + .field("port", &self.port) + .field("address", &self.address) + .finish() + } + } + #[repr(C)] + #[derive(Clone, Copy)] + pub struct Ipv6SocketAddress { + pub port: u16, + pub flow_info: u32, + pub address: Ipv6Address, + pub scope_id: u32, + } + impl ::core::fmt::Debug for Ipv6SocketAddress { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Ipv6SocketAddress") + .field("port", &self.port) + .field("flow-info", &self.flow_info) + .field("address", &self.address) + .field("scope-id", &self.scope_id) + .finish() + } + } + #[derive(Clone, Copy)] + pub enum IpSocketAddress { + Ipv4(Ipv4SocketAddress), + Ipv6(Ipv6SocketAddress), + } + impl ::core::fmt::Debug for IpSocketAddress { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + IpSocketAddress::Ipv4(e) => { + f.debug_tuple("IpSocketAddress::Ipv4").field(e).finish() + } + IpSocketAddress::Ipv6(e) => { + f.debug_tuple("IpSocketAddress::Ipv6").field(e).finish() + } + } + } + } + } + #[allow(dead_code, 
async_fn_in_trait, unused_imports, clippy::all)] + pub mod instance_network { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + pub type Network = super::super::super::wasi::sockets::network::Network; + #[allow(unused_unsafe, clippy::all)] + pub fn instance_network() -> Network { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/instance-network@0.2.0")] + unsafe extern "C" { + #[link_name = "instance-network"] + fn wit_import0() -> i32; + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0() -> i32 { + unreachable!() + } + let ret = unsafe { wit_import0() }; + unsafe { + super::super::super::wasi::sockets::network::Network::from_handle( + ret as u32, + ) + } + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod udp { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Pollable = super::super::super::wasi::io::poll::Pollable; + pub type Network = super::super::super::wasi::sockets::network::Network; + pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; + pub type IpSocketAddress = super::super::super::wasi::sockets::network::IpSocketAddress; + pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; + #[derive(Clone)] + pub struct IncomingDatagram { + pub data: _rt::Vec, + pub remote_address: IpSocketAddress, + } + impl ::core::fmt::Debug for IncomingDatagram { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("IncomingDatagram") + .field("data", &self.data) + .field("remote-address", &self.remote_address) + .finish() + } + } + #[derive(Clone)] + pub struct OutgoingDatagram { + pub data: _rt::Vec, + pub remote_address: Option, + } + impl ::core::fmt::Debug for 
OutgoingDatagram { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("OutgoingDatagram") + .field("data", &self.data) + .field("remote-address", &self.remote_address) + .finish() + } + } + #[derive(Debug)] + #[repr(transparent)] + pub struct UdpSocket { + handle: _rt::Resource, + } + impl UdpSocket { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for UdpSocket { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[resource-drop]udp-socket"] + fn drop(_: u32); + } + unsafe { drop(_handle) }; + } + } + } + #[derive(Debug)] + #[repr(transparent)] + pub struct IncomingDatagramStream { + handle: _rt::Resource, + } + impl IncomingDatagramStream { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for IncomingDatagramStream { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[resource-drop]incoming-datagram-stream"] + fn drop(_: u32); + } + unsafe { drop(_handle) }; + } + } + } + #[derive(Debug)] + #[repr(transparent)] + pub struct 
OutgoingDatagramStream { + handle: _rt::Resource, + } + impl OutgoingDatagramStream { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for OutgoingDatagramStream { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[resource-drop]outgoing-datagram-stream"] + fn drop(_: u32); + } + unsafe { drop(_handle) }; + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn start_bind( + &self, + network: &Network, + local_address: IpSocketAddress, + ) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + use super::super::super::wasi::sockets::network::IpSocketAddress as V4; + let ( + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ) = match local_address { + V4::Ipv4(e) => { + let super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: port0, + address: address0, + } = e; + let (t1_0, t1_1, t1_2, t1_3) = address0; + ( + 0i32, + _rt::as_i32(port0), + _rt::as_i32(t1_0), + _rt::as_i32(t1_1), + _rt::as_i32(t1_2), + _rt::as_i32(t1_3), + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + ) + } + V4::Ipv6(e) => { + let super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: port2, + flow_info: flow_info2, + address: address2, + scope_id: scope_id2, + } = e; + let (t3_0, t3_1, t3_2, t3_3, t3_4, 
t3_5, t3_6, t3_7) = address2; + ( + 1i32, + _rt::as_i32(port2), + _rt::as_i32(flow_info2), + _rt::as_i32(t3_0), + _rt::as_i32(t3_1), + _rt::as_i32(t3_2), + _rt::as_i32(t3_3), + _rt::as_i32(t3_4), + _rt::as_i32(t3_5), + _rt::as_i32(t3_6), + _rt::as_i32(t3_7), + _rt::as_i32(scope_id2), + ) + } + }; + let ptr6 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.start-bind"] + fn wit_import7( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import7( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import7( + (self).handle() as i32, + (network).handle() as i32, + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ptr6, + ) + }; + let l8 = i32::from(*ptr6.add(0).cast::()); + let result10 = match l8 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l9 = i32::from(*ptr6.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l9 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result10 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn finish_bind(&self) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = 
"[method]udp-socket.finish-bind"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn stream( + &self, + remote_address: Option, + ) -> Result< + (IncomingDatagramStream, OutgoingDatagramStream), + ErrorCode, + > { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 12]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 12], + ); + let ( + result6_0, + result6_1, + result6_2, + result6_3, + result6_4, + result6_5, + result6_6, + result6_7, + result6_8, + result6_9, + result6_10, + result6_11, + result6_12, + ) = match remote_address { + Some(e) => { + use super::super::super::wasi::sockets::network::IpSocketAddress as V4; + let ( + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ) = match e { + V4::Ipv4(e) => { + let super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: port0, + address: address0, + } = e; + let (t1_0, t1_1, t1_2, t1_3) = address0; + ( + 0i32, + _rt::as_i32(port0), + _rt::as_i32(t1_0), + _rt::as_i32(t1_1), + _rt::as_i32(t1_2), + _rt::as_i32(t1_3), + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + ) + } + V4::Ipv6(e) => { + let super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: port2, + flow_info: flow_info2, + address: address2, + scope_id: scope_id2, + } = e; + let (t3_0, t3_1, t3_2, t3_3, t3_4, 
t3_5, t3_6, t3_7) = address2; + ( + 1i32, + _rt::as_i32(port2), + _rt::as_i32(flow_info2), + _rt::as_i32(t3_0), + _rt::as_i32(t3_1), + _rt::as_i32(t3_2), + _rt::as_i32(t3_3), + _rt::as_i32(t3_4), + _rt::as_i32(t3_5), + _rt::as_i32(t3_6), + _rt::as_i32(t3_7), + _rt::as_i32(scope_id2), + ) + } + }; + ( + 1i32, + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ) + } + None => { + ( + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + ) + } + }; + let ptr7 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.stream"] + fn wit_import8( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import8( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ) { + unreachable!() + } + unsafe { + wit_import8( + (self).handle() as i32, + result6_0, + result6_1, + result6_2, + result6_3, + result6_4, + result6_5, + result6_6, + result6_7, + result6_8, + result6_9, + result6_10, + result6_11, + result6_12, + ptr7, + ) + }; + let l9 = i32::from(*ptr7.add(0).cast::()); + let result13 = match l9 { + 0 => { + let e = { + let l10 = *ptr7.add(4).cast::(); + let l11 = *ptr7.add(8).cast::(); + ( + unsafe { IncomingDatagramStream::from_handle(l10 as u32) }, + unsafe { OutgoingDatagramStream::from_handle(l11 as u32) }, + ) + }; + Ok(e) + } + 1 => { + let e = { + let l12 = i32::from(*ptr7.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l12 as u8, + ) + }; + Err(e) + } + _ => 
_rt::invalid_enum_discriminant(), + }; + result13 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn local_address(&self) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 36]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 36], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.local-address"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result22 = match l2 { + 0 => { + let e = { + let l3 = i32::from(*ptr0.add(4).cast::()); + use super::super::super::wasi::sockets::network::IpSocketAddress as V20; + let v20 = match l3 { + 0 => { + let e20 = { + let l4 = i32::from(*ptr0.add(8).cast::()); + let l5 = i32::from(*ptr0.add(10).cast::()); + let l6 = i32::from(*ptr0.add(11).cast::()); + let l7 = i32::from(*ptr0.add(12).cast::()); + let l8 = i32::from(*ptr0.add(13).cast::()); + super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: l4 as u16, + address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), + } + }; + V20::Ipv4(e20) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e20 = { + let l9 = i32::from(*ptr0.add(8).cast::()); + let l10 = *ptr0.add(12).cast::(); + let l11 = i32::from(*ptr0.add(16).cast::()); + let l12 = i32::from(*ptr0.add(18).cast::()); + let l13 = i32::from(*ptr0.add(20).cast::()); + let l14 = i32::from(*ptr0.add(22).cast::()); + let l15 = i32::from(*ptr0.add(24).cast::()); + let l16 = i32::from(*ptr0.add(26).cast::()); + let l17 = i32::from(*ptr0.add(28).cast::()); + let l18 = i32::from(*ptr0.add(30).cast::()); + let l19 = *ptr0.add(32).cast::(); + 
super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: l9 as u16, + flow_info: l10 as u32, + address: ( + l11 as u16, + l12 as u16, + l13 as u16, + l14 as u16, + l15 as u16, + l16 as u16, + l17 as u16, + l18 as u16, + ), + scope_id: l19 as u32, + } + }; + V20::Ipv6(e20) + } + }; + v20 + }; + Ok(e) + } + 1 => { + let e = { + let l21 = i32::from(*ptr0.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l21 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result22 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn remote_address(&self) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 36]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 36], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.remote-address"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result22 = match l2 { + 0 => { + let e = { + let l3 = i32::from(*ptr0.add(4).cast::()); + use super::super::super::wasi::sockets::network::IpSocketAddress as V20; + let v20 = match l3 { + 0 => { + let e20 = { + let l4 = i32::from(*ptr0.add(8).cast::()); + let l5 = i32::from(*ptr0.add(10).cast::()); + let l6 = i32::from(*ptr0.add(11).cast::()); + let l7 = i32::from(*ptr0.add(12).cast::()); + let l8 = i32::from(*ptr0.add(13).cast::()); + super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: l4 as u16, + address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), + } + }; + V20::Ipv4(e20) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e20 = { + let l9 = 
i32::from(*ptr0.add(8).cast::()); + let l10 = *ptr0.add(12).cast::(); + let l11 = i32::from(*ptr0.add(16).cast::()); + let l12 = i32::from(*ptr0.add(18).cast::()); + let l13 = i32::from(*ptr0.add(20).cast::()); + let l14 = i32::from(*ptr0.add(22).cast::()); + let l15 = i32::from(*ptr0.add(24).cast::()); + let l16 = i32::from(*ptr0.add(26).cast::()); + let l17 = i32::from(*ptr0.add(28).cast::()); + let l18 = i32::from(*ptr0.add(30).cast::()); + let l19 = *ptr0.add(32).cast::(); + super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: l9 as u16, + flow_info: l10 as u32, + address: ( + l11 as u16, + l12 as u16, + l13 as u16, + l14 as u16, + l15 as u16, + l16 as u16, + l17 as u16, + l18 as u16, + ), + scope_id: l19 as u32, + } + }; + V20::Ipv6(e20) + } + }; + v20 + }; + Ok(e) + } + 1 => { + let e = { + let l21 = i32::from(*ptr0.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l21 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result22 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn address_family(&self) -> IpAddressFamily { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.address-family"] + fn wit_import0(_: i32) -> i32; + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0(_: i32) -> i32 { + unreachable!() + } + let ret = unsafe { wit_import0((self).handle() as i32) }; + super::super::super::wasi::sockets::network::IpAddressFamily::_lift( + ret as u8, + ) + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn unicast_hop_limit(&self) -> Result { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + 
#[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.unicast-hop-limit"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + l3 as u8 + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn set_unicast_hop_limit(&self, value: u8) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.set-unicast-hop-limit"] + fn wit_import1(_: i32, _: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { + unreachable!() + } + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i32(&value), + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn receive_buffer_size(&self) -> Result { + unsafe { + #[repr(align(8))] 
+ struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 16], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.receive-buffer-size"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(8).cast::(); + l3 as u64 + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(8).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn set_receive_buffer_size( + &self, + value: u64, + ) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.set-receive-buffer-size"] + fn wit_import1(_: i32, _: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unreachable!() + } + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i64(&value), + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; 
+ Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn send_buffer_size(&self) -> Result { + unsafe { + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 16], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.send-buffer-size"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(8).cast::(); + l3 as u64 + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(8).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn set_send_buffer_size(&self, value: u64) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.set-send-buffer-size"] + fn wit_import1(_: i32, _: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unreachable!() + } + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i64(&value), + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = 
match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 + } + } + } + impl UdpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn subscribe(&self) -> Pollable { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]udp-socket.subscribe"] + fn wit_import0(_: i32) -> i32; + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0(_: i32) -> i32 { + unreachable!() + } + let ret = unsafe { wit_import0((self).handle() as i32) }; + unsafe { + super::super::super::wasi::io::poll::Pollable::from_handle( + ret as u32, + ) + } + } + } + } + impl IncomingDatagramStream { + #[allow(unused_unsafe, clippy::all)] + pub fn receive( + &self, + max_results: u64, + ) -> Result<_rt::Vec, ErrorCode> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]incoming-datagram-stream.receive"] + fn wit_import1(_: i32, _: i64, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unreachable!() + } + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i64(&max_results), + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result28 = match l2 { + 0 => { + let e = { + let l3 = *ptr0 + 
.add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l4 = *ptr0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base26 = l3; + let len26 = l4; + let mut result26 = _rt::Vec::with_capacity(len26); + for i in 0..len26 { + let base = base26 + .add(i * (32 + 2 * ::core::mem::size_of::<*const u8>())); + let e26 = { + let l5 = *base.add(0).cast::<*mut u8>(); + let l6 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len7 = l6; + let l8 = i32::from( + *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::super::super::wasi::sockets::network::IpSocketAddress as V25; + let v25 = match l8 { + 0 => { + let e25 = { + let l9 = i32::from( + *base + .add(4 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l10 = i32::from( + *base + .add(6 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l11 = i32::from( + *base + .add(7 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l12 = i32::from( + *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l13 = i32::from( + *base + .add(9 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: l9 as u16, + address: (l10 as u8, l11 as u8, l12 as u8, l13 as u8), + } + }; + V25::Ipv4(e25) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e25 = { + let l14 = i32::from( + *base + .add(4 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l15 = *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l16 = i32::from( + *base + .add(12 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l17 = i32::from( + *base + .add(14 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l18 = i32::from( + *base + .add(16 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l19 = i32::from( + *base + .add(18 + 2 * 
::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l20 = i32::from( + *base + .add(20 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l21 = i32::from( + *base + .add(22 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l22 = i32::from( + *base + .add(24 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l23 = i32::from( + *base + .add(26 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l24 = *base + .add(28 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: l14 as u16, + flow_info: l15 as u32, + address: ( + l16 as u16, + l17 as u16, + l18 as u16, + l19 as u16, + l20 as u16, + l21 as u16, + l22 as u16, + l23 as u16, + ), + scope_id: l24 as u32, + } + }; + V25::Ipv6(e25) + } + }; + IncomingDatagram { + data: _rt::Vec::from_raw_parts(l5.cast(), len7, len7), + remote_address: v25, + } + }; + result26.push(e26); + } + _rt::cabi_dealloc( + base26, + len26 * (32 + 2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result26 + }; + Ok(e) } - None => { - ( - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - ) + 1 => { + let e = { + let l27 = i32::from( + *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(), + ); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l27 as u8, + ) + }; + Err(e) } + _ => _rt::invalid_enum_discriminant(), }; - let ptr7 = ret_area.0.as_mut_ptr().cast::(); + result28 + } + } + } + impl IncomingDatagramStream { + #[allow(unused_unsafe, clippy::all)] + pub fn subscribe(&self) -> Pollable { + unsafe { #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.stream"] - fn wit_import8( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - 
_: i32, - _: i32, - _: *mut u8, - ); + #[link_name = "[method]incoming-datagram-stream.subscribe"] + fn wit_import0(_: i32) -> i32; } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import8( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ) { + unsafe extern "C" fn wit_import0(_: i32) -> i32 { unreachable!() } + let ret = unsafe { wit_import0((self).handle() as i32) }; unsafe { - wit_import8( - (self).handle() as i32, - result6_0, - result6_1, - result6_2, - result6_3, - result6_4, - result6_5, - result6_6, - result6_7, - result6_8, - result6_9, - result6_10, - result6_11, - result6_12, - ptr7, + super::super::super::wasi::io::poll::Pollable::from_handle( + ret as u32, ) - }; - let l9 = i32::from(*ptr7.add(0).cast::()); - let result13 = match l9 { + } + } + } + } + impl OutgoingDatagramStream { + #[allow(unused_unsafe, clippy::all)] + pub fn check_send(&self) -> Result { + unsafe { + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 16], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]outgoing-datagram-stream.check-send"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { 0 => { let e = { - let l10 = *ptr7.add(4).cast::(); - let l11 = *ptr7.add(8).cast::(); - ( - unsafe { IncomingDatagramStream::from_handle(l10 as u32) }, - unsafe { OutgoingDatagramStream::from_handle(l11 as u32) }, - ) + let l3 = *ptr0.add(8).cast::(); + l3 as u64 }; Ok(e) } 1 => { let e = { - let l12 = 
i32::from(*ptr7.add(4).cast::()); + let l4 = i32::from(*ptr0.add(8).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l12 as u8, + l4 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result13 + result5 } } } - impl UdpSocket { + impl OutgoingDatagramStream { #[allow(unused_unsafe, clippy::all)] - pub fn local_address(&self) -> Result { + pub fn send( + &self, + datagrams: &[OutgoingDatagram], + ) -> Result { unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 36]); + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 36], + [::core::mem::MaybeUninit::uninit(); 16], ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); + let vec7 = datagrams; + let len7 = vec7.len(); + let layout7 = _rt::alloc::Layout::from_size_align_unchecked( + vec7.len() * (32 + 3 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let result7 = if layout7.size() != 0 { + let ptr = _rt::alloc::alloc(layout7).cast::(); + if ptr.is_null() { + _rt::alloc::handle_alloc_error(layout7); + } + ptr + } else { + ::core::ptr::null_mut() + }; + for (i, e) in vec7.into_iter().enumerate() { + let base = result7 + .add(i * (32 + 3 * ::core::mem::size_of::<*const u8>())); + { + let OutgoingDatagram { + data: data0, + remote_address: remote_address0, + } = e; + let vec1 = data0; + let ptr1 = vec1.as_ptr().cast::(); + let len1 = vec1.len(); + *base + .add(::core::mem::size_of::<*const u8>()) + .cast::() = len1; + *base.add(0).cast::<*mut u8>() = ptr1.cast_mut(); + match remote_address0 { + Some(e) => { + *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (1i32) as u8; + use super::super::super::wasi::sockets::network::IpSocketAddress as V6; + match e { + V6::Ipv4(e) => { + *base + .add(4 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (0i32) as u8; + let 
super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: port2, + address: address2, + } = e; + *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(port2)) as u16; + let (t3_0, t3_1, t3_2, t3_3) = address2; + *base + .add(10 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t3_0)) as u8; + *base + .add(11 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t3_1)) as u8; + *base + .add(12 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t3_2)) as u8; + *base + .add(13 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t3_3)) as u8; + } + V6::Ipv6(e) => { + *base + .add(4 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (1i32) as u8; + let super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: port4, + flow_info: flow_info4, + address: address4, + scope_id: scope_id4, + } = e; + *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(port4)) as u16; + *base + .add(12 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = _rt::as_i32(flow_info4); + let (t5_0, t5_1, t5_2, t5_3, t5_4, t5_5, t5_6, t5_7) = address4; + *base + .add(16 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_0)) as u16; + *base + .add(18 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_1)) as u16; + *base + .add(20 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_2)) as u16; + *base + .add(22 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_3)) as u16; + *base + .add(24 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_4)) as u16; + *base + .add(26 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_5)) as u16; + *base + .add(28 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (_rt::as_i32(t5_6)) as u16; + *base + .add(30 + 2 * ::core::mem::size_of::<*const u8>()) + 
.cast::() = (_rt::as_i32(t5_7)) as u16; + *base + .add(32 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::() = _rt::as_i32(scope_id4); + } + } + } + None => { + *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = (0i32) as u8; + } + }; + } + } + let ptr8 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.local-address"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]outgoing-datagram-stream.send"] + fn wit_import9(_: i32, _: *mut u8, _: usize, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import9( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result22 = match l2 { + unsafe { + wit_import9((self).handle() as i32, result7, len7, ptr8) + }; + let l10 = i32::from(*ptr8.add(0).cast::()); + let result13 = match l10 { 0 => { let e = { - let l3 = i32::from(*ptr0.add(4).cast::()); - use super::super::super::wasi::sockets::network::IpSocketAddress as V20; - let v20 = match l3 { - 0 => { - let e20 = { - let l4 = i32::from(*ptr0.add(8).cast::()); - let l5 = i32::from(*ptr0.add(10).cast::()); - let l6 = i32::from(*ptr0.add(11).cast::()); - let l7 = i32::from(*ptr0.add(12).cast::()); - let l8 = i32::from(*ptr0.add(13).cast::()); - super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: l4 as u16, - address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), - } - }; - V20::Ipv4(e20) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e20 = { - let l9 = i32::from(*ptr0.add(8).cast::()); - let l10 = *ptr0.add(12).cast::(); - let l11 = i32::from(*ptr0.add(16).cast::()); - let l12 = i32::from(*ptr0.add(18).cast::()); - let l13 = i32::from(*ptr0.add(20).cast::()); - let l14 = 
i32::from(*ptr0.add(22).cast::()); - let l15 = i32::from(*ptr0.add(24).cast::()); - let l16 = i32::from(*ptr0.add(26).cast::()); - let l17 = i32::from(*ptr0.add(28).cast::()); - let l18 = i32::from(*ptr0.add(30).cast::()); - let l19 = *ptr0.add(32).cast::(); - super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: l9 as u16, - flow_info: l10 as u32, - address: ( - l11 as u16, - l12 as u16, - l13 as u16, - l14 as u16, - l15 as u16, - l16 as u16, - l17 as u16, - l18 as u16, - ), - scope_id: l19 as u32, - } - }; - V20::Ipv6(e20) - } - }; - v20 + let l11 = *ptr8.add(8).cast::(); + l11 as u64 }; Ok(e) } 1 => { let e = { - let l21 = i32::from(*ptr0.add(4).cast::()); + let l12 = i32::from(*ptr8.add(8).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l21 as u8, + l12 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result22 + if layout7.size() != 0 { + _rt::alloc::dealloc(result7.cast(), layout7); + } + result13 } } } - impl UdpSocket { + impl OutgoingDatagramStream { #[allow(unused_unsafe, clippy::all)] - pub fn remote_address(&self) -> Result { + pub fn subscribe(&self) -> Pollable { unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 36]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 36], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.remote-address"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]outgoing-datagram-stream.subscribe"] + fn wit_import0(_: i32) -> i32; } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import0(_: i32) -> i32 { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result22 = match l2 { - 0 => { - let e = { - let l3 = 
i32::from(*ptr0.add(4).cast::()); - use super::super::super::wasi::sockets::network::IpSocketAddress as V20; - let v20 = match l3 { - 0 => { - let e20 = { - let l4 = i32::from(*ptr0.add(8).cast::()); - let l5 = i32::from(*ptr0.add(10).cast::()); - let l6 = i32::from(*ptr0.add(11).cast::()); - let l7 = i32::from(*ptr0.add(12).cast::()); - let l8 = i32::from(*ptr0.add(13).cast::()); - super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: l4 as u16, - address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), - } - }; - V20::Ipv4(e20) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e20 = { - let l9 = i32::from(*ptr0.add(8).cast::()); - let l10 = *ptr0.add(12).cast::(); - let l11 = i32::from(*ptr0.add(16).cast::()); - let l12 = i32::from(*ptr0.add(18).cast::()); - let l13 = i32::from(*ptr0.add(20).cast::()); - let l14 = i32::from(*ptr0.add(22).cast::()); - let l15 = i32::from(*ptr0.add(24).cast::()); - let l16 = i32::from(*ptr0.add(26).cast::()); - let l17 = i32::from(*ptr0.add(28).cast::()); - let l18 = i32::from(*ptr0.add(30).cast::()); - let l19 = *ptr0.add(32).cast::(); - super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: l9 as u16, - flow_info: l10 as u32, - address: ( - l11 as u16, - l12 as u16, - l13 as u16, - l14 as u16, - l15 as u16, - l16 as u16, - l17 as u16, - l18 as u16, - ), - scope_id: l19 as u32, - } - }; - V20::Ipv6(e20) - } - }; - v20 - }; - Ok(e) - } - 1 => { - let e = { - let l21 = i32::from(*ptr0.add(4).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l21 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result22 + let ret = unsafe { wit_import0((self).handle() as i32) }; + unsafe { + super::super::super::wasi::io::poll::Pollable::from_handle( + ret as u32, + ) + } + } + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod udp_create_socket { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: 
fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; + pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; + pub type UdpSocket = super::super::super::wasi::sockets::udp::UdpSocket; + #[allow(unused_unsafe, clippy::all)] + pub fn create_udp_socket( + address_family: IpAddressFamily, + ) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 8]); + let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/udp-create-socket@0.2.0")] + unsafe extern "C" { + #[link_name = "create-udp-socket"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() } + unsafe { wit_import1(address_family.clone() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(4).cast::(); + unsafe { + super::super::super::wasi::sockets::udp::UdpSocket::from_handle( + l3 as u32, + ) + } + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 } } - impl UdpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn address_family(&self) -> IpAddressFamily { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]udp-socket.address-family"] - fn wit_import0(_: i32) -> i32; + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod tcp { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = 
super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type InputStream = super::super::super::wasi::io::streams::InputStream; + pub type OutputStream = super::super::super::wasi::io::streams::OutputStream; + pub type Pollable = super::super::super::wasi::io::poll::Pollable; + pub type Duration = super::super::super::wasi::clocks::monotonic_clock::Duration; + pub type Network = super::super::super::wasi::sockets::network::Network; + pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; + pub type IpSocketAddress = super::super::super::wasi::sockets::network::IpSocketAddress; + pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; + #[repr(u8)] + #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] + pub enum ShutdownType { + Receive, + Send, + Both, + } + impl ::core::fmt::Debug for ShutdownType { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + ShutdownType::Receive => { + f.debug_tuple("ShutdownType::Receive").finish() } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { - unreachable!() + ShutdownType::Send => { + f.debug_tuple("ShutdownType::Send").finish() + } + ShutdownType::Both => { + f.debug_tuple("ShutdownType::Both").finish() } - let ret = unsafe { wit_import0((self).handle() as i32) }; - super::super::super::wasi::sockets::network::IpAddressFamily::_lift( - ret as u8, - ) } } } - impl UdpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn unicast_hop_limit(&self) -> Result { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + impl ShutdownType { + #[doc(hidden)] + pub unsafe fn _lift(val: u8) -> ShutdownType { + 
if !cfg!(debug_assertions) { + return ::core::mem::transmute(val); + } + match val { + 0 => ShutdownType::Receive, + 1 => ShutdownType::Send, + 2 => ShutdownType::Both, + _ => panic!("invalid enum discriminant"), + } + } + } + #[derive(Debug)] + #[repr(transparent)] + pub struct TcpSocket { + handle: _rt::Resource, + } + impl TcpSocket { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for TcpSocket { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.unicast-hop-limit"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() + #[link_name = "[resource-drop]tcp-socket"] + fn drop(_: u32); } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - l3 as u8 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 + unsafe { drop(_handle) }; } } } - impl UdpSocket { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn set_unicast_hop_limit(&self, value: u8) -> Result<(), ErrorCode> { + pub fn start_bind( + &self, + network: &Network, + local_address: IpSocketAddress, + ) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] struct 
RetArea([::core::mem::MaybeUninit; 2]); let mut ret_area = RetArea( [::core::mem::MaybeUninit::uninit(); 2], ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); + use super::super::super::wasi::sockets::network::IpSocketAddress as V4; + let ( + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ) = match local_address { + V4::Ipv4(e) => { + let super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: port0, + address: address0, + } = e; + let (t1_0, t1_1, t1_2, t1_3) = address0; + ( + 0i32, + _rt::as_i32(port0), + _rt::as_i32(t1_0), + _rt::as_i32(t1_1), + _rt::as_i32(t1_2), + _rt::as_i32(t1_3), + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + ) + } + V4::Ipv6(e) => { + let super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: port2, + flow_info: flow_info2, + address: address2, + scope_id: scope_id2, + } = e; + let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; + ( + 1i32, + _rt::as_i32(port2), + _rt::as_i32(flow_info2), + _rt::as_i32(t3_0), + _rt::as_i32(t3_1), + _rt::as_i32(t3_2), + _rt::as_i32(t3_3), + _rt::as_i32(t3_4), + _rt::as_i32(t3_5), + _rt::as_i32(t3_6), + _rt::as_i32(t3_7), + _rt::as_i32(scope_id2), + ) + } + }; + let ptr6 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.set-unicast-hop-limit"] - fn wit_import1(_: i32, _: i32, _: *mut u8); + #[link_name = "[method]tcp-socket.start-bind"] + fn wit_import7( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { + unsafe extern "C" fn wit_import7( + _: i32, + _: i32, 
+ _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ) { unreachable!() } unsafe { - wit_import1( + wit_import7( (self).handle() as i32, - _rt::as_i32(&value), - ptr0, + (network).handle() as i32, + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ptr6, ) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { + let l8 = i32::from(*ptr6.add(0).cast::()); + let result10 = match l8 { 0 => { let e = (); Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); + let l9 = i32::from(*ptr6.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, + l9 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 + result10 } } } - impl UdpSocket { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn receive_buffer_size(&self) -> Result { + pub fn finish_bind(&self) -> Result<(), ErrorCode> { unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], + [::core::mem::MaybeUninit::uninit(); 2], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.receive-buffer-size"] + #[link_name = "[method]tcp-socket.finish-bind"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -12560,34 +15295,32 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { + let result4 = match l2 { 0 => { - let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 - }; 
+ let e = (); Ok(e) } 1 => { let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result5 + result4 } } } - impl UdpSocket { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn set_receive_buffer_size( + pub fn start_connect( &self, - value: u64, + network: &Network, + remote_address: IpSocketAddress, ) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] @@ -12595,59 +15328,165 @@ pub mod wasi { let mut ret_area = RetArea( [::core::mem::MaybeUninit::uninit(); 2], ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); + use super::super::super::wasi::sockets::network::IpSocketAddress as V4; + let ( + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ) = match remote_address { + V4::Ipv4(e) => { + let super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: port0, + address: address0, + } = e; + let (t1_0, t1_1, t1_2, t1_3) = address0; + ( + 0i32, + _rt::as_i32(port0), + _rt::as_i32(t1_0), + _rt::as_i32(t1_1), + _rt::as_i32(t1_2), + _rt::as_i32(t1_3), + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + 0i32, + ) + } + V4::Ipv6(e) => { + let super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: port2, + flow_info: flow_info2, + address: address2, + scope_id: scope_id2, + } = e; + let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; + ( + 1i32, + _rt::as_i32(port2), + _rt::as_i32(flow_info2), + _rt::as_i32(t3_0), + _rt::as_i32(t3_1), + _rt::as_i32(t3_2), + _rt::as_i32(t3_3), + _rt::as_i32(t3_4), + _rt::as_i32(t3_5), + _rt::as_i32(t3_6), + _rt::as_i32(t3_7), + _rt::as_i32(scope_id2), + ) + } + }; + let ptr6 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] 
+ #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.set-receive-buffer-size"] - fn wit_import1(_: i32, _: i64, _: *mut u8); + #[link_name = "[method]tcp-socket.start-connect"] + fn wit_import7( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unsafe extern "C" fn wit_import7( + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: i32, + _: *mut u8, + ) { unreachable!() } unsafe { - wit_import1( + wit_import7( (self).handle() as i32, - _rt::as_i64(&value), - ptr0, + (network).handle() as i32, + result5_0, + result5_1, + result5_2, + result5_3, + result5_4, + result5_5, + result5_6, + result5_7, + result5_8, + result5_9, + result5_10, + result5_11, + ptr6, ) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { + let l8 = i32::from(*ptr6.add(0).cast::()); + let result10 = match l8 { 0 => { let e = (); Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); + let l9 = i32::from(*ptr6.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, + l9 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 + result10 } } } - impl UdpSocket { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn send_buffer_size(&self) -> Result { + pub fn finish_connect( + &self, + ) -> Result<(InputStream, OutputStream), ErrorCode> { unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 12]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], + [::core::mem::MaybeUninit::uninit(); 12], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); 
#[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]udp-socket.send-buffer-size"] + #[link_name = "[method]tcp-socket.finish-connect"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -12656,342 +15495,141 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { + let result6 = match l2 { 0 => { let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 - } - } - } - impl UdpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_send_buffer_size(&self, value: u64) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]udp-socket.set-send-buffer-size"] - fn wit_import1(_: i32, _: i64, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { - unreachable!() - } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i64(&value), - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); + let l3 = *ptr0.add(4).cast::(); + let l4 = *ptr0.add(8).cast::(); + ( + unsafe { + super::super::super::wasi::io::streams::InputStream::from_handle( + l3 as u32, + ) + }, + unsafe { + super::super::super::wasi::io::streams::OutputStream::from_handle( + l4 as u32, + ) + 
}, + ) + }; Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); + let l5 = i32::from(*ptr0.add(4).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, + l5 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 - } - } - } - impl UdpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn subscribe(&self) -> Pollable { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]udp-socket.subscribe"] - fn wit_import0(_: i32) -> i32; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { - unreachable!() - } - let ret = unsafe { wit_import0((self).handle() as i32) }; - unsafe { - super::super::super::wasi::io::poll::Pollable::from_handle( - ret as u32, - ) - } + result6 } } } - impl IncomingDatagramStream { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn receive( - &self, - max_results: u64, - ) -> Result<_rt::Vec, ErrorCode> { + pub fn start_listen(&self) -> Result<(), ErrorCode> { unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea( - [::core::mem::MaybeUninit< - u8, - >; 3 * ::core::mem::size_of::<*const u8>()], - ); + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 3 - * ::core::mem::size_of::<*const u8>()], + [::core::mem::MaybeUninit::uninit(); 2], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]incoming-datagram-stream.receive"] - fn wit_import1(_: i32, _: i64, _: *mut u8); + #[link_name = "[method]tcp-socket.start-listen"] + fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = 
"wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { unreachable!() } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i64(&max_results), - ptr0, - ) - }; + unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result28 = match l2 { + let result4 = match l2 { 0 => { - let e = { - let l3 = *ptr0 - .add(::core::mem::size_of::<*const u8>()) - .cast::<*mut u8>(); - let l4 = *ptr0 - .add(2 * ::core::mem::size_of::<*const u8>()) - .cast::(); - let base26 = l3; - let len26 = l4; - let mut result26 = _rt::Vec::with_capacity(len26); - for i in 0..len26 { - let base = base26 - .add(i * (32 + 2 * ::core::mem::size_of::<*const u8>())); - let e26 = { - let l5 = *base.add(0).cast::<*mut u8>(); - let l6 = *base - .add(::core::mem::size_of::<*const u8>()) - .cast::(); - let len7 = l6; - let l8 = i32::from( - *base - .add(2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - use super::super::super::wasi::sockets::network::IpSocketAddress as V25; - let v25 = match l8 { - 0 => { - let e25 = { - let l9 = i32::from( - *base - .add(4 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l10 = i32::from( - *base - .add(6 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l11 = i32::from( - *base - .add(7 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l12 = i32::from( - *base - .add(8 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l13 = i32::from( - *base - .add(9 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: l9 as u16, - address: (l10 as u8, l11 as u8, l12 as u8, l13 as u8), - } - }; - V25::Ipv4(e25) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e25 = { - let l14 = i32::from( - *base - .add(4 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - 
let l15 = *base - .add(8 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(); - let l16 = i32::from( - *base - .add(12 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l17 = i32::from( - *base - .add(14 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l18 = i32::from( - *base - .add(16 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l19 = i32::from( - *base - .add(18 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l20 = i32::from( - *base - .add(20 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l21 = i32::from( - *base - .add(22 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l22 = i32::from( - *base - .add(24 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l23 = i32::from( - *base - .add(26 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(), - ); - let l24 = *base - .add(28 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::(); - super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: l14 as u16, - flow_info: l15 as u32, - address: ( - l16 as u16, - l17 as u16, - l18 as u16, - l19 as u16, - l20 as u16, - l21 as u16, - l22 as u16, - l23 as u16, - ), - scope_id: l24 as u32, - } - }; - V25::Ipv6(e25) - } - }; - IncomingDatagram { - data: _rt::Vec::from_raw_parts(l5.cast(), len7, len7), - remote_address: v25, - } - }; - result26.push(e26); - } - _rt::cabi_dealloc( - base26, - len26 * (32 + 2 * ::core::mem::size_of::<*const u8>()), - ::core::mem::size_of::<*const u8>(), - ); - result26 - }; + let e = (); Ok(e) } 1 => { let e = { - let l27 = i32::from( - *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(), - ); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l27 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result28 + result4 } } } - impl IncomingDatagramStream { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - 
pub fn subscribe(&self) -> Pollable { + pub fn finish_listen(&self) -> Result<(), ErrorCode> { unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]incoming-datagram-stream.subscribe"] - fn wit_import0(_: i32) -> i32; + #[link_name = "[method]tcp-socket.finish-listen"] + fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { unreachable!() } - let ret = unsafe { wit_import0((self).handle() as i32) }; - unsafe { - super::super::super::wasi::io::poll::Pollable::from_handle( - ret as u32, - ) - } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 } } } - impl OutgoingDatagramStream { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn check_send(&self) -> Result { + pub fn accept( + &self, + ) -> Result<(TcpSocket, InputStream, OutputStream), ErrorCode> { unsafe { - #[repr(align(8))] + #[repr(align(4))] struct RetArea([::core::mem::MaybeUninit; 16]); let mut ret_area = RetArea( [::core::mem::MaybeUninit::uninit(); 16], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = 
"[method]outgoing-datagram-stream.check-send"] + #[link_name = "[method]tcp-socket.accept"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -13000,212 +15638,239 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { + let result7 = match l2 { 0 => { let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 + let l3 = *ptr0.add(4).cast::(); + let l4 = *ptr0.add(8).cast::(); + let l5 = *ptr0.add(12).cast::(); + ( + unsafe { TcpSocket::from_handle(l3 as u32) }, + unsafe { + super::super::super::wasi::io::streams::InputStream::from_handle( + l4 as u32, + ) + }, + unsafe { + super::super::super::wasi::io::streams::OutputStream::from_handle( + l5 as u32, + ) + }, + ) }; Ok(e) } 1 => { let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); + let l6 = i32::from(*ptr0.add(4).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, + l6 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result5 + result7 } } } - impl OutgoingDatagramStream { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn send( - &self, - datagrams: &[OutgoingDatagram], - ) -> Result { + pub fn local_address(&self) -> Result { unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 36]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], - ); - let vec7 = datagrams; - let len7 = vec7.len(); - let layout7 = _rt::alloc::Layout::from_size_align_unchecked( - vec7.len() * (32 + 3 * ::core::mem::size_of::<*const u8>()), - ::core::mem::size_of::<*const u8>(), + [::core::mem::MaybeUninit::uninit(); 36], ); - let result7 = if layout7.size() != 0 { - let ptr = _rt::alloc::alloc(layout7).cast::(); - if ptr.is_null() { - _rt::alloc::handle_alloc_error(layout7); - } - ptr - } else { - ::core::ptr::null_mut() - }; - for (i, e) in 
vec7.into_iter().enumerate() { - let base = result7 - .add(i * (32 + 3 * ::core::mem::size_of::<*const u8>())); - { - let OutgoingDatagram { - data: data0, - remote_address: remote_address0, - } = e; - let vec1 = data0; - let ptr1 = vec1.as_ptr().cast::(); - let len1 = vec1.len(); - *base - .add(::core::mem::size_of::<*const u8>()) - .cast::() = len1; - *base.add(0).cast::<*mut u8>() = ptr1.cast_mut(); - match remote_address0 { - Some(e) => { - *base - .add(2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (1i32) as u8; - use super::super::super::wasi::sockets::network::IpSocketAddress as V6; - match e { - V6::Ipv4(e) => { - *base - .add(4 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (0i32) as u8; - let super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: port2, - address: address2, - } = e; - *base - .add(8 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(port2)) as u16; - let (t3_0, t3_1, t3_2, t3_3) = address2; - *base - .add(10 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t3_0)) as u8; - *base - .add(11 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t3_1)) as u8; - *base - .add(12 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t3_2)) as u8; - *base - .add(13 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t3_3)) as u8; - } - V6::Ipv6(e) => { - *base - .add(4 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (1i32) as u8; - let super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: port4, - flow_info: flow_info4, - address: address4, - scope_id: scope_id4, - } = e; - *base - .add(8 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(port4)) as u16; - *base - .add(12 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = _rt::as_i32(flow_info4); - let (t5_0, t5_1, t5_2, t5_3, t5_4, t5_5, t5_6, t5_7) = address4; - *base - .add(16 + 2 * ::core::mem::size_of::<*const u8>()) 
- .cast::() = (_rt::as_i32(t5_0)) as u16; - *base - .add(18 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_1)) as u16; - *base - .add(20 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_2)) as u16; - *base - .add(22 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_3)) as u16; - *base - .add(24 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_4)) as u16; - *base - .add(26 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_5)) as u16; - *base - .add(28 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_6)) as u16; - *base - .add(30 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = (_rt::as_i32(t5_7)) as u16; - *base - .add(32 + 2 * ::core::mem::size_of::<*const u8>()) - .cast::() = _rt::as_i32(scope_id4); - } + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]tcp-socket.local-address"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result22 = match l2 { + 0 => { + let e = { + let l3 = i32::from(*ptr0.add(4).cast::()); + use super::super::super::wasi::sockets::network::IpSocketAddress as V20; + let v20 = match l3 { + 0 => { + let e20 = { + let l4 = i32::from(*ptr0.add(8).cast::()); + let l5 = i32::from(*ptr0.add(10).cast::()); + let l6 = i32::from(*ptr0.add(11).cast::()); + let l7 = i32::from(*ptr0.add(12).cast::()); + let l8 = i32::from(*ptr0.add(13).cast::()); + super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: l4 as u16, + address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), + } + }; + V20::Ipv4(e20) } - } - None => { - *base - .add(2 * 
::core::mem::size_of::<*const u8>()) - .cast::() = (0i32) as u8; - } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e20 = { + let l9 = i32::from(*ptr0.add(8).cast::()); + let l10 = *ptr0.add(12).cast::(); + let l11 = i32::from(*ptr0.add(16).cast::()); + let l12 = i32::from(*ptr0.add(18).cast::()); + let l13 = i32::from(*ptr0.add(20).cast::()); + let l14 = i32::from(*ptr0.add(22).cast::()); + let l15 = i32::from(*ptr0.add(24).cast::()); + let l16 = i32::from(*ptr0.add(26).cast::()); + let l17 = i32::from(*ptr0.add(28).cast::()); + let l18 = i32::from(*ptr0.add(30).cast::()); + let l19 = *ptr0.add(32).cast::(); + super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: l9 as u16, + flow_info: l10 as u32, + address: ( + l11 as u16, + l12 as u16, + l13 as u16, + l14 as u16, + l15 as u16, + l16 as u16, + l17 as u16, + l18 as u16, + ), + scope_id: l19 as u32, + } + }; + V20::Ipv6(e20) + } + }; + v20 }; + Ok(e) } - } - let ptr8 = ret_area.0.as_mut_ptr().cast::(); + 1 => { + let e = { + let l21 = i32::from(*ptr0.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l21 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result22 + } + } + } + impl TcpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn remote_address(&self) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 36]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 36], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]outgoing-datagram-stream.send"] - fn wit_import9(_: i32, _: *mut u8, _: usize, _: *mut u8); + #[link_name = "[method]tcp-socket.remote-address"] + fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn 
wit_import9( - _: i32, - _: *mut u8, - _: usize, - _: *mut u8, - ) { + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { unreachable!() } - unsafe { - wit_import9((self).handle() as i32, result7, len7, ptr8) - }; - let l10 = i32::from(*ptr8.add(0).cast::()); - let result13 = match l10 { + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result22 = match l2 { 0 => { let e = { - let l11 = *ptr8.add(8).cast::(); - l11 as u64 + let l3 = i32::from(*ptr0.add(4).cast::()); + use super::super::super::wasi::sockets::network::IpSocketAddress as V20; + let v20 = match l3 { + 0 => { + let e20 = { + let l4 = i32::from(*ptr0.add(8).cast::()); + let l5 = i32::from(*ptr0.add(10).cast::()); + let l6 = i32::from(*ptr0.add(11).cast::()); + let l7 = i32::from(*ptr0.add(12).cast::()); + let l8 = i32::from(*ptr0.add(13).cast::()); + super::super::super::wasi::sockets::network::Ipv4SocketAddress { + port: l4 as u16, + address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), + } + }; + V20::Ipv4(e20) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e20 = { + let l9 = i32::from(*ptr0.add(8).cast::()); + let l10 = *ptr0.add(12).cast::(); + let l11 = i32::from(*ptr0.add(16).cast::()); + let l12 = i32::from(*ptr0.add(18).cast::()); + let l13 = i32::from(*ptr0.add(20).cast::()); + let l14 = i32::from(*ptr0.add(22).cast::()); + let l15 = i32::from(*ptr0.add(24).cast::()); + let l16 = i32::from(*ptr0.add(26).cast::()); + let l17 = i32::from(*ptr0.add(28).cast::()); + let l18 = i32::from(*ptr0.add(30).cast::()); + let l19 = *ptr0.add(32).cast::(); + super::super::super::wasi::sockets::network::Ipv6SocketAddress { + port: l9 as u16, + flow_info: l10 as u32, + address: ( + l11 as u16, + l12 as u16, + l13 as u16, + l14 as u16, + l15 as u16, + l16 as u16, + l17 as u16, + l18 as u16, + ), + scope_id: l19 as u32, + } + }; + V20::Ipv6(e20) + } + }; + v20 }; Ok(e) } 1 => { let e = { - let l12 = 
i32::from(*ptr8.add(8).cast::()); + let l21 = i32::from(*ptr0.add(4).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l12 as u8, + l21 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - if layout7.size() != 0 { - _rt::alloc::dealloc(result7.cast(), layout7); - } - result13 + result22 } } } - impl OutgoingDatagramStream { + impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn subscribe(&self) -> Pollable { + pub fn is_listening(&self) -> bool { unsafe { #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/udp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]outgoing-datagram-stream.subscribe"] + #[link_name = "[method]tcp-socket.is-listening"] fn wit_import0(_: i32) -> i32; } #[cfg(not(target_arch = "wasm32"))] @@ -13213,168 +15878,132 @@ pub mod wasi { unreachable!() } let ret = unsafe { wit_import0((self).handle() as i32) }; - unsafe { - super::super::super::wasi::io::poll::Pollable::from_handle( - ret as u32, - ) - } + _rt::bool_lift(ret as u8) } } } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod udp_create_socket { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; - pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; - pub type UdpSocket = super::super::super::wasi::sockets::udp::UdpSocket; - #[allow(unused_unsafe, clippy::all)] - pub fn create_udp_socket( - address_family: IpAddressFamily, - ) -> Result { - unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 8]); - let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = 
"wasi:sockets/udp-create-socket@0.2.0")] - unsafe extern "C" { - #[link_name = "create-udp-socket"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1(address_family.clone() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(4).cast::(); - unsafe { - super::super::super::wasi::sockets::udp::UdpSocket::from_handle( - l3 as u32, - ) - } - }; - Ok(e) + impl TcpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn address_family(&self) -> IpAddressFamily { + unsafe { + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]tcp-socket.address-family"] + fn wit_import0(_: i32) -> i32; } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(4).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import0(_: i32) -> i32 { + unreachable!() } - _ => _rt::invalid_enum_discriminant(), - }; - result5 + let ret = unsafe { wit_import0((self).handle() as i32) }; + super::super::super::wasi::sockets::network::IpAddressFamily::_lift( + ret as u8, + ) + } } } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod tcp { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - pub type InputStream = super::super::super::wasi::io::streams::InputStream; - pub type OutputStream = super::super::super::wasi::io::streams::OutputStream; - pub type Pollable = super::super::super::wasi::io::poll::Pollable; - pub type Duration = super::super::super::wasi::clocks::monotonic_clock::Duration; - pub type Network = super::super::super::wasi::sockets::network::Network; - 
pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; - pub type IpSocketAddress = super::super::super::wasi::sockets::network::IpSocketAddress; - pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; - #[repr(u8)] - #[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd)] - pub enum ShutdownType { - Receive, - Send, - Both, - } - impl ::core::fmt::Debug for ShutdownType { - fn fmt( + impl TcpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn set_listen_backlog_size( &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - ShutdownType::Receive => { - f.debug_tuple("ShutdownType::Receive").finish() - } - ShutdownType::Send => { - f.debug_tuple("ShutdownType::Send").finish() + value: u64, + ) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]tcp-socket.set-listen-backlog-size"] + fn wit_import1(_: i32, _: i64, _: *mut u8); } - ShutdownType::Both => { - f.debug_tuple("ShutdownType::Both").finish() + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unreachable!() } + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i64(&value), + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) + } + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result4 } } } - impl ShutdownType { - #[doc(hidden)] - pub unsafe fn _lift(val: u8) -> ShutdownType { - if !cfg!(debug_assertions) { - return 
::core::mem::transmute(val); - } - match val { - 0 => ShutdownType::Receive, - 1 => ShutdownType::Send, - 2 => ShutdownType::Both, - _ => panic!("invalid enum discriminant"), - } - } - } - #[derive(Debug)] - #[repr(transparent)] - pub struct TcpSocket { - handle: _rt::Resource, - } impl TcpSocket { - #[doc(hidden)] - pub unsafe fn from_handle(handle: u32) -> Self { - Self { - handle: unsafe { _rt::Resource::from_handle(handle) }, - } - } - #[doc(hidden)] - pub fn take_handle(&self) -> u32 { - _rt::Resource::take_handle(&self.handle) - } - #[doc(hidden)] - pub fn handle(&self) -> u32 { - _rt::Resource::handle(&self.handle) - } - } - unsafe impl _rt::WasmResource for TcpSocket { - #[inline] - unsafe fn drop(_handle: u32) { - #[cfg(not(target_arch = "wasm32"))] - unreachable!(); - #[cfg(target_arch = "wasm32")] - { + #[allow(unused_unsafe, clippy::all)] + pub fn keep_alive_enabled(&self) -> Result { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[resource-drop]tcp-socket"] - fn drop(_: u32); + #[link_name = "[method]tcp-socket.keep-alive-enabled"] + fn wit_import1(_: i32, _: *mut u8); } - unsafe { drop(_handle) }; + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + _rt::bool_lift(l3 as u8) + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 } } } impl 
TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn start_bind( + pub fn set_keep_alive_enabled( &self, - network: &Network, - local_address: IpSocketAddress, + value: bool, ) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] @@ -13382,163 +16011,154 @@ pub mod wasi { let mut ret_area = RetArea( [::core::mem::MaybeUninit::uninit(); 2], ); - use super::super::super::wasi::sockets::network::IpSocketAddress as V4; - let ( - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ) = match local_address { - V4::Ipv4(e) => { - let super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: port0, - address: address0, - } = e; - let (t1_0, t1_1, t1_2, t1_3) = address0; - ( - 0i32, - _rt::as_i32(port0), - _rt::as_i32(t1_0), - _rt::as_i32(t1_1), - _rt::as_i32(t1_2), - _rt::as_i32(t1_3), - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - ) + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]tcp-socket.set-keep-alive-enabled"] + fn wit_import1(_: i32, _: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { + unreachable!() + } + unsafe { + wit_import1( + (self).handle() as i32, + match &value { + true => 1, + false => 0, + }, + ptr0, + ) + }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { + 0 => { + let e = (); + Ok(e) } - V4::Ipv6(e) => { - let super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: port2, - flow_info: flow_info2, - address: address2, - scope_id: scope_id2, - } = e; - let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; - ( - 1i32, - _rt::as_i32(port2), - _rt::as_i32(flow_info2), - _rt::as_i32(t3_0), - _rt::as_i32(t3_1), - _rt::as_i32(t3_2), - _rt::as_i32(t3_3), - _rt::as_i32(t3_4), 
- _rt::as_i32(t3_5), - _rt::as_i32(t3_6), - _rt::as_i32(t3_7), - _rt::as_i32(scope_id2), - ) + 1 => { + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l3 as u8, + ) + }; + Err(e) } + _ => _rt::invalid_enum_discriminant(), }; - let ptr6 = ret_area.0.as_mut_ptr().cast::(); + result4 + } + } + } + impl TcpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn keep_alive_idle_time(&self) -> Result { + unsafe { + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 16], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.start-bind"] - fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ); + #[link_name = "[method]tcp-socket.keep-alive-idle-time"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(8).cast::(); + l3 as u64 + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(8).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 + } + } + } + impl TcpSocket { + #[allow(unused_unsafe, clippy::all)] + pub fn set_keep_alive_idle_time( + &self, + value: Duration, + ) -> Result<(), ErrorCode> { + unsafe { + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2], + ); + let ptr0 = 
ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + unsafe extern "C" { + #[link_name = "[method]tcp-socket.set-keep-alive-idle-time"] + fn wit_import1(_: i32, _: i64, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ) { + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { unreachable!() } unsafe { - wit_import7( - (self).handle() as i32, - (network).handle() as i32, - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ptr6, - ) + wit_import1((self).handle() as i32, _rt::as_i64(value), ptr0) }; - let l8 = i32::from(*ptr6.add(0).cast::()); - let result10 = match l8 { + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { 0 => { let e = (); Ok(e) } 1 => { let e = { - let l9 = i32::from(*ptr6.add(1).cast::()); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l9 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result10 + result4 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn finish_bind(&self) -> Result<(), ErrorCode> { + pub fn keep_alive_interval(&self) -> Result { unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], + [::core::mem::MaybeUninit::uninit(); 16], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.finish-bind"] + #[link_name = 
"[method]tcp-socket.keep-alive-interval"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -13547,32 +16167,34 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { + let result5 = match l2 { 0 => { - let e = (); + let e = { + let l3 = *ptr0.add(8).cast::(); + l3 as u64 + }; Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); + let l4 = i32::from(*ptr0.add(8).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, + l4 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 + result5 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn start_connect( + pub fn set_keep_alive_interval( &self, - network: &Network, - remote_address: IpSocketAddress, + value: Duration, ) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] @@ -13580,165 +16202,55 @@ pub mod wasi { let mut ret_area = RetArea( [::core::mem::MaybeUninit::uninit(); 2], ); - use super::super::super::wasi::sockets::network::IpSocketAddress as V4; - let ( - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ) = match remote_address { - V4::Ipv4(e) => { - let super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: port0, - address: address0, - } = e; - let (t1_0, t1_1, t1_2, t1_3) = address0; - ( - 0i32, - _rt::as_i32(port0), - _rt::as_i32(t1_0), - _rt::as_i32(t1_1), - _rt::as_i32(t1_2), - _rt::as_i32(t1_3), - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - 0i32, - ) - } - V4::Ipv6(e) => { - let super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: port2, - flow_info: flow_info2, - address: address2, - scope_id: scope_id2, - } = e; - let (t3_0, t3_1, t3_2, t3_3, t3_4, t3_5, t3_6, t3_7) = address2; - ( - 1i32, - _rt::as_i32(port2), - _rt::as_i32(flow_info2), - _rt::as_i32(t3_0), - 
_rt::as_i32(t3_1), - _rt::as_i32(t3_2), - _rt::as_i32(t3_3), - _rt::as_i32(t3_4), - _rt::as_i32(t3_5), - _rt::as_i32(t3_6), - _rt::as_i32(t3_7), - _rt::as_i32(scope_id2), - ) - } - }; - let ptr6 = ret_area.0.as_mut_ptr().cast::(); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.start-connect"] - fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ); + #[link_name = "[method]tcp-socket.set-keep-alive-interval"] + fn wit_import1(_: i32, _: i64, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import7( - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: i32, - _: *mut u8, - ) { + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { unreachable!() } unsafe { - wit_import7( - (self).handle() as i32, - (network).handle() as i32, - result5_0, - result5_1, - result5_2, - result5_3, - result5_4, - result5_5, - result5_6, - result5_7, - result5_8, - result5_9, - result5_10, - result5_11, - ptr6, - ) + wit_import1((self).handle() as i32, _rt::as_i64(value), ptr0) }; - let l8 = i32::from(*ptr6.add(0).cast::()); - let result10 = match l8 { + let l2 = i32::from(*ptr0.add(0).cast::()); + let result4 = match l2 { 0 => { let e = (); Ok(e) } 1 => { let e = { - let l9 = i32::from(*ptr6.add(1).cast::()); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l9 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result10 + result4 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn finish_connect( - &self, - ) -> Result<(InputStream, OutputStream), ErrorCode> { + pub fn keep_alive_count(&self) -> Result { unsafe { 
#[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 12]); + struct RetArea([::core::mem::MaybeUninit; 8]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 12], + [::core::mem::MaybeUninit::uninit(); 8], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.finish-connect"] + #[link_name = "[method]tcp-socket.keep-alive-count"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -13747,44 +16259,32 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result6 = match l2 { + let result5 = match l2 { 0 => { let e = { let l3 = *ptr0.add(4).cast::(); - let l4 = *ptr0.add(8).cast::(); - ( - unsafe { - super::super::super::wasi::io::streams::InputStream::from_handle( - l3 as u32, - ) - }, - unsafe { - super::super::super::wasi::io::streams::OutputStream::from_handle( - l4 as u32, - ) - }, - ) + l3 as u32 }; Ok(e) } 1 => { let e = { - let l5 = i32::from(*ptr0.add(4).cast::()); + let l4 = i32::from(*ptr0.add(4).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l5 as u8, + l4 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result6 + result5 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn start_listen(&self) -> Result<(), ErrorCode> { + pub fn set_keep_alive_count(&self, value: u32) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] struct RetArea([::core::mem::MaybeUninit; 2]); @@ -13795,14 +16295,20 @@ pub mod wasi { #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.start-listen"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]tcp-socket.set-keep-alive-count"] + fn wit_import1(_: i32, _: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - 
unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i32(&value), + ptr0, + ) + }; let l2 = i32::from(*ptr0.add(0).cast::()); let result4 = match l2 { 0 => { @@ -13826,7 +16332,7 @@ pub mod wasi { } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn finish_listen(&self) -> Result<(), ErrorCode> { + pub fn hop_limit(&self) -> Result { unsafe { #[repr(align(1))] struct RetArea([::core::mem::MaybeUninit; 2]); @@ -13837,7 +16343,7 @@ pub mod wasi { #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.finish-listen"] + #[link_name = "[method]tcp-socket.hop-limit"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -13846,101 +16352,91 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { + let result5 = match l2 { 0 => { - let e = (); + let e = { + let l3 = i32::from(*ptr0.add(1).cast::()); + l3 as u8 + }; Ok(e) } 1 => { let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); + let l4 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, + l4 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result4 + result5 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn accept( - &self, - ) -> Result<(TcpSocket, InputStream, OutputStream), ErrorCode> { + pub fn set_hop_limit(&self, value: u8) -> Result<(), ErrorCode> { unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 16]); + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], + [::core::mem::MaybeUninit::uninit(); 2], ); 
let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.accept"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]tcp-socket.set-hop-limit"] + fn wit_import1(_: i32, _: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i32(&value), + ptr0, + ) + }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result7 = match l2 { + let result4 = match l2 { 0 => { - let e = { - let l3 = *ptr0.add(4).cast::(); - let l4 = *ptr0.add(8).cast::(); - let l5 = *ptr0.add(12).cast::(); - ( - unsafe { TcpSocket::from_handle(l3 as u32) }, - unsafe { - super::super::super::wasi::io::streams::InputStream::from_handle( - l4 as u32, - ) - }, - unsafe { - super::super::super::wasi::io::streams::OutputStream::from_handle( - l5 as u32, - ) - }, - ) - }; + let e = (); Ok(e) } 1 => { let e = { - let l6 = i32::from(*ptr0.add(4).cast::()); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l6 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result7 + result4 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn local_address(&self) -> Result { + pub fn receive_buffer_size(&self) -> Result { unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 36]); + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 36], + [::core::mem::MaybeUninit::uninit(); 16], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" 
{ - #[link_name = "[method]tcp-socket.local-address"] + #[link_name = "[method]tcp-socket.receive-buffer-size"] fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -13949,218 +16445,128 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result22 = match l2 { + let result5 = match l2 { 0 => { let e = { - let l3 = i32::from(*ptr0.add(4).cast::()); - use super::super::super::wasi::sockets::network::IpSocketAddress as V20; - let v20 = match l3 { - 0 => { - let e20 = { - let l4 = i32::from(*ptr0.add(8).cast::()); - let l5 = i32::from(*ptr0.add(10).cast::()); - let l6 = i32::from(*ptr0.add(11).cast::()); - let l7 = i32::from(*ptr0.add(12).cast::()); - let l8 = i32::from(*ptr0.add(13).cast::()); - super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: l4 as u16, - address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), - } - }; - V20::Ipv4(e20) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e20 = { - let l9 = i32::from(*ptr0.add(8).cast::()); - let l10 = *ptr0.add(12).cast::(); - let l11 = i32::from(*ptr0.add(16).cast::()); - let l12 = i32::from(*ptr0.add(18).cast::()); - let l13 = i32::from(*ptr0.add(20).cast::()); - let l14 = i32::from(*ptr0.add(22).cast::()); - let l15 = i32::from(*ptr0.add(24).cast::()); - let l16 = i32::from(*ptr0.add(26).cast::()); - let l17 = i32::from(*ptr0.add(28).cast::()); - let l18 = i32::from(*ptr0.add(30).cast::()); - let l19 = *ptr0.add(32).cast::(); - super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: l9 as u16, - flow_info: l10 as u32, - address: ( - l11 as u16, - l12 as u16, - l13 as u16, - l14 as u16, - l15 as u16, - l16 as u16, - l17 as u16, - l18 as u16, - ), - scope_id: l19 as u32, - } - }; - V20::Ipv6(e20) - } - }; - v20 + let l3 = *ptr0.add(8).cast::(); + l3 as u64 }; Ok(e) } 1 => { let e = { - let l21 = i32::from(*ptr0.add(4).cast::()); + let l4 = 
i32::from(*ptr0.add(8).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l21 as u8, + l4 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result22 + result5 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn remote_address(&self) -> Result { + pub fn set_receive_buffer_size( + &self, + value: u64, + ) -> Result<(), ErrorCode> { unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 36]); + #[repr(align(1))] + struct RetArea([::core::mem::MaybeUninit; 2]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 36], + [::core::mem::MaybeUninit::uninit(); 2], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.remote-address"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]tcp-socket.set-receive-buffer-size"] + fn wit_import1(_: i32, _: i64, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; + unsafe { + wit_import1( + (self).handle() as i32, + _rt::as_i64(&value), + ptr0, + ) + }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result22 = match l2 { + let result4 = match l2 { 0 => { - let e = { - let l3 = i32::from(*ptr0.add(4).cast::()); - use super::super::super::wasi::sockets::network::IpSocketAddress as V20; - let v20 = match l3 { - 0 => { - let e20 = { - let l4 = i32::from(*ptr0.add(8).cast::()); - let l5 = i32::from(*ptr0.add(10).cast::()); - let l6 = i32::from(*ptr0.add(11).cast::()); - let l7 = i32::from(*ptr0.add(12).cast::()); - let l8 = i32::from(*ptr0.add(13).cast::()); - super::super::super::wasi::sockets::network::Ipv4SocketAddress { - port: l4 as u16, - address: (l5 as u8, l6 as u8, l7 as u8, l8 as u8), - } - }; - 
V20::Ipv4(e20) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e20 = { - let l9 = i32::from(*ptr0.add(8).cast::()); - let l10 = *ptr0.add(12).cast::(); - let l11 = i32::from(*ptr0.add(16).cast::()); - let l12 = i32::from(*ptr0.add(18).cast::()); - let l13 = i32::from(*ptr0.add(20).cast::()); - let l14 = i32::from(*ptr0.add(22).cast::()); - let l15 = i32::from(*ptr0.add(24).cast::()); - let l16 = i32::from(*ptr0.add(26).cast::()); - let l17 = i32::from(*ptr0.add(28).cast::()); - let l18 = i32::from(*ptr0.add(30).cast::()); - let l19 = *ptr0.add(32).cast::(); - super::super::super::wasi::sockets::network::Ipv6SocketAddress { - port: l9 as u16, - flow_info: l10 as u32, - address: ( - l11 as u16, - l12 as u16, - l13 as u16, - l14 as u16, - l15 as u16, - l16 as u16, - l17 as u16, - l18 as u16, - ), - scope_id: l19 as u32, - } - }; - V20::Ipv6(e20) - } - }; - v20 - }; + let e = (); Ok(e) } 1 => { let e = { - let l21 = i32::from(*ptr0.add(4).cast::()); + let l3 = i32::from(*ptr0.add(1).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l21 as u8, + l3 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result22 - } - } - } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn is_listening(&self) -> bool { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.is-listening"] - fn wit_import0(_: i32) -> i32; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { - unreachable!() - } - let ret = unsafe { wit_import0((self).handle() as i32) }; - _rt::bool_lift(ret as u8) + result4 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn address_family(&self) -> IpAddressFamily { + pub fn send_buffer_size(&self) -> Result { unsafe { + #[repr(align(8))] + struct RetArea([::core::mem::MaybeUninit; 16]); + let mut ret_area = RetArea( + 
[::core::mem::MaybeUninit::uninit(); 16], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.address-family"] - fn wit_import0(_: i32) -> i32; + #[link_name = "[method]tcp-socket.send-buffer-size"] + fn wit_import1(_: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { unreachable!() } - let ret = unsafe { wit_import0((self).handle() as i32) }; - super::super::super::wasi::sockets::network::IpAddressFamily::_lift( - ret as u8, - ) + unsafe { wit_import1((self).handle() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(8).cast::(); + l3 as u64 + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(8).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn set_listen_backlog_size( - &self, - value: u64, - ) -> Result<(), ErrorCode> { + pub fn set_send_buffer_size(&self, value: u64) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] struct RetArea([::core::mem::MaybeUninit; 2]); @@ -14171,7 +16577,7 @@ pub mod wasi { #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-listen-backlog-size"] + #[link_name = "[method]tcp-socket.set-send-buffer-size"] fn wit_import1(_: i32, _: i64, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -14208,54 +16614,32 @@ pub mod wasi { } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn keep_alive_enabled(&self) -> Result { + pub fn subscribe(&self) -> Pollable { unsafe { - #[repr(align(1))] - struct 
RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.keep-alive-enabled"] - fn wit_import1(_: i32, _: *mut u8); + #[link_name = "[method]tcp-socket.subscribe"] + fn wit_import0(_: i32) -> i32; } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unsafe extern "C" fn wit_import0(_: i32) -> i32 { unreachable!() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - _rt::bool_lift(l3 as u8) - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 + let ret = unsafe { wit_import0((self).handle() as i32) }; + unsafe { + super::super::super::wasi::io::poll::Pollable::from_handle( + ret as u32, + ) + } } } } impl TcpSocket { #[allow(unused_unsafe, clippy::all)] - pub fn set_keep_alive_enabled( + pub fn shutdown( &self, - value: bool, + shutdown_type: ShutdownType, ) -> Result<(), ErrorCode> { unsafe { #[repr(align(1))] @@ -14267,7 +16651,7 @@ pub mod wasi { #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-keep-alive-enabled"] + #[link_name = "[method]tcp-socket.shutdown"] fn wit_import1(_: i32, _: i32, _: *mut u8); } #[cfg(not(target_arch = "wasm32"))] @@ -14277,10 +16661,7 @@ pub mod wasi { unsafe { wit_import1( (self).handle() as i32, - match &value { - true => 1, - false => 0, - }, + shutdown_type.clone() as i32, ptr0, ) }; @@ -14305,20 +16686,127 @@ pub mod wasi 
{ } } } - impl TcpSocket { + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod tcp_create_socket { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; + pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; + pub type TcpSocket = super::super::super::wasi::sockets::tcp::TcpSocket; + #[allow(unused_unsafe, clippy::all)] + pub fn create_tcp_socket( + address_family: IpAddressFamily, + ) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 8]); + let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/tcp-create-socket@0.2.0")] + unsafe extern "C" { + #[link_name = "create-tcp-socket"] + fn wit_import1(_: i32, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { + unreachable!() + } + unsafe { wit_import1(address_family.clone() as i32, ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + let result5 = match l2 { + 0 => { + let e = { + let l3 = *ptr0.add(4).cast::(); + unsafe { + super::super::super::wasi::sockets::tcp::TcpSocket::from_handle( + l3 as u32, + ) + } + }; + Ok(e) + } + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l4 as u8, + ) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result5 + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod ip_name_lookup { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Pollable = 
super::super::super::wasi::io::poll::Pollable; + pub type Network = super::super::super::wasi::sockets::network::Network; + pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; + pub type IpAddress = super::super::super::wasi::sockets::network::IpAddress; + #[derive(Debug)] + #[repr(transparent)] + pub struct ResolveAddressStream { + handle: _rt::Resource, + } + impl ResolveAddressStream { + #[doc(hidden)] + pub unsafe fn from_handle(handle: u32) -> Self { + Self { + handle: unsafe { _rt::Resource::from_handle(handle) }, + } + } + #[doc(hidden)] + pub fn take_handle(&self) -> u32 { + _rt::Resource::take_handle(&self.handle) + } + #[doc(hidden)] + pub fn handle(&self) -> u32 { + _rt::Resource::handle(&self.handle) + } + } + unsafe impl _rt::WasmResource for ResolveAddressStream { + #[inline] + unsafe fn drop(_handle: u32) { + #[cfg(not(target_arch = "wasm32"))] + unreachable!(); + #[cfg(target_arch = "wasm32")] + { + #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] + unsafe extern "C" { + #[link_name = "[resource-drop]resolve-address-stream"] + fn drop(_: u32); + } + unsafe { drop(_handle) }; + } + } + } + impl ResolveAddressStream { #[allow(unused_unsafe, clippy::all)] - pub fn keep_alive_idle_time(&self) -> Result { + pub fn resolve_next_address( + &self, + ) -> Result, ErrorCode> { unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); + #[repr(align(2))] + struct RetArea([::core::mem::MaybeUninit; 22]); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], + [::core::mem::MaybeUninit::uninit(); 22], ); let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.keep-alive-idle-time"] + #[link_name = "[method]resolve-address-stream.resolve-next-address"] fn wit_import1(_: i32, _: *mut 
u8); } #[cfg(not(target_arch = "wasm32"))] @@ -14327,1410 +16815,2567 @@ pub mod wasi { } unsafe { wit_import1((self).handle() as i32, ptr0) }; let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { + let result19 = match l2 { 0 => { let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 + let l3 = i32::from(*ptr0.add(2).cast::()); + match l3 { + 0 => None, + 1 => { + let e = { + let l4 = i32::from(*ptr0.add(4).cast::()); + use super::super::super::wasi::sockets::network::IpAddress as V17; + let v17 = match l4 { + 0 => { + let e17 = { + let l5 = i32::from(*ptr0.add(6).cast::()); + let l6 = i32::from(*ptr0.add(7).cast::()); + let l7 = i32::from(*ptr0.add(8).cast::()); + let l8 = i32::from(*ptr0.add(9).cast::()); + (l5 as u8, l6 as u8, l7 as u8, l8 as u8) + }; + V17::Ipv4(e17) + } + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e17 = { + let l9 = i32::from(*ptr0.add(6).cast::()); + let l10 = i32::from(*ptr0.add(8).cast::()); + let l11 = i32::from(*ptr0.add(10).cast::()); + let l12 = i32::from(*ptr0.add(12).cast::()); + let l13 = i32::from(*ptr0.add(14).cast::()); + let l14 = i32::from(*ptr0.add(16).cast::()); + let l15 = i32::from(*ptr0.add(18).cast::()); + let l16 = i32::from(*ptr0.add(20).cast::()); + ( + l9 as u16, + l10 as u16, + l11 as u16, + l12 as u16, + l13 as u16, + l14 as u16, + l15 as u16, + l16 as u16, + ) + }; + V17::Ipv6(e17) + } + }; + v17 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + } }; Ok(e) } 1 => { let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); + let l18 = i32::from(*ptr0.add(2).cast::()); super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, + l18 as u8, ) }; Err(e) } _ => _rt::invalid_enum_discriminant(), }; - result5 + result19 } } } - impl TcpSocket { + impl ResolveAddressStream { #[allow(unused_unsafe, clippy::all)] - pub fn set_keep_alive_idle_time( - &self, - value: Duration, - ) -> Result<(), ErrorCode> { + pub fn subscribe(&self) -> Pollable { unsafe { - 
#[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] + #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-keep-alive-idle-time"] - fn wit_import1(_: i32, _: i64, _: *mut u8); + #[link_name = "[method]resolve-address-stream.subscribe"] + fn wit_import0(_: i32) -> i32; } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { + unsafe extern "C" fn wit_import0(_: i32) -> i32 { unreachable!() } + let ret = unsafe { wit_import0((self).handle() as i32) }; unsafe { - wit_import1((self).handle() as i32, _rt::as_i64(value), ptr0) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 + super::super::super::wasi::io::poll::Pollable::from_handle( + ret as u32, + ) + } } } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn keep_alive_interval(&self) -> Result { + #[allow(unused_unsafe, clippy::all)] + pub fn resolve_addresses( + network: &Network, + name: &str, + ) -> Result { + unsafe { + #[repr(align(4))] + struct RetArea([::core::mem::MaybeUninit; 8]); + let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); + let vec0 = name; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] + unsafe extern "C" { + #[link_name = "resolve-addresses"] + fn wit_import2(_: i32, _: *mut u8, 
_: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2( + _: i32, + _: *mut u8, + _: usize, + _: *mut u8, + ) { + unreachable!() + } unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.keep-alive-interval"] - fn wit_import1(_: i32, _: *mut u8); + wit_import2( + (network).handle() as i32, + ptr0.cast_mut(), + len0, + ptr1, + ) + }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result6 = match l3 { + 0 => { + let e = { + let l4 = *ptr1.add(4).cast::(); + unsafe { ResolveAddressStream::from_handle(l4 as u32) } + }; + Ok(e) } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() + 1 => { + let e = { + let l5 = i32::from(*ptr1.add(4).cast::()); + super::super::super::wasi::sockets::network::ErrorCode::_lift( + l5 as u8, + ) + }; + Err(e) } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 + _ => _rt::invalid_enum_discriminant(), + }; + result6 + } + } + } + } +} +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod wavs { + pub mod operator { + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod input { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + pub type ServiceId = 
super::super::super::wavs::types::service::ServiceId; + pub type WorkflowId = super::super::super::wavs::types::service::WorkflowId; + pub type Trigger = super::super::super::wavs::types::service::Trigger; + pub type TriggerData = super::super::super::wavs::types::events::TriggerData; + #[derive(Clone)] + pub struct TriggerConfig { + pub service_id: ServiceId, + pub workflow_id: WorkflowId, + pub trigger: Trigger, + } + impl ::core::fmt::Debug for TriggerConfig { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerConfig") + .field("service-id", &self.service_id) + .field("workflow-id", &self.workflow_id) + .field("trigger", &self.trigger) + .finish() + } + } + #[derive(Clone)] + pub struct TriggerAction { + pub config: TriggerConfig, + pub data: TriggerData, + } + impl ::core::fmt::Debug for TriggerAction { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerAction") + .field("config", &self.config) + .field("data", &self.data) + .finish() + } + } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod output { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + #[derive(Clone)] + pub struct WasmResponse { + pub payload: _rt::Vec, + pub ordering: Option, + } + impl ::core::fmt::Debug for WasmResponse { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("WasmResponse") + .field("payload", &self.payload) + .field("ordering", &self.ordering) + .finish() + } + } + } + } + pub mod types { + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod core { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Digest = _rt::String; + 
#[repr(C)] + #[derive(Clone, Copy)] + pub struct Timestamp { + pub nanos: u64, + } + impl ::core::fmt::Debug for Timestamp { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Timestamp").field("nanos", &self.nanos).finish() + } + } + #[derive(Clone, Copy)] + pub enum LogLevel { + Error, + Warn, + Info, + Debug, + Trace, + } + impl ::core::fmt::Debug for LogLevel { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + LogLevel::Error => f.debug_tuple("LogLevel::Error").finish(), + LogLevel::Warn => f.debug_tuple("LogLevel::Warn").finish(), + LogLevel::Info => f.debug_tuple("LogLevel::Info").finish(), + LogLevel::Debug => f.debug_tuple("LogLevel::Debug").finish(), + LogLevel::Trace => f.debug_tuple("LogLevel::Trace").finish(), } } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_keep_alive_interval( + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod chain { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type ChainKey = _rt::String; + pub type EvmTxHash = _rt::Vec; + #[derive(Clone)] + pub struct CosmosAddress { + pub bech32_addr: _rt::String, + pub prefix_len: u32, + } + impl ::core::fmt::Debug for CosmosAddress { + fn fmt( &self, - value: Duration, - ) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-keep-alive-interval"] - fn wit_import1(_: i32, _: i64, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: 
*mut u8) { - unreachable!() - } - unsafe { - wit_import1((self).handle() as i32, _rt::as_i64(value), ptr0) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 - } + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("CosmosAddress") + .field("bech32-addr", &self.bech32_addr) + .field("prefix-len", &self.prefix_len) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn keep_alive_count(&self) -> Result { - unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 8]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 8], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.keep-alive-count"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(4).cast::(); - l3 as u32 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(4).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 - } + #[derive(Clone)] + pub struct CosmosEvent { + pub ty: _rt::String, + pub attributes: _rt::Vec<(_rt::String, _rt::String)>, + } + impl ::core::fmt::Debug for CosmosEvent { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + 
f.debug_struct("CosmosEvent") + .field("ty", &self.ty) + .field("attributes", &self.attributes) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_keep_alive_count(&self, value: u32) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-keep-alive-count"] - fn wit_import1(_: i32, _: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { - unreachable!() - } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i32(&value), - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 - } + #[derive(Clone)] + pub struct CosmosChainConfig { + pub chain_id: _rt::String, + pub rpc_endpoint: Option<_rt::String>, + pub grpc_endpoint: Option<_rt::String>, + pub grpc_web_endpoint: Option<_rt::String>, + pub gas_price: f32, + pub gas_denom: _rt::String, + pub bech32_prefix: _rt::String, + } + impl ::core::fmt::Debug for CosmosChainConfig { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("CosmosChainConfig") + .field("chain-id", &self.chain_id) + .field("rpc-endpoint", &self.rpc_endpoint) + .field("grpc-endpoint", &self.grpc_endpoint) + .field("grpc-web-endpoint", &self.grpc_web_endpoint) + .field("gas-price", &self.gas_price) + .field("gas-denom", &self.gas_denom) + .field("bech32-prefix", 
&self.bech32_prefix) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn hop_limit(&self) -> Result { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.hop-limit"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - l3 as u8 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 - } + #[derive(Clone)] + pub struct EvmAddress { + pub raw_bytes: _rt::Vec, + } + impl ::core::fmt::Debug for EvmAddress { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("EvmAddress") + .field("raw-bytes", &self.raw_bytes) + .finish() + } + } + #[derive(Clone)] + pub struct EvmEventLogData { + pub topics: _rt::Vec<_rt::Vec>, + pub data: _rt::Vec, + } + impl ::core::fmt::Debug for EvmEventLogData { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("EvmEventLogData") + .field("topics", &self.topics) + .field("data", &self.data) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_hop_limit(&self, value: u8) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - 
[::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-hop-limit"] - fn wit_import1(_: i32, _: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { - unreachable!() - } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i32(&value), - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 - } + #[derive(Clone)] + pub struct EvmEventLog { + pub address: EvmAddress, + pub data: EvmEventLogData, + pub tx_hash: EvmTxHash, + pub block_number: u64, + pub log_index: u64, + pub block_hash: _rt::Vec, + pub block_timestamp: Option, + pub tx_index: u64, + } + impl ::core::fmt::Debug for EvmEventLog { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("EvmEventLog") + .field("address", &self.address) + .field("data", &self.data) + .field("tx-hash", &self.tx_hash) + .field("block-number", &self.block_number) + .field("log-index", &self.log_index) + .field("block-hash", &self.block_hash) + .field("block-timestamp", &self.block_timestamp) + .field("tx-index", &self.tx_index) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn receive_buffer_size(&self) -> Result { - unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = 
"wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.receive-buffer-size"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 - } + #[derive(Clone)] + pub struct EvmChainConfig { + pub chain_id: _rt::String, + pub ws_endpoint: Option<_rt::String>, + pub http_endpoint: Option<_rt::String>, + } + impl ::core::fmt::Debug for EvmChainConfig { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("EvmChainConfig") + .field("chain-id", &self.chain_id) + .field("ws-endpoint", &self.ws_endpoint) + .field("http-endpoint", &self.http_endpoint) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_receive_buffer_size( + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod service { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type Digest = super::super::super::wavs::types::core::Digest; + pub type Timestamp = super::super::super::wavs::types::core::Timestamp; + pub type ChainKey = super::super::super::wavs::types::chain::ChainKey; + pub type EvmAddress = super::super::super::wavs::types::chain::EvmAddress; + pub type CosmosAddress = super::super::super::wavs::types::chain::CosmosAddress; + pub type ServiceId = _rt::String; + pub type WorkflowId = _rt::String; + pub type PackageRef = 
_rt::String; + pub type SemverVersion = _rt::String; + #[derive(Clone, Copy)] + pub enum ServiceStatus { + Active, + Paused, + } + impl ::core::fmt::Debug for ServiceStatus { + fn fmt( &self, - value: u64, - ) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-receive-buffer-size"] - fn wit_import1(_: i32, _: i64, _: *mut u8); + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + ServiceStatus::Active => { + f.debug_tuple("ServiceStatus::Active").finish() } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { - unreachable!() + ServiceStatus::Paused => { + f.debug_tuple("ServiceStatus::Paused").finish() } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i64(&value), - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 } } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn send_buffer_size(&self) -> Result { - unsafe { - #[repr(align(8))] - struct RetArea([::core::mem::MaybeUninit; 16]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 16], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.send-buffer-size"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = 
"wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(8).cast::(); - l3 as u64 - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(8).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 - } + #[derive(Clone)] + pub struct EvmManager { + pub chain: ChainKey, + pub address: EvmAddress, + } + impl ::core::fmt::Debug for EvmManager { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("EvmManager") + .field("chain", &self.chain) + .field("address", &self.address) + .finish() } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn set_send_buffer_size(&self, value: u64) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.set-send-buffer-size"] - fn wit_import1(_: i32, _: i64, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i64, _: *mut u8) { - unreachable!() + #[derive(Clone)] + pub enum ServiceManager { + Evm(EvmManager), + } + impl ::core::fmt::Debug for ServiceManager { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + ServiceManager::Evm(e) => { + f.debug_tuple("ServiceManager::Evm").field(e).finish() } - unsafe { - wit_import1( - (self).handle() as i32, - _rt::as_i64(&value), - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let 
result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 } } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn subscribe(&self) -> Pollable { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.subscribe"] - fn wit_import0(_: i32) -> i32; + #[derive(Clone)] + pub struct ComponentSourceDownload { + pub url: _rt::String, + pub digest: Digest, + } + impl ::core::fmt::Debug for ComponentSourceDownload { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("ComponentSourceDownload") + .field("url", &self.url) + .field("digest", &self.digest) + .finish() + } + } + #[derive(Clone)] + pub struct Registry { + pub digest: Digest, + pub domain: Option<_rt::String>, + pub version: Option, + pub pkg: PackageRef, + } + impl ::core::fmt::Debug for Registry { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Registry") + .field("digest", &self.digest) + .field("domain", &self.domain) + .field("version", &self.version) + .field("pkg", &self.pkg) + .finish() + } + } + #[derive(Clone)] + pub enum ComponentSource { + Download(ComponentSourceDownload), + Registry(Registry), + Digest(Digest), + } + impl ::core::fmt::Debug for ComponentSource { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + ComponentSource::Download(e) => { + f.debug_tuple("ComponentSource::Download").field(e).finish() } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { - unreachable!() + ComponentSource::Registry(e) => { + f.debug_tuple("ComponentSource::Registry").field(e).finish() } 
- let ret = unsafe { wit_import0((self).handle() as i32) }; - unsafe { - super::super::super::wasi::io::poll::Pollable::from_handle( - ret as u32, - ) + ComponentSource::Digest(e) => { + f.debug_tuple("ComponentSource::Digest").field(e).finish() } } } } - impl TcpSocket { - #[allow(unused_unsafe, clippy::all)] - pub fn shutdown( + #[derive(Clone)] + pub enum AllowedHostPermission { + All, + Only(_rt::Vec<_rt::String>), + None, + } + impl ::core::fmt::Debug for AllowedHostPermission { + fn fmt( &self, - shutdown_type: ShutdownType, - ) -> Result<(), ErrorCode> { - unsafe { - #[repr(align(1))] - struct RetArea([::core::mem::MaybeUninit; 2]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 2], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]tcp-socket.shutdown"] - fn wit_import1(_: i32, _: i32, _: *mut u8); + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + AllowedHostPermission::All => { + f.debug_tuple("AllowedHostPermission::All").finish() } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: i32, _: *mut u8) { - unreachable!() + AllowedHostPermission::Only(e) => { + f.debug_tuple("AllowedHostPermission::Only") + .field(e) + .finish() + } + AllowedHostPermission::None => { + f.debug_tuple("AllowedHostPermission::None").finish() } - unsafe { - wit_import1( - (self).handle() as i32, - shutdown_type.clone() as i32, - ptr0, - ) - }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result4 = match l2 { - 0 => { - let e = (); - Ok(e) - } - 1 => { - let e = { - let l3 = i32::from(*ptr0.add(1).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l3 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result4 } } } - } - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod 
tcp_create_socket { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; - pub type IpAddressFamily = super::super::super::wasi::sockets::network::IpAddressFamily; - pub type TcpSocket = super::super::super::wasi::sockets::tcp::TcpSocket; - #[allow(unused_unsafe, clippy::all)] - pub fn create_tcp_socket( - address_family: IpAddressFamily, - ) -> Result { - unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 8]); - let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/tcp-create-socket@0.2.0")] - unsafe extern "C" { - #[link_name = "create-tcp-socket"] - fn wit_import1(_: i32, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() - } - unsafe { wit_import1(address_family.clone() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result5 = match l2 { - 0 => { - let e = { - let l3 = *ptr0.add(4).cast::(); - unsafe { - super::super::super::wasi::sockets::tcp::TcpSocket::from_handle( - l3 as u32, - ) - } - }; - Ok(e) - } - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(4).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l4 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result5 + #[derive(Clone)] + pub struct Permissions { + pub allowed_http_hosts: AllowedHostPermission, + pub file_system: bool, + } + impl ::core::fmt::Debug for Permissions { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Permissions") + .field("allowed-http-hosts", &self.allowed_http_hosts) + .field("file-system", &self.file_system) + .finish() } } - } - 
#[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod ip_name_lookup { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; - pub type Pollable = super::super::super::wasi::io::poll::Pollable; - pub type Network = super::super::super::wasi::sockets::network::Network; - pub type ErrorCode = super::super::super::wasi::sockets::network::ErrorCode; - pub type IpAddress = super::super::super::wasi::sockets::network::IpAddress; - #[derive(Debug)] - #[repr(transparent)] - pub struct ResolveAddressStream { - handle: _rt::Resource, + #[derive(Clone)] + pub struct Component { + pub source: ComponentSource, + pub permissions: Permissions, + pub fuel_limit: Option, + pub time_limit_seconds: Option, + pub config: _rt::Vec<(_rt::String, _rt::String)>, + pub env_keys: _rt::Vec<_rt::String>, + } + impl ::core::fmt::Debug for Component { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("Component") + .field("source", &self.source) + .field("permissions", &self.permissions) + .field("fuel-limit", &self.fuel_limit) + .field("time-limit-seconds", &self.time_limit_seconds) + .field("config", &self.config) + .field("env-keys", &self.env_keys) + .finish() + } } - impl ResolveAddressStream { - #[doc(hidden)] - pub unsafe fn from_handle(handle: u32) -> Self { - Self { - handle: unsafe { _rt::Resource::from_handle(handle) }, - } + #[derive(Clone)] + pub struct TriggerEvmContractEvent { + pub address: EvmAddress, + pub chain: ChainKey, + pub event_hash: _rt::Vec, + } + impl ::core::fmt::Debug for TriggerEvmContractEvent { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerEvmContractEvent") + .field("address", &self.address) + .field("chain", &self.chain) + .field("event-hash", &self.event_hash) + .finish() } - #[doc(hidden)] - pub fn 
take_handle(&self) -> u32 { - _rt::Resource::take_handle(&self.handle) + } + #[derive(Clone)] + pub struct TriggerCosmosContractEvent { + pub address: CosmosAddress, + pub chain: ChainKey, + pub event_type: _rt::String, + } + impl ::core::fmt::Debug for TriggerCosmosContractEvent { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerCosmosContractEvent") + .field("address", &self.address) + .field("chain", &self.chain) + .field("event-type", &self.event_type) + .finish() } - #[doc(hidden)] - pub fn handle(&self) -> u32 { - _rt::Resource::handle(&self.handle) + } + #[derive(Clone)] + pub struct TriggerBlockInterval { + pub chain: ChainKey, + pub n_blocks: u32, + pub start_block: Option, + pub end_block: Option, + } + impl ::core::fmt::Debug for TriggerBlockInterval { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerBlockInterval") + .field("chain", &self.chain) + .field("n-blocks", &self.n_blocks) + .field("start-block", &self.start_block) + .field("end-block", &self.end_block) + .finish() } } - unsafe impl _rt::WasmResource for ResolveAddressStream { - #[inline] - unsafe fn drop(_handle: u32) { - #[cfg(not(target_arch = "wasm32"))] - unreachable!(); - #[cfg(target_arch = "wasm32")] - { - #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] - unsafe extern "C" { - #[link_name = "[resource-drop]resolve-address-stream"] - fn drop(_: u32); - } - unsafe { drop(_handle) }; - } + #[derive(Clone)] + pub struct TriggerCron { + pub schedule: _rt::String, + pub start_time: Option, + pub end_time: Option, + } + impl ::core::fmt::Debug for TriggerCron { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("TriggerCron") + .field("schedule", &self.schedule) + .field("start-time", &self.start_time) + .field("end-time", &self.end_time) + .finish() } } - impl ResolveAddressStream { - 
#[allow(unused_unsafe, clippy::all)] - pub fn resolve_next_address( + #[derive(Clone)] + pub enum Trigger { + EvmContractEvent(TriggerEvmContractEvent), + CosmosContractEvent(TriggerCosmosContractEvent), + BlockInterval(TriggerBlockInterval), + Cron(TriggerCron), + Manual, + } + impl ::core::fmt::Debug for Trigger { + fn fmt( &self, - ) -> Result, ErrorCode> { - unsafe { - #[repr(align(2))] - struct RetArea([::core::mem::MaybeUninit; 22]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 22], - ); - let ptr0 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]resolve-address-stream.resolve-next-address"] - fn wit_import1(_: i32, _: *mut u8); + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + Trigger::EvmContractEvent(e) => { + f.debug_tuple("Trigger::EvmContractEvent").field(e).finish() } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import1(_: i32, _: *mut u8) { - unreachable!() + Trigger::CosmosContractEvent(e) => { + f.debug_tuple("Trigger::CosmosContractEvent") + .field(e) + .finish() } - unsafe { wit_import1((self).handle() as i32, ptr0) }; - let l2 = i32::from(*ptr0.add(0).cast::()); - let result19 = match l2 { - 0 => { - let e = { - let l3 = i32::from(*ptr0.add(2).cast::()); - match l3 { - 0 => None, - 1 => { - let e = { - let l4 = i32::from(*ptr0.add(4).cast::()); - use super::super::super::wasi::sockets::network::IpAddress as V17; - let v17 = match l4 { - 0 => { - let e17 = { - let l5 = i32::from(*ptr0.add(6).cast::()); - let l6 = i32::from(*ptr0.add(7).cast::()); - let l7 = i32::from(*ptr0.add(8).cast::()); - let l8 = i32::from(*ptr0.add(9).cast::()); - (l5 as u8, l6 as u8, l7 as u8, l8 as u8) - }; - V17::Ipv4(e17) - } - n => { - debug_assert_eq!(n, 1, "invalid enum discriminant"); - let e17 = { - let l9 = i32::from(*ptr0.add(6).cast::()); - let l10 = 
i32::from(*ptr0.add(8).cast::()); - let l11 = i32::from(*ptr0.add(10).cast::()); - let l12 = i32::from(*ptr0.add(12).cast::()); - let l13 = i32::from(*ptr0.add(14).cast::()); - let l14 = i32::from(*ptr0.add(16).cast::()); - let l15 = i32::from(*ptr0.add(18).cast::()); - let l16 = i32::from(*ptr0.add(20).cast::()); - ( - l9 as u16, - l10 as u16, - l11 as u16, - l12 as u16, - l13 as u16, - l14 as u16, - l15 as u16, - l16 as u16, - ) - }; - V17::Ipv6(e17) - } - }; - v17 - }; - Some(e) - } - _ => _rt::invalid_enum_discriminant(), - } - }; - Ok(e) - } - 1 => { - let e = { - let l18 = i32::from(*ptr0.add(2).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l18 as u8, - ) - }; - Err(e) - } - _ => _rt::invalid_enum_discriminant(), - }; - result19 + Trigger::BlockInterval(e) => { + f.debug_tuple("Trigger::BlockInterval").field(e).finish() + } + Trigger::Cron(e) => { + f.debug_tuple("Trigger::Cron").field(e).finish() + } + Trigger::Manual => f.debug_tuple("Trigger::Manual").finish(), } } } - impl ResolveAddressStream { - #[allow(unused_unsafe, clippy::all)] - pub fn subscribe(&self) -> Pollable { - unsafe { - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] - unsafe extern "C" { - #[link_name = "[method]resolve-address-stream.subscribe"] - fn wit_import0(_: i32) -> i32; - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import0(_: i32) -> i32 { - unreachable!() - } - let ret = unsafe { wit_import0((self).handle() as i32) }; - unsafe { - super::super::super::wasi::io::poll::Pollable::from_handle( - ret as u32, - ) + #[derive(Clone, Copy)] + pub enum SignatureAlgorithm { + Secp256k1, + } + impl ::core::fmt::Debug for SignatureAlgorithm { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + SignatureAlgorithm::Secp256k1 => { + f.debug_tuple("SignatureAlgorithm::Secp256k1").finish() } } } } - #[allow(unused_unsafe, clippy::all)] - pub 
fn resolve_addresses( - network: &Network, - name: &str, - ) -> Result { - unsafe { - #[repr(align(4))] - struct RetArea([::core::mem::MaybeUninit; 8]); - let mut ret_area = RetArea([::core::mem::MaybeUninit::uninit(); 8]); - let vec0 = name; - let ptr0 = vec0.as_ptr().cast::(); - let len0 = vec0.len(); - let ptr1 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "wasi:sockets/ip-name-lookup@0.2.0")] - unsafe extern "C" { - #[link_name = "resolve-addresses"] - fn wit_import2(_: i32, _: *mut u8, _: usize, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import2( - _: i32, - _: *mut u8, - _: usize, - _: *mut u8, - ) { - unreachable!() - } - unsafe { - wit_import2( - (network).handle() as i32, - ptr0.cast_mut(), - len0, - ptr1, - ) - }; - let l3 = i32::from(*ptr1.add(0).cast::()); - let result6 = match l3 { - 0 => { - let e = { - let l4 = *ptr1.add(4).cast::(); - unsafe { ResolveAddressStream::from_handle(l4 as u32) } - }; - Ok(e) - } - 1 => { - let e = { - let l5 = i32::from(*ptr1.add(4).cast::()); - super::super::super::wasi::sockets::network::ErrorCode::_lift( - l5 as u8, - ) - }; - Err(e) + #[derive(Clone, Copy)] + pub enum SignaturePrefix { + Eip191, + } + impl ::core::fmt::Debug for SignaturePrefix { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + SignaturePrefix::Eip191 => { + f.debug_tuple("SignaturePrefix::Eip191").finish() } - _ => _rt::invalid_enum_discriminant(), - }; - result6 + } } } - } - } -} -#[rustfmt::skip] -#[allow(dead_code, clippy::all)] -pub mod wavs { - pub mod worker { - #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] - pub mod layer_types { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; - use super::super::super::_rt; #[repr(C)] #[derive(Clone, Copy)] - pub struct Timestamp { - pub nanos: u64, + pub struct 
SignatureKind { + pub algorithm: SignatureAlgorithm, + pub prefix: Option, + } + impl ::core::fmt::Debug for SignatureKind { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("SignatureKind") + .field("algorithm", &self.algorithm) + .field("prefix", &self.prefix) + .finish() + } + } + #[derive(Clone)] + pub struct AggregatorSubmit { + pub url: _rt::String, + pub component: Component, + pub signature_kind: SignatureKind, + } + impl ::core::fmt::Debug for AggregatorSubmit { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + f.debug_struct("AggregatorSubmit") + .field("url", &self.url) + .field("component", &self.component) + .field("signature-kind", &self.signature_kind) + .finish() + } + } + #[derive(Clone)] + pub enum Submit { + None, + Aggregator(AggregatorSubmit), + } + impl ::core::fmt::Debug for Submit { + fn fmt( + &self, + f: &mut ::core::fmt::Formatter<'_>, + ) -> ::core::fmt::Result { + match self { + Submit::None => f.debug_tuple("Submit::None").finish(), + Submit::Aggregator(e) => { + f.debug_tuple("Submit::Aggregator").field(e).finish() + } + } + } } - impl ::core::fmt::Debug for Timestamp { + #[derive(Clone)] + pub struct Workflow { + pub trigger: Trigger, + pub component: Component, + pub submit: Submit, + } + impl ::core::fmt::Debug for Workflow { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("Timestamp").field("nanos", &self.nanos).finish() + f.debug_struct("Workflow") + .field("trigger", &self.trigger) + .field("component", &self.component) + .field("submit", &self.submit) + .finish() } } #[derive(Clone)] - pub struct CosmosAddress { - pub bech32_addr: _rt::String, - /// prefix is the first part of the bech32 address - pub prefix_len: u32, + pub struct Service { + pub name: _rt::String, + pub workflows: _rt::Vec<(WorkflowId, Workflow)>, + pub status: ServiceStatus, + pub manager: ServiceManager, } - impl 
::core::fmt::Debug for CosmosAddress { + impl ::core::fmt::Debug for Service { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("CosmosAddress") - .field("bech32-addr", &self.bech32_addr) - .field("prefix-len", &self.prefix_len) + f.debug_struct("Service") + .field("name", &self.name) + .field("workflows", &self.workflows) + .field("status", &self.status) + .field("manager", &self.manager) .finish() } } #[derive(Clone)] - pub struct CosmosEvent { - pub ty: _rt::String, - pub attributes: _rt::Vec<(_rt::String, _rt::String)>, + pub struct ServiceAndWorkflowId { + pub service: Service, + pub workflow_id: WorkflowId, } - impl ::core::fmt::Debug for CosmosEvent { + impl ::core::fmt::Debug for ServiceAndWorkflowId { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("CosmosEvent") - .field("ty", &self.ty) - .field("attributes", &self.attributes) + f.debug_struct("ServiceAndWorkflowId") + .field("service", &self.service) + .field("workflow-id", &self.workflow_id) .finish() } } #[derive(Clone)] - pub struct CosmosChainConfig { - pub chain_id: _rt::String, - pub rpc_endpoint: Option<_rt::String>, - pub grpc_endpoint: Option<_rt::String>, - pub grpc_web_endpoint: Option<_rt::String>, - pub gas_price: f32, - pub gas_denom: _rt::String, - pub bech32_prefix: _rt::String, + pub struct WorkflowAndWorkflowId { + pub workflow: Workflow, + pub workflow_id: WorkflowId, } - impl ::core::fmt::Debug for CosmosChainConfig { + impl ::core::fmt::Debug for WorkflowAndWorkflowId { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("CosmosChainConfig") - .field("chain-id", &self.chain_id) - .field("rpc-endpoint", &self.rpc_endpoint) - .field("grpc-endpoint", &self.grpc_endpoint) - .field("grpc-web-endpoint", &self.grpc_web_endpoint) - .field("gas-price", &self.gas_price) - .field("gas-denom", &self.gas_denom) - .field("bech32-prefix", &self.bech32_prefix) + 
f.debug_struct("WorkflowAndWorkflowId") + .field("workflow", &self.workflow) + .field("workflow-id", &self.workflow_id) .finish() } } + } + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod events { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + pub type ChainKey = super::super::super::wavs::types::chain::ChainKey; + pub type EvmEventLog = super::super::super::wavs::types::chain::EvmEventLog; + pub type CosmosAddress = super::super::super::wavs::types::chain::CosmosAddress; + pub type CosmosEvent = super::super::super::wavs::types::chain::CosmosEvent; + pub type Timestamp = super::super::super::wavs::types::core::Timestamp; + pub type EventId = _rt::Vec; #[derive(Clone)] - pub struct EvmAddress { - pub raw_bytes: _rt::Vec, + pub struct TriggerDataEvmContractEvent { + pub chain: ChainKey, + pub log: EvmEventLog, } - impl ::core::fmt::Debug for EvmAddress { + impl ::core::fmt::Debug for TriggerDataEvmContractEvent { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("EvmAddress") - .field("raw-bytes", &self.raw_bytes) + f.debug_struct("TriggerDataEvmContractEvent") + .field("chain", &self.chain) + .field("log", &self.log) .finish() } } #[derive(Clone)] - pub struct EvmEventLogData { - /// the raw log topics that can be decoded into an event - pub topics: _rt::Vec<_rt::Vec>, - /// the raw log data that can be decoded into an event - pub data: _rt::Vec, + pub struct TriggerDataCosmosContractEvent { + pub contract_address: CosmosAddress, + pub chain: ChainKey, + pub event: CosmosEvent, + pub event_index: u64, + pub block_height: u64, } - impl ::core::fmt::Debug for EvmEventLogData { + impl ::core::fmt::Debug for TriggerDataCosmosContractEvent { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("EvmEventLogData") - .field("topics", 
&self.topics) - .field("data", &self.data) + f.debug_struct("TriggerDataCosmosContractEvent") + .field("contract-address", &self.contract_address) + .field("chain", &self.chain) + .field("event", &self.event) + .field("event-index", &self.event_index) + .field("block-height", &self.block_height) .finish() } } #[derive(Clone)] - pub struct EvmChainConfig { - pub chain_id: _rt::String, - pub ws_endpoint: Option<_rt::String>, - pub http_endpoint: Option<_rt::String>, + pub struct TriggerDataBlockInterval { + pub chain: ChainKey, + pub block_height: u64, } - impl ::core::fmt::Debug for EvmChainConfig { + impl ::core::fmt::Debug for TriggerDataBlockInterval { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("EvmChainConfig") - .field("chain-id", &self.chain_id) - .field("ws-endpoint", &self.ws_endpoint) - .field("http-endpoint", &self.http_endpoint) + f.debug_struct("TriggerDataBlockInterval") + .field("chain", &self.chain) + .field("block-height", &self.block_height) .finish() } } - #[derive(Clone)] - pub struct TriggerSourceEvmContractEvent { - pub address: EvmAddress, - pub chain_name: _rt::String, - pub event_hash: _rt::Vec, + #[repr(C)] + #[derive(Clone, Copy)] + pub struct TriggerDataCron { + pub trigger_time: Timestamp, } - impl ::core::fmt::Debug for TriggerSourceEvmContractEvent { + impl ::core::fmt::Debug for TriggerDataCron { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("TriggerSourceEvmContractEvent") - .field("address", &self.address) - .field("chain-name", &self.chain_name) - .field("event-hash", &self.event_hash) + f.debug_struct("TriggerDataCron") + .field("trigger-time", &self.trigger_time) .finish() } } #[derive(Clone)] - pub struct TriggerSourceCosmosContractEvent { - pub address: CosmosAddress, - pub chain_name: _rt::String, - pub event_type: _rt::String, + pub enum TriggerData { + EvmContractEvent(TriggerDataEvmContractEvent), + 
CosmosContractEvent(TriggerDataCosmosContractEvent), + BlockInterval(TriggerDataBlockInterval), + Cron(TriggerDataCron), + Raw(_rt::Vec), } - impl ::core::fmt::Debug for TriggerSourceCosmosContractEvent { + impl ::core::fmt::Debug for TriggerData { fn fmt( &self, f: &mut ::core::fmt::Formatter<'_>, ) -> ::core::fmt::Result { - f.debug_struct("TriggerSourceCosmosContractEvent") - .field("address", &self.address) - .field("chain-name", &self.chain_name) - .field("event-type", &self.event_type) - .finish() + match self { + TriggerData::EvmContractEvent(e) => { + f.debug_tuple("TriggerData::EvmContractEvent") + .field(e) + .finish() + } + TriggerData::CosmosContractEvent(e) => { + f.debug_tuple("TriggerData::CosmosContractEvent") + .field(e) + .finish() + } + TriggerData::BlockInterval(e) => { + f.debug_tuple("TriggerData::BlockInterval").field(e).finish() + } + TriggerData::Cron(e) => { + f.debug_tuple("TriggerData::Cron").field(e).finish() + } + TriggerData::Raw(e) => { + f.debug_tuple("TriggerData::Raw").field(e).finish() + } + } + } + } + } + } +} +#[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] +pub mod host { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::__link_custom_section_describing_imports; + use super::_rt; + pub type EvmChainConfig = super::wavs::types::chain::EvmChainConfig; + pub type CosmosChainConfig = super::wavs::types::chain::CosmosChainConfig; + pub type ServiceAndWorkflowId = super::wavs::types::service::ServiceAndWorkflowId; + pub type WorkflowAndWorkflowId = super::wavs::types::service::WorkflowAndWorkflowId; + pub type LogLevel = super::wavs::types::core::LogLevel; + pub type EventId = super::wavs::types::events::EventId; + #[allow(unused_unsafe, clippy::all)] + pub fn get_evm_chain_config(chain_key: &str) -> Option { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit; 9 * 
::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 9 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = chain_key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "host")] + unsafe extern "C" { + #[link_name = "get-evm-chain-config"] + fn wit_import2(_: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unreachable!() + } + unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result15 = match l3 { + 0 => None, + 1 => { + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len6 = l5; + let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); + let l7 = i32::from( + *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l11 = i32::from( + *ptr1 + .add(6 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::wavs::types::chain::EvmChainConfig { + chain_id: _rt::string_lift(bytes6), + ws_endpoint: match l7 { + 0 => None, + 1 => { + let e = { + let l8 = *ptr1 + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l9 = *ptr1 + .add(5 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len10 = l9; + let bytes10 = _rt::Vec::from_raw_parts( + l8.cast(), + len10, + len10, + ); + _rt::string_lift(bytes10) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + http_endpoint: match l11 { + 0 => None, + 1 => { + let e = { + let l12 = *ptr1 + .add(7 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l13 = *ptr1 + .add(8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len14 = l13; + let bytes14 = _rt::Vec::from_raw_parts( 
+ l12.cast(), + len14, + len14, + ); + _rt::string_lift(bytes14) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + } + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result15 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn get_cosmos_chain_config(chain_key: &str) -> Option { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit; 17 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 17 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = chain_key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "host")] + unsafe extern "C" { + #[link_name = "get-cosmos-chain-config"] + fn wit_import2(_: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unreachable!() + } + unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result26 = match l3 { + 0 => None, + 1 => { + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len6 = l5; + let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); + let l7 = i32::from( + *ptr1 + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l11 = i32::from( + *ptr1 + .add(6 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l15 = i32::from( + *ptr1 + .add(9 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l19 = *ptr1 + .add(12 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l20 = *ptr1 + .add(13 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l21 = *ptr1 
+ .add(14 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len22 = l21; + let bytes22 = _rt::Vec::from_raw_parts(l20.cast(), len22, len22); + let l23 = *ptr1 + .add(15 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l24 = *ptr1 + .add(16 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len25 = l24; + let bytes25 = _rt::Vec::from_raw_parts(l23.cast(), len25, len25); + super::wavs::types::chain::CosmosChainConfig { + chain_id: _rt::string_lift(bytes6), + rpc_endpoint: match l7 { + 0 => None, + 1 => { + let e = { + let l8 = *ptr1 + .add(4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l9 = *ptr1 + .add(5 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len10 = l9; + let bytes10 = _rt::Vec::from_raw_parts( + l8.cast(), + len10, + len10, + ); + _rt::string_lift(bytes10) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + grpc_endpoint: match l11 { + 0 => None, + 1 => { + let e = { + let l12 = *ptr1 + .add(7 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l13 = *ptr1 + .add(8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len14 = l13; + let bytes14 = _rt::Vec::from_raw_parts( + l12.cast(), + len14, + len14, + ); + _rt::string_lift(bytes14) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + grpc_web_endpoint: match l15 { + 0 => None, + 1 => { + let e = { + let l16 = *ptr1 + .add(10 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l17 = *ptr1 + .add(11 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len18 = l17; + let bytes18 = _rt::Vec::from_raw_parts( + l16.cast(), + len18, + len18, + ); + _rt::string_lift(bytes18) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + gas_price: l19, + gas_denom: _rt::string_lift(bytes22), + bech32_prefix: _rt::string_lift(bytes25), + } + }; + Some(e) } + _ => _rt::invalid_enum_discriminant(), + }; + result26 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn 
config_var(key: &str) -> Option<_rt::String> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit; 3 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = key; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "host")] + unsafe extern "C" { + #[link_name = "config-var"] + fn wit_import2(_: *mut u8, _: usize, _: *mut u8); } - #[derive(Clone)] - pub struct BlockIntervalSource { - pub chain_name: _rt::String, - pub n_blocks: u32, - pub start_block: Option, - pub end_block: Option, + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unreachable!() } - impl ::core::fmt::Debug for BlockIntervalSource { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("BlockIntervalSource") - .field("chain-name", &self.chain_name) - .field("n-blocks", &self.n_blocks) - .field("start-block", &self.start_block) - .field("end-block", &self.end_block) - .finish() + unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result7 = match l3 { + 0 => None, + 1 => { + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len6 = l5; + let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); + _rt::string_lift(bytes6) + }; + Some(e) } + _ => _rt::invalid_enum_discriminant(), + }; + result7 + } + } + #[allow(unused_unsafe, clippy::all)] + pub fn log(level: LogLevel, message: &str) -> () { + unsafe { + use super::wavs::types::core::LogLevel as V0; + let 
result1 = match level { + V0::Error => 0i32, + V0::Warn => 1i32, + V0::Info => 2i32, + V0::Debug => 3i32, + V0::Trace => 4i32, + }; + let vec2 = message; + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "host")] + unsafe extern "C" { + #[link_name = "log"] + fn wit_import3(_: i32, _: *mut u8, _: usize); } - #[derive(Clone)] - pub struct TriggerSourceCron { - pub schedule: _rt::String, - pub start_time: Option, - pub end_time: Option, + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import3(_: i32, _: *mut u8, _: usize) { + unreachable!() } - impl ::core::fmt::Debug for TriggerSourceCron { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerSourceCron") - .field("schedule", &self.schedule) - .field("start-time", &self.start_time) - .field("end-time", &self.end_time) - .finish() - } + unsafe { wit_import3(result1, ptr2.cast_mut(), len2) }; + } + } + #[allow(unused_unsafe, clippy::all)] + /// gets the service and workflow id that called this component + pub fn get_service() -> ServiceAndWorkflowId { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit; 12 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 12 + * ::core::mem::size_of::<*const u8>()], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link(wasm_import_module = "host")] + unsafe extern "C" { + #[link_name = "get-service"] + fn wit_import1(_: *mut u8); } - #[derive(Clone)] - pub enum TriggerSource { - EvmContractEvent(TriggerSourceEvmContractEvent), - CosmosContractEvent(TriggerSourceCosmosContractEvent), - BlockInterval(BlockIntervalSource), - Cron(TriggerSourceCron), - Manual, + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn 
wit_import1(_: *mut u8) { + unreachable!() } - impl ::core::fmt::Debug for TriggerSource { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - TriggerSource::EvmContractEvent(e) => { - f.debug_tuple("TriggerSource::EvmContractEvent") - .field(e) - .finish() + unsafe { wit_import1(ptr0) }; + let l2 = *ptr0.add(0).cast::<*mut u8>(); + let l3 = *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(); + let len4 = l3; + let bytes4 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); + let l5 = *ptr0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l6 = *ptr0.add(3 * ::core::mem::size_of::<*const u8>()).cast::(); + let base162 = l5; + let len162 = l6; + let mut result162 = _rt::Vec::with_capacity(len162); + for i in 0..len162 { + let base = base162 + .add(i * (144 + 42 * ::core::mem::size_of::<*const u8>())); + let e162 = { + let l7 = *base.add(0).cast::<*mut u8>(); + let l8 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len9 = l8; + let bytes9 = _rt::Vec::from_raw_parts(l7.cast(), len9, len9); + let l10 = i32::from( + *base.add(2 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::wavs::types::service::Trigger as V45; + let v45 = match l10 { + 0 => { + let e45 = { + let l11 = *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l12 = *base + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len13 = l12; + let l14 = *base + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l15 = *base + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len16 = l15; + let bytes16 = _rt::Vec::from_raw_parts( + l14.cast(), + len16, + len16, + ); + let l17 = *base + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l18 = *base + .add(8 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len19 = l18; + 
super::wavs::types::service::TriggerEvmContractEvent { + address: super::wavs::types::chain::EvmAddress { + raw_bytes: _rt::Vec::from_raw_parts( + l11.cast(), + len13, + len13, + ), + }, + chain: _rt::string_lift(bytes16), + event_hash: _rt::Vec::from_raw_parts( + l17.cast(), + len19, + len19, + ), + } + }; + V45::EvmContractEvent(e45) } - TriggerSource::CosmosContractEvent(e) => { - f.debug_tuple("TriggerSource::CosmosContractEvent") - .field(e) - .finish() + 1 => { + let e45 = { + let l20 = *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l21 = *base + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len22 = l21; + let bytes22 = _rt::Vec::from_raw_parts( + l20.cast(), + len22, + len22, + ); + let l23 = *base + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l24 = *base + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l25 = *base + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len26 = l25; + let bytes26 = _rt::Vec::from_raw_parts( + l24.cast(), + len26, + len26, + ); + let l27 = *base + .add(8 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l28 = *base + .add(8 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len29 = l28; + let bytes29 = _rt::Vec::from_raw_parts( + l27.cast(), + len29, + len29, + ); + super::wavs::types::service::TriggerCosmosContractEvent { + address: super::wavs::types::chain::CosmosAddress { + bech32_addr: _rt::string_lift(bytes22), + prefix_len: l23 as u32, + }, + chain: _rt::string_lift(bytes26), + event_type: _rt::string_lift(bytes29), + } + }; + V45::CosmosContractEvent(e45) } - TriggerSource::BlockInterval(e) => { - f.debug_tuple("TriggerSource::BlockInterval") - .field(e) - .finish() + 2 => { + let e45 = { + let l30 = *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l31 = *base + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + 
.cast::(); + let len32 = l31; + let bytes32 = _rt::Vec::from_raw_parts( + l30.cast(), + len32, + len32, + ); + let l33 = *base + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l34 = i32::from( + *base + .add(16 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l36 = i32::from( + *base + .add(32 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::wavs::types::service::TriggerBlockInterval { + chain: _rt::string_lift(bytes32), + n_blocks: l33 as u32, + start_block: match l34 { + 0 => None, + 1 => { + let e = { + let l35 = *base + .add(24 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l35 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + end_block: match l36 { + 0 => None, + 1 => { + let e = { + let l37 = *base + .add(40 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l37 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + } + }; + V45::BlockInterval(e45) + } + 3 => { + let e45 = { + let l38 = *base + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l39 = *base + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len40 = l39; + let bytes40 = _rt::Vec::from_raw_parts( + l38.cast(), + len40, + len40, + ); + let l41 = i32::from( + *base + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l43 = i32::from( + *base + .add(24 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::wavs::types::service::TriggerCron { + schedule: _rt::string_lift(bytes40), + start_time: match l41 { + 0 => None, + 1 => { + let e = { + let l42 = *base + .add(16 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + super::wavs::types::core::Timestamp { + nanos: l42 as u64, + } + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + end_time: match l43 { + 0 => None, + 1 => { + let e = { + let l44 = *base + .add(32 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + 
super::wavs::types::core::Timestamp { + nanos: l44 as u64, + } + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + } + }; + V45::Cron(e45) } - TriggerSource::Cron(e) => { - f.debug_tuple("TriggerSource::Cron").field(e).finish() + n => { + debug_assert_eq!(n, 4, "invalid enum discriminant"); + V45::Manual } - TriggerSource::Manual => { - f.debug_tuple("TriggerSource::Manual").finish() + }; + let l46 = i32::from( + *base + .add(48 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::ComponentSource as V70; + let v70 = match l46 { + 0 => { + let e70 = { + let l47 = *base + .add(48 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l48 = *base + .add(48 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len49 = l48; + let bytes49 = _rt::Vec::from_raw_parts( + l47.cast(), + len49, + len49, + ); + let l50 = *base + .add(48 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l51 = *base + .add(48 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len52 = l51; + let bytes52 = _rt::Vec::from_raw_parts( + l50.cast(), + len52, + len52, + ); + super::wavs::types::service::ComponentSourceDownload { + url: _rt::string_lift(bytes49), + digest: _rt::string_lift(bytes52), + } + }; + V70::Download(e70) } - } - } - } - #[derive(Clone)] - pub struct TriggerConfig { - pub service_id: _rt::String, - pub workflow_id: _rt::String, - pub trigger_source: TriggerSource, - } - impl ::core::fmt::Debug for TriggerConfig { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerConfig") - .field("service-id", &self.service_id) - .field("workflow-id", &self.workflow_id) - .field("trigger-source", &self.trigger_source) - .finish() - } - } - #[derive(Clone)] - pub struct TriggerDataEvmContractEvent { - pub contract_address: EvmAddress, - pub chain_name: _rt::String, - pub log: EvmEventLogData, - pub block_height: u64, - } - 
impl ::core::fmt::Debug for TriggerDataEvmContractEvent { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerDataEvmContractEvent") - .field("contract-address", &self.contract_address) - .field("chain-name", &self.chain_name) - .field("log", &self.log) - .field("block-height", &self.block_height) - .finish() - } - } - #[derive(Clone)] - pub struct TriggerDataCosmosContractEvent { - pub contract_address: CosmosAddress, - pub chain_name: _rt::String, - pub event: CosmosEvent, - pub block_height: u64, - } - impl ::core::fmt::Debug for TriggerDataCosmosContractEvent { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerDataCosmosContractEvent") - .field("contract-address", &self.contract_address) - .field("chain-name", &self.chain_name) - .field("event", &self.event) - .field("block-height", &self.block_height) - .finish() - } - } - #[derive(Clone)] - pub struct BlockIntervalData { - pub chain_name: _rt::String, - pub block_height: u64, - } - impl ::core::fmt::Debug for BlockIntervalData { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("BlockIntervalData") - .field("chain-name", &self.chain_name) - .field("block-height", &self.block_height) - .finish() - } - } - #[repr(C)] - #[derive(Clone, Copy)] - pub struct TriggerDataCron { - pub trigger_time: Timestamp, - } - impl ::core::fmt::Debug for TriggerDataCron { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerDataCron") - .field("trigger-time", &self.trigger_time) - .finish() - } - } - #[derive(Clone)] - pub enum TriggerData { - EvmContractEvent(TriggerDataEvmContractEvent), - CosmosContractEvent(TriggerDataCosmosContractEvent), - BlockInterval(BlockIntervalData), - Cron(TriggerDataCron), - Raw(_rt::Vec), - } - impl ::core::fmt::Debug for TriggerData { - fn fmt( - &self, - f: &mut 
::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - TriggerData::EvmContractEvent(e) => { - f.debug_tuple("TriggerData::EvmContractEvent") - .field(e) - .finish() + 1 => { + let e70 = { + let l53 = *base + .add(48 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l54 = *base + .add(48 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len55 = l54; + let bytes55 = _rt::Vec::from_raw_parts( + l53.cast(), + len55, + len55, + ); + let l56 = i32::from( + *base + .add(48 + 7 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l60 = i32::from( + *base + .add(48 + 10 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l64 = *base + .add(48 + 13 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l65 = *base + .add(48 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len66 = l65; + let bytes66 = _rt::Vec::from_raw_parts( + l64.cast(), + len66, + len66, + ); + super::wavs::types::service::Registry { + digest: _rt::string_lift(bytes55), + domain: match l56 { + 0 => None, + 1 => { + let e = { + let l57 = *base + .add(48 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l58 = *base + .add(48 + 9 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len59 = l58; + let bytes59 = _rt::Vec::from_raw_parts( + l57.cast(), + len59, + len59, + ); + _rt::string_lift(bytes59) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + version: match l60 { + 0 => None, + 1 => { + let e = { + let l61 = *base + .add(48 + 11 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l62 = *base + .add(48 + 12 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len63 = l62; + let bytes63 = _rt::Vec::from_raw_parts( + l61.cast(), + len63, + len63, + ); + _rt::string_lift(bytes63) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + pkg: _rt::string_lift(bytes66), + } + }; + V70::Registry(e70) } - TriggerData::CosmosContractEvent(e) 
=> { - f.debug_tuple("TriggerData::CosmosContractEvent") - .field(e) - .finish() + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e70 = { + let l67 = *base + .add(48 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l68 = *base + .add(48 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len69 = l68; + let bytes69 = _rt::Vec::from_raw_parts( + l67.cast(), + len69, + len69, + ); + _rt::string_lift(bytes69) + }; + V70::Digest(e70) } - TriggerData::BlockInterval(e) => { - f.debug_tuple("TriggerData::BlockInterval").field(e).finish() + }; + let l71 = i32::from( + *base + .add(48 + 15 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::AllowedHostPermission as V78; + let v78 = match l71 { + 0 => V78::All, + 1 => { + let e78 = { + let l72 = *base + .add(48 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l73 = *base + .add(48 + 17 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base77 = l72; + let len77 = l73; + let mut result77 = _rt::Vec::with_capacity(len77); + for i in 0..len77 { + let base = base77 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e77 = { + let l74 = *base.add(0).cast::<*mut u8>(); + let l75 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len76 = l75; + let bytes76 = _rt::Vec::from_raw_parts( + l74.cast(), + len76, + len76, + ); + _rt::string_lift(bytes76) + }; + result77.push(e77); + } + _rt::cabi_dealloc( + base77, + len77 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result77 + }; + V78::Only(e78) } - TriggerData::Cron(e) => { - f.debug_tuple("TriggerData::Cron").field(e).finish() + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + V78::None } - TriggerData::Raw(e) => { - f.debug_tuple("TriggerData::Raw").field(e).finish() + }; + let l79 = i32::from( + *base + .add(48 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + 
let l80 = i32::from( + *base + .add(56 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l82 = i32::from( + *base + .add(72 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l84 = *base + .add(88 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l85 = *base + .add(88 + 19 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base92 = l84; + let len92 = l85; + let mut result92 = _rt::Vec::with_capacity(len92); + for i in 0..len92 { + let base = base92 + .add(i * (4 * ::core::mem::size_of::<*const u8>())); + let e92 = { + let l86 = *base.add(0).cast::<*mut u8>(); + let l87 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len88 = l87; + let bytes88 = _rt::Vec::from_raw_parts( + l86.cast(), + len88, + len88, + ); + let l89 = *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l90 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len91 = l90; + let bytes91 = _rt::Vec::from_raw_parts( + l89.cast(), + len91, + len91, + ); + (_rt::string_lift(bytes88), _rt::string_lift(bytes91)) + }; + result92.push(e92); + } + _rt::cabi_dealloc( + base92, + len92 * (4 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l93 = *base + .add(88 + 20 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l94 = *base + .add(88 + 21 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base98 = l93; + let len98 = l94; + let mut result98 = _rt::Vec::with_capacity(len98); + for i in 0..len98 { + let base = base98 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e98 = { + let l95 = *base.add(0).cast::<*mut u8>(); + let l96 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len97 = l96; + let bytes97 = _rt::Vec::from_raw_parts( + l95.cast(), + len97, + len97, + ); + _rt::string_lift(bytes97) + }; + result98.push(e98); + } + _rt::cabi_dealloc( + base98, + len98 * (2 * 
::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l99 = i32::from( + *base + .add(88 + 22 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::Submit as V161; + let v161 = match l99 { + 0 => V161::None, + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e161 = { + let l100 = *base + .add(96 + 22 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l101 = *base + .add(96 + 23 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len102 = l101; + let bytes102 = _rt::Vec::from_raw_parts( + l100.cast(), + len102, + len102, + ); + let l103 = i32::from( + *base + .add(96 + 24 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::ComponentSource as V127; + let v127 = match l103 { + 0 => { + let e127 = { + let l104 = *base + .add(96 + 25 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l105 = *base + .add(96 + 26 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len106 = l105; + let bytes106 = _rt::Vec::from_raw_parts( + l104.cast(), + len106, + len106, + ); + let l107 = *base + .add(96 + 27 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l108 = *base + .add(96 + 28 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len109 = l108; + let bytes109 = _rt::Vec::from_raw_parts( + l107.cast(), + len109, + len109, + ); + super::wavs::types::service::ComponentSourceDownload { + url: _rt::string_lift(bytes106), + digest: _rt::string_lift(bytes109), + } + }; + V127::Download(e127) + } + 1 => { + let e127 = { + let l110 = *base + .add(96 + 25 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l111 = *base + .add(96 + 26 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len112 = l111; + let bytes112 = _rt::Vec::from_raw_parts( + l110.cast(), + len112, + len112, + ); + let l113 = i32::from( + *base + .add(96 + 27 * ::core::mem::size_of::<*const u8>()) + 
.cast::(), + ); + let l117 = i32::from( + *base + .add(96 + 30 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l121 = *base + .add(96 + 33 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l122 = *base + .add(96 + 34 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len123 = l122; + let bytes123 = _rt::Vec::from_raw_parts( + l121.cast(), + len123, + len123, + ); + super::wavs::types::service::Registry { + digest: _rt::string_lift(bytes112), + domain: match l113 { + 0 => None, + 1 => { + let e = { + let l114 = *base + .add(96 + 28 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l115 = *base + .add(96 + 29 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len116 = l115; + let bytes116 = _rt::Vec::from_raw_parts( + l114.cast(), + len116, + len116, + ); + _rt::string_lift(bytes116) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + version: match l117 { + 0 => None, + 1 => { + let e = { + let l118 = *base + .add(96 + 31 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l119 = *base + .add(96 + 32 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len120 = l119; + let bytes120 = _rt::Vec::from_raw_parts( + l118.cast(), + len120, + len120, + ); + _rt::string_lift(bytes120) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + pkg: _rt::string_lift(bytes123), + } + }; + V127::Registry(e127) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e127 = { + let l124 = *base + .add(96 + 25 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l125 = *base + .add(96 + 26 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len126 = l125; + let bytes126 = _rt::Vec::from_raw_parts( + l124.cast(), + len126, + len126, + ); + _rt::string_lift(bytes126) + }; + V127::Digest(e127) + } + }; + let l128 = i32::from( + *base + .add(96 + 35 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use 
super::wavs::types::service::AllowedHostPermission as V135; + let v135 = match l128 { + 0 => V135::All, + 1 => { + let e135 = { + let l129 = *base + .add(96 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l130 = *base + .add(96 + 37 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base134 = l129; + let len134 = l130; + let mut result134 = _rt::Vec::with_capacity(len134); + for i in 0..len134 { + let base = base134 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e134 = { + let l131 = *base.add(0).cast::<*mut u8>(); + let l132 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len133 = l132; + let bytes133 = _rt::Vec::from_raw_parts( + l131.cast(), + len133, + len133, + ); + _rt::string_lift(bytes133) + }; + result134.push(e134); + } + _rt::cabi_dealloc( + base134, + len134 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result134 + }; + V135::Only(e135) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + V135::None + } + }; + let l136 = i32::from( + *base + .add(96 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l137 = i32::from( + *base + .add(104 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l139 = i32::from( + *base + .add(120 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l141 = *base + .add(136 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l142 = *base + .add(136 + 39 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base149 = l141; + let len149 = l142; + let mut result149 = _rt::Vec::with_capacity(len149); + for i in 0..len149 { + let base = base149 + .add(i * (4 * ::core::mem::size_of::<*const u8>())); + let e149 = { + let l143 = *base.add(0).cast::<*mut u8>(); + let l144 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len145 = l144; + let bytes145 = _rt::Vec::from_raw_parts( + l143.cast(), + len145, + len145, + ); + 
let l146 = *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l147 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len148 = l147; + let bytes148 = _rt::Vec::from_raw_parts( + l146.cast(), + len148, + len148, + ); + (_rt::string_lift(bytes145), _rt::string_lift(bytes148)) + }; + result149.push(e149); + } + _rt::cabi_dealloc( + base149, + len149 * (4 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l150 = *base + .add(136 + 40 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l151 = *base + .add(136 + 41 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base155 = l150; + let len155 = l151; + let mut result155 = _rt::Vec::with_capacity(len155); + for i in 0..len155 { + let base = base155 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e155 = { + let l152 = *base.add(0).cast::<*mut u8>(); + let l153 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len154 = l153; + let bytes154 = _rt::Vec::from_raw_parts( + l152.cast(), + len154, + len154, + ); + _rt::string_lift(bytes154) + }; + result155.push(e155); + } + _rt::cabi_dealloc( + base155, + len155 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l156 = i32::from( + *base + .add(136 + 42 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::SignatureAlgorithm as V157; + let v157 = match l156 { + n => { + debug_assert_eq!(n, 0, "invalid enum discriminant"); + V157::Secp256k1 + } + }; + let l158 = i32::from( + *base + .add(137 + 42 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::wavs::types::service::AggregatorSubmit { + url: _rt::string_lift(bytes102), + component: super::wavs::types::service::Component { + source: v127, + permissions: super::wavs::types::service::Permissions { + allowed_http_hosts: v135, + file_system: _rt::bool_lift(l136 as u8), + }, + fuel_limit: 
match l137 { + 0 => None, + 1 => { + let e = { + let l138 = *base + .add(112 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l138 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + time_limit_seconds: match l139 { + 0 => None, + 1 => { + let e = { + let l140 = *base + .add(128 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l140 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + config: result149, + env_keys: result155, + }, + signature_kind: super::wavs::types::service::SignatureKind { + algorithm: v157, + prefix: match l158 { + 0 => None, + 1 => { + let e = { + let l159 = i32::from( + *base + .add(138 + 42 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::SignaturePrefix as V160; + let v160 = match l159 { + n => { + debug_assert_eq!(n, 0, "invalid enum discriminant"); + V160::Eip191 + } + }; + v160 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + }, + } + }; + V161::Aggregator(e161) } - } - } - } - #[derive(Clone)] - pub struct TriggerAction { - pub config: TriggerConfig, - pub data: TriggerData, - } - impl ::core::fmt::Debug for TriggerAction { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("TriggerAction") - .field("config", &self.config) - .field("data", &self.data) - .finish() - } - } - #[derive(Clone)] - pub struct WasmResponse { - pub payload: _rt::Vec, - pub ordering: Option, - } - impl ::core::fmt::Debug for WasmResponse { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - f.debug_struct("WasmResponse") - .field("payload", &self.payload) - .field("ordering", &self.ordering) - .finish() + }; + ( + _rt::string_lift(bytes9), + super::wavs::types::service::Workflow { + trigger: v45, + component: super::wavs::types::service::Component { + source: v70, + permissions: super::wavs::types::service::Permissions { + allowed_http_hosts: v78, + 
file_system: _rt::bool_lift(l79 as u8), + }, + fuel_limit: match l80 { + 0 => None, + 1 => { + let e = { + let l81 = *base + .add(64 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l81 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + time_limit_seconds: match l82 { + 0 => None, + 1 => { + let e = { + let l83 = *base + .add(80 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l83 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + config: result92, + env_keys: result98, + }, + submit: v161, + }, + ) + }; + result162.push(e162); + } + _rt::cabi_dealloc( + base162, + len162 * (144 + 42 * ::core::mem::size_of::<*const u8>()), + 8, + ); + let l163 = i32::from( + *ptr0.add(4 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::wavs::types::service::ServiceStatus as V164; + let v164 = match l163 { + 0 => V164::Active, + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + V164::Paused } - } - #[derive(Clone, Copy)] - pub enum LogLevel { - Error, - Warn, - Info, - Debug, - Trace, - } - impl ::core::fmt::Debug for LogLevel { - fn fmt( - &self, - f: &mut ::core::fmt::Formatter<'_>, - ) -> ::core::fmt::Result { - match self { - LogLevel::Error => f.debug_tuple("LogLevel::Error").finish(), - LogLevel::Warn => f.debug_tuple("LogLevel::Warn").finish(), - LogLevel::Info => f.debug_tuple("LogLevel::Info").finish(), - LogLevel::Debug => f.debug_tuple("LogLevel::Debug").finish(), - LogLevel::Trace => f.debug_tuple("LogLevel::Trace").finish(), - } + }; + let l165 = i32::from( + *ptr0.add(5 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::wavs::types::service::ServiceManager as V172; + let v172 = match l165 { + n => { + debug_assert_eq!(n, 0, "invalid enum discriminant"); + let e172 = { + let l166 = *ptr0 + .add(6 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l167 = *ptr0 + .add(7 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len168 = l167; + 
let bytes168 = _rt::Vec::from_raw_parts( + l166.cast(), + len168, + len168, + ); + let l169 = *ptr0 + .add(8 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l170 = *ptr0 + .add(9 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len171 = l170; + super::wavs::types::service::EvmManager { + chain: _rt::string_lift(bytes168), + address: super::wavs::types::chain::EvmAddress { + raw_bytes: _rt::Vec::from_raw_parts( + l169.cast(), + len171, + len171, + ), + }, + } + }; + V172::Evm(e172) } - } + }; + let l173 = *ptr0 + .add(10 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l174 = *ptr0 + .add(11 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len175 = l174; + let bytes175 = _rt::Vec::from_raw_parts(l173.cast(), len175, len175); + let result176 = super::wavs::types::service::ServiceAndWorkflowId { + service: super::wavs::types::service::Service { + name: _rt::string_lift(bytes4), + workflows: result162, + status: v164, + manager: v172, + }, + workflow_id: _rt::string_lift(bytes175), + }; + result176 } } -} -#[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] -pub mod host { - #[used] - #[doc(hidden)] - static __FORCE_SECTION_REF: fn() = super::__link_custom_section_describing_imports; - use super::_rt; - pub type EvmChainConfig = super::wavs::worker::layer_types::EvmChainConfig; - pub type CosmosChainConfig = super::wavs::worker::layer_types::CosmosChainConfig; - pub type LogLevel = super::wavs::worker::layer_types::LogLevel; #[allow(unused_unsafe, clippy::all)] - pub fn get_evm_chain_config(chain_name: &str) -> Option { + /// convenience function to get the workflow without having to walk service.workflows + pub fn get_workflow() -> WorkflowAndWorkflowId { unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea([::core::mem::MaybeUninit; 9 * ::core::mem::size_of::<*const u8>()]); + #[repr(align(8))] + struct 
RetArea( + [::core::mem::MaybeUninit< + u8, + >; 144 + 42 * ::core::mem::size_of::<*const u8>()], + ); let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 9 * ::core::mem::size_of::<*const u8>()], + [::core::mem::MaybeUninit::uninit(); 144 + + 42 * ::core::mem::size_of::<*const u8>()], ); - let vec0 = chain_name; - let ptr0 = vec0.as_ptr().cast::(); - let len0 = vec0.len(); - let ptr1 = ret_area.0.as_mut_ptr().cast::(); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "host")] unsafe extern "C" { - #[link_name = "get-evm-chain-config"] - fn wit_import2(_: *mut u8, _: usize, _: *mut u8); + #[link_name = "get-workflow"] + fn wit_import1(_: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unsafe extern "C" fn wit_import1(_: *mut u8) { unreachable!() } - unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; - let l3 = i32::from(*ptr1.add(0).cast::()); - let result15 = match l3 { - 0 => None, + unsafe { wit_import1(ptr0) }; + let l2 = i32::from(*ptr0.add(0).cast::()); + use super::wavs::types::service::Trigger as V37; + let v37 = match l2 { + 0 => { + let e37 = { + let l3 = *ptr0.add(8).cast::<*mut u8>(); + let l4 = *ptr0 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len5 = l4; + let l6 = *ptr0 + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l7 = *ptr0 + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len8 = l7; + let bytes8 = _rt::Vec::from_raw_parts(l6.cast(), len8, len8); + let l9 = *ptr0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l10 = *ptr0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len11 = l10; + super::wavs::types::service::TriggerEvmContractEvent { + address: super::wavs::types::chain::EvmAddress { + raw_bytes: _rt::Vec::from_raw_parts(l3.cast(), len5, len5), + }, + chain: 
_rt::string_lift(bytes8), + event_hash: _rt::Vec::from_raw_parts(l9.cast(), len11, len11), + } + }; + V37::EvmContractEvent(e37) + } 1 => { - let e = { - let l4 = *ptr1.add(::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l5 = *ptr1.add(2 * ::core::mem::size_of::<*const u8>()).cast::(); - let len6 = l5; - let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); - let l7 = i32::from( - *ptr1.add(3 * ::core::mem::size_of::<*const u8>()).cast::(), + let e37 = { + let l12 = *ptr0.add(8).cast::<*mut u8>(); + let l13 = *ptr0 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len14 = l13; + let bytes14 = _rt::Vec::from_raw_parts(l12.cast(), len14, len14); + let l15 = *ptr0 + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l16 = *ptr0 + .add(8 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l17 = *ptr0 + .add(8 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len18 = l17; + let bytes18 = _rt::Vec::from_raw_parts(l16.cast(), len18, len18); + let l19 = *ptr0 + .add(8 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l20 = *ptr0 + .add(8 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len21 = l20; + let bytes21 = _rt::Vec::from_raw_parts(l19.cast(), len21, len21); + super::wavs::types::service::TriggerCosmosContractEvent { + address: super::wavs::types::chain::CosmosAddress { + bech32_addr: _rt::string_lift(bytes14), + prefix_len: l15 as u32, + }, + chain: _rt::string_lift(bytes18), + event_type: _rt::string_lift(bytes21), + } + }; + V37::CosmosContractEvent(e37) + } + 2 => { + let e37 = { + let l22 = *ptr0.add(8).cast::<*mut u8>(); + let l23 = *ptr0 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len24 = l23; + let bytes24 = _rt::Vec::from_raw_parts(l22.cast(), len24, len24); + let l25 = *ptr0 + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let l26 = i32::from( + *ptr0 + .add(16 + 2 * 
::core::mem::size_of::<*const u8>()) + .cast::(), ); - let l11 = i32::from( - *ptr1.add(6 * ::core::mem::size_of::<*const u8>()).cast::(), + let l28 = i32::from( + *ptr0 + .add(32 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), ); - super::wavs::worker::layer_types::EvmChainConfig { - chain_id: _rt::string_lift(bytes6), - ws_endpoint: match l7 { + super::wavs::types::service::TriggerBlockInterval { + chain: _rt::string_lift(bytes24), + n_blocks: l25 as u32, + start_block: match l26 { 0 => None, 1 => { let e = { - let l8 = *ptr1 - .add(4 * ::core::mem::size_of::<*const u8>()) - .cast::<*mut u8>(); - let l9 = *ptr1 - .add(5 * ::core::mem::size_of::<*const u8>()) - .cast::(); - let len10 = l9; - let bytes10 = - _rt::Vec::from_raw_parts(l8.cast(), len10, len10); - _rt::string_lift(bytes10) + let l27 = *ptr0 + .add(24 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l27 as u64 }; Some(e) } _ => _rt::invalid_enum_discriminant(), }, - http_endpoint: match l11 { + end_block: match l28 { 0 => None, 1 => { let e = { - let l12 = *ptr1 - .add(7 * ::core::mem::size_of::<*const u8>()) - .cast::<*mut u8>(); - let l13 = *ptr1 - .add(8 * ::core::mem::size_of::<*const u8>()) - .cast::(); - let len14 = l13; - let bytes14 = - _rt::Vec::from_raw_parts(l12.cast(), len14, len14); - _rt::string_lift(bytes14) + let l29 = *ptr0 + .add(40 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l29 as u64 }; Some(e) } @@ -15738,208 +19383,785 @@ pub mod host { }, } }; - Some(e) + V37::BlockInterval(e37) } - _ => _rt::invalid_enum_discriminant(), - }; - result15 - } - } - #[allow(unused_unsafe, clippy::all)] - pub fn get_cosmos_chain_config(chain_name: &str) -> Option { - unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea( - [::core::mem::MaybeUninit; 17 * ::core::mem::size_of::<*const u8>()], - ); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 17 * 
::core::mem::size_of::<*const u8>()], - ); - let vec0 = chain_name; - let ptr0 = vec0.as_ptr().cast::(); - let len0 = vec0.len(); - let ptr1 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "host")] - unsafe extern "C" { - #[link_name = "get-cosmos-chain-config"] - fn wit_import2(_: *mut u8, _: usize, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { - unreachable!() - } - unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; - let l3 = i32::from(*ptr1.add(0).cast::()); - let result26 = match l3 { - 0 => None, - 1 => { - let e = { - let l4 = *ptr1.add(::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l5 = *ptr1.add(2 * ::core::mem::size_of::<*const u8>()).cast::(); - let len6 = l5; - let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); - let l7 = i32::from( - *ptr1.add(3 * ::core::mem::size_of::<*const u8>()).cast::(), - ); - let l11 = i32::from( - *ptr1.add(6 * ::core::mem::size_of::<*const u8>()).cast::(), + 3 => { + let e37 = { + let l30 = *ptr0.add(8).cast::<*mut u8>(); + let l31 = *ptr0 + .add(8 + 1 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len32 = l31; + let bytes32 = _rt::Vec::from_raw_parts(l30.cast(), len32, len32); + let l33 = i32::from( + *ptr0 + .add(8 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), ); - let l15 = i32::from( - *ptr1.add(9 * ::core::mem::size_of::<*const u8>()).cast::(), + let l35 = i32::from( + *ptr0 + .add(24 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(), ); - let l19 = *ptr1.add(12 * ::core::mem::size_of::<*const u8>()).cast::(); - let l20 = - *ptr1.add(13 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l21 = - *ptr1.add(14 * ::core::mem::size_of::<*const u8>()).cast::(); - let len22 = l21; - let bytes22 = _rt::Vec::from_raw_parts(l20.cast(), len22, len22); - let l23 = - *ptr1.add(15 * ::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - 
let l24 = - *ptr1.add(16 * ::core::mem::size_of::<*const u8>()).cast::(); - let len25 = l24; - let bytes25 = _rt::Vec::from_raw_parts(l23.cast(), len25, len25); - super::wavs::worker::layer_types::CosmosChainConfig { - chain_id: _rt::string_lift(bytes6), - rpc_endpoint: match l7 { + super::wavs::types::service::TriggerCron { + schedule: _rt::string_lift(bytes32), + start_time: match l33 { 0 => None, 1 => { let e = { - let l8 = *ptr1 - .add(4 * ::core::mem::size_of::<*const u8>()) - .cast::<*mut u8>(); - let l9 = *ptr1 - .add(5 * ::core::mem::size_of::<*const u8>()) - .cast::(); - let len10 = l9; - let bytes10 = - _rt::Vec::from_raw_parts(l8.cast(), len10, len10); - _rt::string_lift(bytes10) + let l34 = *ptr0 + .add(16 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + super::wavs::types::core::Timestamp { + nanos: l34 as u64, + } }; Some(e) } _ => _rt::invalid_enum_discriminant(), }, - grpc_endpoint: match l11 { + end_time: match l35 { + 0 => None, + 1 => { + let e = { + let l36 = *ptr0 + .add(32 + 2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + super::wavs::types::core::Timestamp { + nanos: l36 as u64, + } + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + } + }; + V37::Cron(e37) + } + n => { + debug_assert_eq!(n, 4, "invalid enum discriminant"); + V37::Manual + } + }; + let l38 = i32::from( + *ptr0.add(48 + 2 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::wavs::types::service::ComponentSource as V62; + let v62 = match l38 { + 0 => { + let e62 = { + let l39 = *ptr0 + .add(48 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l40 = *ptr0 + .add(48 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len41 = l40; + let bytes41 = _rt::Vec::from_raw_parts(l39.cast(), len41, len41); + let l42 = *ptr0 + .add(48 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l43 = *ptr0 + .add(48 + 6 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len44 = l43; + let bytes44 = 
_rt::Vec::from_raw_parts(l42.cast(), len44, len44); + super::wavs::types::service::ComponentSourceDownload { + url: _rt::string_lift(bytes41), + digest: _rt::string_lift(bytes44), + } + }; + V62::Download(e62) + } + 1 => { + let e62 = { + let l45 = *ptr0 + .add(48 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l46 = *ptr0 + .add(48 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len47 = l46; + let bytes47 = _rt::Vec::from_raw_parts(l45.cast(), len47, len47); + let l48 = i32::from( + *ptr0 + .add(48 + 5 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l52 = i32::from( + *ptr0 + .add(48 + 8 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l56 = *ptr0 + .add(48 + 11 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l57 = *ptr0 + .add(48 + 12 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len58 = l57; + let bytes58 = _rt::Vec::from_raw_parts(l56.cast(), len58, len58); + super::wavs::types::service::Registry { + digest: _rt::string_lift(bytes47), + domain: match l48 { 0 => None, 1 => { let e = { - let l12 = *ptr1 - .add(7 * ::core::mem::size_of::<*const u8>()) + let l49 = *ptr0 + .add(48 + 6 * ::core::mem::size_of::<*const u8>()) .cast::<*mut u8>(); - let l13 = *ptr1 - .add(8 * ::core::mem::size_of::<*const u8>()) + let l50 = *ptr0 + .add(48 + 7 * ::core::mem::size_of::<*const u8>()) .cast::(); - let len14 = l13; - let bytes14 = - _rt::Vec::from_raw_parts(l12.cast(), len14, len14); - _rt::string_lift(bytes14) + let len51 = l50; + let bytes51 = _rt::Vec::from_raw_parts( + l49.cast(), + len51, + len51, + ); + _rt::string_lift(bytes51) }; Some(e) } _ => _rt::invalid_enum_discriminant(), }, - grpc_web_endpoint: match l15 { + version: match l52 { 0 => None, 1 => { let e = { - let l16 = *ptr1 - .add(10 * ::core::mem::size_of::<*const u8>()) + let l53 = *ptr0 + .add(48 + 9 * ::core::mem::size_of::<*const u8>()) .cast::<*mut u8>(); - let l17 = *ptr1 - .add(11 * 
::core::mem::size_of::<*const u8>()) + let l54 = *ptr0 + .add(48 + 10 * ::core::mem::size_of::<*const u8>()) .cast::(); - let len18 = l17; - let bytes18 = - _rt::Vec::from_raw_parts(l16.cast(), len18, len18); - _rt::string_lift(bytes18) + let len55 = l54; + let bytes55 = _rt::Vec::from_raw_parts( + l53.cast(), + len55, + len55, + ); + _rt::string_lift(bytes55) }; Some(e) } _ => _rt::invalid_enum_discriminant(), }, - gas_price: l19, - gas_denom: _rt::string_lift(bytes22), - bech32_prefix: _rt::string_lift(bytes25), + pkg: _rt::string_lift(bytes58), } }; - Some(e) + V62::Registry(e62) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e62 = { + let l59 = *ptr0 + .add(48 + 3 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l60 = *ptr0 + .add(48 + 4 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len61 = l60; + let bytes61 = _rt::Vec::from_raw_parts(l59.cast(), len61, len61); + _rt::string_lift(bytes61) + }; + V62::Digest(e62) } - _ => _rt::invalid_enum_discriminant(), }; - result26 - } - } - #[allow(unused_unsafe, clippy::all)] - pub fn config_var(key: &str) -> Option<_rt::String> { - unsafe { - #[cfg_attr(target_pointer_width = "64", repr(align(8)))] - #[cfg_attr(target_pointer_width = "32", repr(align(4)))] - struct RetArea([::core::mem::MaybeUninit; 3 * ::core::mem::size_of::<*const u8>()]); - let mut ret_area = RetArea( - [::core::mem::MaybeUninit::uninit(); 3 * ::core::mem::size_of::<*const u8>()], + let l63 = i32::from( + *ptr0.add(48 + 13 * ::core::mem::size_of::<*const u8>()).cast::(), ); - let vec0 = key; - let ptr0 = vec0.as_ptr().cast::(); - let len0 = vec0.len(); - let ptr1 = ret_area.0.as_mut_ptr().cast::(); - #[cfg(target_arch = "wasm32")] - #[link(wasm_import_module = "host")] - unsafe extern "C" { - #[link_name = "config-var"] - fn wit_import2(_: *mut u8, _: usize, _: *mut u8); - } - #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { - 
unreachable!() - } - unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; - let l3 = i32::from(*ptr1.add(0).cast::()); - let result7 = match l3 { - 0 => None, + use super::wavs::types::service::AllowedHostPermission as V70; + let v70 = match l63 { + 0 => V70::All, 1 => { - let e = { - let l4 = *ptr1.add(::core::mem::size_of::<*const u8>()).cast::<*mut u8>(); - let l5 = *ptr1.add(2 * ::core::mem::size_of::<*const u8>()).cast::(); - let len6 = l5; - let bytes6 = _rt::Vec::from_raw_parts(l4.cast(), len6, len6); - _rt::string_lift(bytes6) + let e70 = { + let l64 = *ptr0 + .add(48 + 14 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l65 = *ptr0 + .add(48 + 15 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base69 = l64; + let len69 = l65; + let mut result69 = _rt::Vec::with_capacity(len69); + for i in 0..len69 { + let base = base69 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e69 = { + let l66 = *base.add(0).cast::<*mut u8>(); + let l67 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len68 = l67; + let bytes68 = _rt::Vec::from_raw_parts( + l66.cast(), + len68, + len68, + ); + _rt::string_lift(bytes68) + }; + result69.push(e69); + } + _rt::cabi_dealloc( + base69, + len69 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result69 }; - Some(e) + V70::Only(e70) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + V70::None } - _ => _rt::invalid_enum_discriminant(), }; - result7 + let l71 = i32::from( + *ptr0.add(48 + 16 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + let l72 = i32::from( + *ptr0.add(56 + 16 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + let l74 = i32::from( + *ptr0.add(72 + 16 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + let l76 = *ptr0 + .add(88 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l77 = *ptr0 + .add(88 + 17 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let 
base84 = l76; + let len84 = l77; + let mut result84 = _rt::Vec::with_capacity(len84); + for i in 0..len84 { + let base = base84.add(i * (4 * ::core::mem::size_of::<*const u8>())); + let e84 = { + let l78 = *base.add(0).cast::<*mut u8>(); + let l79 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len80 = l79; + let bytes80 = _rt::Vec::from_raw_parts(l78.cast(), len80, len80); + let l81 = *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l82 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len83 = l82; + let bytes83 = _rt::Vec::from_raw_parts(l81.cast(), len83, len83); + (_rt::string_lift(bytes80), _rt::string_lift(bytes83)) + }; + result84.push(e84); + } + _rt::cabi_dealloc( + base84, + len84 * (4 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l85 = *ptr0 + .add(88 + 18 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l86 = *ptr0 + .add(88 + 19 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base90 = l85; + let len90 = l86; + let mut result90 = _rt::Vec::with_capacity(len90); + for i in 0..len90 { + let base = base90.add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e90 = { + let l87 = *base.add(0).cast::<*mut u8>(); + let l88 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len89 = l88; + let bytes89 = _rt::Vec::from_raw_parts(l87.cast(), len89, len89); + _rt::string_lift(bytes89) + }; + result90.push(e90); + } + _rt::cabi_dealloc( + base90, + len90 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l91 = i32::from( + *ptr0.add(88 + 20 * ::core::mem::size_of::<*const u8>()).cast::(), + ); + use super::wavs::types::service::Submit as V153; + let v153 = match l91 { + 0 => V153::None, + n => { + debug_assert_eq!(n, 1, "invalid enum discriminant"); + let e153 = { + let l92 = *ptr0 + .add(96 + 20 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut 
u8>(); + let l93 = *ptr0 + .add(96 + 21 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len94 = l93; + let bytes94 = _rt::Vec::from_raw_parts(l92.cast(), len94, len94); + let l95 = i32::from( + *ptr0 + .add(96 + 22 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::ComponentSource as V119; + let v119 = match l95 { + 0 => { + let e119 = { + let l96 = *ptr0 + .add(96 + 23 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l97 = *ptr0 + .add(96 + 24 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len98 = l97; + let bytes98 = _rt::Vec::from_raw_parts( + l96.cast(), + len98, + len98, + ); + let l99 = *ptr0 + .add(96 + 25 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l100 = *ptr0 + .add(96 + 26 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len101 = l100; + let bytes101 = _rt::Vec::from_raw_parts( + l99.cast(), + len101, + len101, + ); + super::wavs::types::service::ComponentSourceDownload { + url: _rt::string_lift(bytes98), + digest: _rt::string_lift(bytes101), + } + }; + V119::Download(e119) + } + 1 => { + let e119 = { + let l102 = *ptr0 + .add(96 + 23 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l103 = *ptr0 + .add(96 + 24 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len104 = l103; + let bytes104 = _rt::Vec::from_raw_parts( + l102.cast(), + len104, + len104, + ); + let l105 = i32::from( + *ptr0 + .add(96 + 25 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l109 = i32::from( + *ptr0 + .add(96 + 28 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l113 = *ptr0 + .add(96 + 31 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l114 = *ptr0 + .add(96 + 32 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len115 = l114; + let bytes115 = _rt::Vec::from_raw_parts( + l113.cast(), + len115, + len115, + ); + super::wavs::types::service::Registry { + digest: _rt::string_lift(bytes104), + 
domain: match l105 { + 0 => None, + 1 => { + let e = { + let l106 = *ptr0 + .add(96 + 26 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l107 = *ptr0 + .add(96 + 27 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len108 = l107; + let bytes108 = _rt::Vec::from_raw_parts( + l106.cast(), + len108, + len108, + ); + _rt::string_lift(bytes108) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + version: match l109 { + 0 => None, + 1 => { + let e = { + let l110 = *ptr0 + .add(96 + 29 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l111 = *ptr0 + .add(96 + 30 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len112 = l111; + let bytes112 = _rt::Vec::from_raw_parts( + l110.cast(), + len112, + len112, + ); + _rt::string_lift(bytes112) + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + pkg: _rt::string_lift(bytes115), + } + }; + V119::Registry(e119) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + let e119 = { + let l116 = *ptr0 + .add(96 + 23 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l117 = *ptr0 + .add(96 + 24 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len118 = l117; + let bytes118 = _rt::Vec::from_raw_parts( + l116.cast(), + len118, + len118, + ); + _rt::string_lift(bytes118) + }; + V119::Digest(e119) + } + }; + let l120 = i32::from( + *ptr0 + .add(96 + 33 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::AllowedHostPermission as V127; + let v127 = match l120 { + 0 => V127::All, + 1 => { + let e127 = { + let l121 = *ptr0 + .add(96 + 34 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l122 = *ptr0 + .add(96 + 35 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base126 = l121; + let len126 = l122; + let mut result126 = _rt::Vec::with_capacity(len126); + for i in 0..len126 { + let base = base126 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e126 = 
{ + let l123 = *base.add(0).cast::<*mut u8>(); + let l124 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len125 = l124; + let bytes125 = _rt::Vec::from_raw_parts( + l123.cast(), + len125, + len125, + ); + _rt::string_lift(bytes125) + }; + result126.push(e126); + } + _rt::cabi_dealloc( + base126, + len126 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + result126 + }; + V127::Only(e127) + } + n => { + debug_assert_eq!(n, 2, "invalid enum discriminant"); + V127::None + } + }; + let l128 = i32::from( + *ptr0 + .add(96 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l129 = i32::from( + *ptr0 + .add(104 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l131 = i32::from( + *ptr0 + .add(120 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + let l133 = *ptr0 + .add(136 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l134 = *ptr0 + .add(136 + 37 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base141 = l133; + let len141 = l134; + let mut result141 = _rt::Vec::with_capacity(len141); + for i in 0..len141 { + let base = base141 + .add(i * (4 * ::core::mem::size_of::<*const u8>())); + let e141 = { + let l135 = *base.add(0).cast::<*mut u8>(); + let l136 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len137 = l136; + let bytes137 = _rt::Vec::from_raw_parts( + l135.cast(), + len137, + len137, + ); + let l138 = *base + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l139 = *base + .add(3 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len140 = l139; + let bytes140 = _rt::Vec::from_raw_parts( + l138.cast(), + len140, + len140, + ); + (_rt::string_lift(bytes137), _rt::string_lift(bytes140)) + }; + result141.push(e141); + } + _rt::cabi_dealloc( + base141, + len141 * (4 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l142 = *ptr0 + 
.add(136 + 38 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l143 = *ptr0 + .add(136 + 39 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base147 = l142; + let len147 = l143; + let mut result147 = _rt::Vec::with_capacity(len147); + for i in 0..len147 { + let base = base147 + .add(i * (2 * ::core::mem::size_of::<*const u8>())); + let e147 = { + let l144 = *base.add(0).cast::<*mut u8>(); + let l145 = *base + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let len146 = l145; + let bytes146 = _rt::Vec::from_raw_parts( + l144.cast(), + len146, + len146, + ); + _rt::string_lift(bytes146) + }; + result147.push(e147); + } + _rt::cabi_dealloc( + base147, + len147 * (2 * ::core::mem::size_of::<*const u8>()), + ::core::mem::size_of::<*const u8>(), + ); + let l148 = i32::from( + *ptr0 + .add(136 + 40 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::SignatureAlgorithm as V149; + let v149 = match l148 { + n => { + debug_assert_eq!(n, 0, "invalid enum discriminant"); + V149::Secp256k1 + } + }; + let l150 = i32::from( + *ptr0 + .add(137 + 40 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + super::wavs::types::service::AggregatorSubmit { + url: _rt::string_lift(bytes94), + component: super::wavs::types::service::Component { + source: v119, + permissions: super::wavs::types::service::Permissions { + allowed_http_hosts: v127, + file_system: _rt::bool_lift(l128 as u8), + }, + fuel_limit: match l129 { + 0 => None, + 1 => { + let e = { + let l130 = *ptr0 + .add(112 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l130 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + time_limit_seconds: match l131 { + 0 => None, + 1 => { + let e = { + let l132 = *ptr0 + .add(128 + 36 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l132 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + config: result141, + env_keys: result147, + }, + signature_kind: 
super::wavs::types::service::SignatureKind { + algorithm: v149, + prefix: match l150 { + 0 => None, + 1 => { + let e = { + let l151 = i32::from( + *ptr0 + .add(138 + 40 * ::core::mem::size_of::<*const u8>()) + .cast::(), + ); + use super::wavs::types::service::SignaturePrefix as V152; + let v152 = match l151 { + n => { + debug_assert_eq!(n, 0, "invalid enum discriminant"); + V152::Eip191 + } + }; + v152 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + }, + } + }; + V153::Aggregator(e153) + } + }; + let l154 = *ptr0 + .add(144 + 40 * ::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l155 = *ptr0 + .add(144 + 41 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len156 = l155; + let bytes156 = _rt::Vec::from_raw_parts(l154.cast(), len156, len156); + let result157 = super::wavs::types::service::WorkflowAndWorkflowId { + workflow: super::wavs::types::service::Workflow { + trigger: v37, + component: super::wavs::types::service::Component { + source: v62, + permissions: super::wavs::types::service::Permissions { + allowed_http_hosts: v70, + file_system: _rt::bool_lift(l71 as u8), + }, + fuel_limit: match l72 { + 0 => None, + 1 => { + let e = { + let l73 = *ptr0 + .add(64 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l73 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + time_limit_seconds: match l74 { + 0 => None, + 1 => { + let e = { + let l75 = *ptr0 + .add(80 + 16 * ::core::mem::size_of::<*const u8>()) + .cast::(); + l75 as u64 + }; + Some(e) + } + _ => _rt::invalid_enum_discriminant(), + }, + config: result84, + env_keys: result90, + }, + submit: v153, + }, + workflow_id: _rt::string_lift(bytes156), + }; + result157 } } #[allow(unused_unsafe, clippy::all)] - pub fn log(level: LogLevel, message: &str) -> () { + /// convenience function to get the event-id + pub fn get_event_id() -> EventId { unsafe { - use super::wavs::worker::layer_types::LogLevel as V0; - let result1 = match level { - 
V0::Error => 0i32, - V0::Warn => 1i32, - V0::Info => 2i32, - V0::Debug => 3i32, - V0::Trace => 4i32, - }; - let vec2 = message; - let ptr2 = vec2.as_ptr().cast::(); - let len2 = vec2.len(); + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit; 2 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 2 + * ::core::mem::size_of::<*const u8>()], + ); + let ptr0 = ret_area.0.as_mut_ptr().cast::(); #[cfg(target_arch = "wasm32")] #[link(wasm_import_module = "host")] unsafe extern "C" { - #[link_name = "log"] - fn wit_import3(_: i32, _: *mut u8, _: usize); + #[link_name = "get-event-id"] + fn wit_import1(_: *mut u8); } #[cfg(not(target_arch = "wasm32"))] - unsafe extern "C" fn wit_import3(_: i32, _: *mut u8, _: usize) { + unsafe extern "C" fn wit_import1(_: *mut u8) { unreachable!() } - unsafe { wit_import3(result1, ptr2.cast_mut(), len2) }; + unsafe { wit_import1(ptr0) }; + let l2 = *ptr0.add(0).cast::<*mut u8>(); + let l3 = *ptr0.add(::core::mem::size_of::<*const u8>()).cast::(); + let len4 = l3; + let result5 = _rt::Vec::from_raw_parts(l2.cast(), len4, len4); + result5 } } } @@ -16161,369 +20383,445 @@ mod _rt { /// ``` #[allow(unused_macros)] #[doc(hidden)] -macro_rules! __export_layer_trigger_world_impl { +macro_rules! 
__export_wavs_world_impl { ($ty:ident) => { self::export!($ty with_types_in self); }; ($ty:ident with_types_in $($path_to_types_root:tt)*) => { - $($path_to_types_root)*:: __export_world_layer_trigger_world_cabi!($ty - with_types_in $($path_to_types_root)*); + $($path_to_types_root)*:: __export_world_wavs_world_cabi!($ty with_types_in + $($path_to_types_root)*); }; } #[doc(inline)] -pub(crate) use __export_layer_trigger_world_impl as export; +pub(crate) use __export_wavs_world_impl as export; #[cfg(target_arch = "wasm32")] -#[unsafe(link_section = "component-type:wit-bindgen:0.41.0:wavs:worker@0.4.0:layer-trigger-world:encoded world")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:wavs:operator@1.2.0:wavs-world:encoded world" +)] #[doc(hidden)] #[allow(clippy::octal_escapes)] -pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 17448] = *b"\ -\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\x9d\x87\x01\x01A\x02\ -\x01A^\x01B1\x01r\x01\x05nanosw\x04\0\x09timestamp\x03\0\0\x01r\x02\x0bbech32-ad\ -drs\x0aprefix-leny\x04\0\x0ecosmos-address\x03\0\x02\x01o\x02ss\x01p\x04\x01r\x02\ -\x02tys\x0aattributes\x05\x04\0\x0ccosmos-event\x03\0\x06\x01ks\x01r\x07\x08chai\ -n-ids\x0crpc-endpoint\x08\x0dgrpc-endpoint\x08\x11grpc-web-endpoint\x08\x09gas-p\ -ricev\x09gas-denoms\x0dbech32-prefixs\x04\0\x13cosmos-chain-config\x03\0\x09\x01\ -p}\x01r\x01\x09raw-bytes\x0b\x04\0\x0bevm-address\x03\0\x0c\x01p\x0b\x01r\x02\x06\ -topics\x0e\x04data\x0b\x04\0\x12evm-event-log-data\x03\0\x0f\x01r\x03\x08chain-i\ -ds\x0bws-endpoint\x08\x0dhttp-endpoint\x08\x04\0\x10evm-chain-config\x03\0\x11\x01\ -r\x03\x07address\x0d\x0achain-names\x0aevent-hash\x0b\x04\0!trigger-source-evm-c\ -ontract-event\x03\0\x13\x01r\x03\x07address\x03\x0achain-names\x0aevent-types\x04\ -\0$trigger-source-cosmos-contract-event\x03\0\x15\x01kw\x01r\x04\x0achain-names\x08\ -n-blocksy\x0bstart-block\x17\x09end-block\x17\x04\0\x15block-interval-source\x03\ 
-\0\x18\x01k\x01\x01r\x03\x08schedules\x0astart-time\x1a\x08end-time\x1a\x04\0\x13\ -trigger-source-cron\x03\0\x1b\x01q\x05\x12evm-contract-event\x01\x14\0\x15cosmos\ --contract-event\x01\x16\0\x0eblock-interval\x01\x19\0\x04cron\x01\x1c\0\x06manua\ -l\0\0\x04\0\x0etrigger-source\x03\0\x1d\x01r\x03\x0aservice-ids\x0bworkflow-ids\x0e\ -trigger-source\x1e\x04\0\x0etrigger-config\x03\0\x1f\x01r\x04\x10contract-addres\ -s\x0d\x0achain-names\x03log\x10\x0cblock-heightw\x04\0\x1ftrigger-data-evm-contr\ -act-event\x03\0!\x01r\x04\x10contract-address\x03\x0achain-names\x05event\x07\x0c\ -block-heightw\x04\0\"trigger-data-cosmos-contract-event\x03\0#\x01r\x02\x0achain\ --names\x0cblock-heightw\x04\0\x13block-interval-data\x03\0%\x01r\x01\x0ctrigger-\ -time\x01\x04\0\x11trigger-data-cron\x03\0'\x01q\x05\x12evm-contract-event\x01\"\0\ -\x15cosmos-contract-event\x01$\0\x0eblock-interval\x01&\0\x04cron\x01(\0\x03raw\x01\ -\x0b\0\x04\0\x0ctrigger-data\x03\0)\x01r\x02\x06config\x20\x04data*\x04\0\x0etri\ -gger-action\x03\0+\x01r\x02\x07payload\x0b\x08ordering\x17\x04\0\x0dwasm-respons\ -e\x03\0-\x01q\x05\x05error\0\0\x04warn\0\0\x04info\0\0\x05debug\0\0\x05trace\0\0\ -\x04\0\x09log-level\x03\0/\x03\0\x1dwavs:worker/layer-types@0.4.0\x05\0\x02\x03\0\ -\0\x0etrigger-action\x03\0\x0etrigger-action\x03\0\x01\x02\x03\0\0\x0dwasm-respo\ -nse\x03\0\x0dwasm-response\x03\0\x03\x01B\x0a\x04\0\x08pollable\x03\x01\x01h\0\x01\ -@\x01\x04self\x01\0\x7f\x04\0\x16[method]pollable.ready\x01\x02\x01@\x01\x04self\ -\x01\x01\0\x04\0\x16[method]pollable.block\x01\x03\x01p\x01\x01py\x01@\x01\x02in\ -\x04\0\x05\x04\0\x04poll\x01\x06\x03\0\x12wasi:io/poll@0.2.0\x05\x05\x02\x03\0\x01\ -\x08pollable\x01B\x0f\x02\x03\x02\x01\x06\x04\0\x08pollable\x03\0\0\x01w\x04\0\x07\ -instant\x03\0\x02\x01w\x04\0\x08duration\x03\0\x04\x01@\0\0\x03\x04\0\x03now\x01\ -\x06\x01@\0\0\x05\x04\0\x0aresolution\x01\x07\x01i\x01\x01@\x01\x04when\x03\0\x08\ 
-\x04\0\x11subscribe-instant\x01\x09\x01@\x01\x04when\x05\0\x08\x04\0\x12subscrib\ -e-duration\x01\x0a\x03\0!wasi:clocks/monotonic-clock@0.2.0\x05\x07\x01B\x04\x04\0\ -\x05error\x03\x01\x01h\0\x01@\x01\x04self\x01\0s\x04\0\x1d[method]error.to-debug\ --string\x01\x02\x03\0\x13wasi:io/error@0.2.0\x05\x08\x02\x03\0\x03\x05error\x01B\ -(\x02\x03\x02\x01\x09\x04\0\x05error\x03\0\0\x02\x03\x02\x01\x06\x04\0\x08pollab\ -le\x03\0\x02\x01i\x01\x01q\x02\x15last-operation-failed\x01\x04\0\x06closed\0\0\x04\ -\0\x0cstream-error\x03\0\x05\x04\0\x0cinput-stream\x03\x01\x04\0\x0doutput-strea\ -m\x03\x01\x01h\x07\x01p}\x01j\x01\x0a\x01\x06\x01@\x02\x04self\x09\x03lenw\0\x0b\ -\x04\0\x19[method]input-stream.read\x01\x0c\x04\0\"[method]input-stream.blocking\ --read\x01\x0c\x01j\x01w\x01\x06\x01@\x02\x04self\x09\x03lenw\0\x0d\x04\0\x19[met\ -hod]input-stream.skip\x01\x0e\x04\0\"[method]input-stream.blocking-skip\x01\x0e\x01\ -i\x03\x01@\x01\x04self\x09\0\x0f\x04\0\x1e[method]input-stream.subscribe\x01\x10\ -\x01h\x08\x01@\x01\x04self\x11\0\x0d\x04\0![method]output-stream.check-write\x01\ -\x12\x01j\0\x01\x06\x01@\x02\x04self\x11\x08contents\x0a\0\x13\x04\0\x1b[method]\ -output-stream.write\x01\x14\x04\0.[method]output-stream.blocking-write-and-flush\ -\x01\x14\x01@\x01\x04self\x11\0\x13\x04\0\x1b[method]output-stream.flush\x01\x15\ -\x04\0$[method]output-stream.blocking-flush\x01\x15\x01@\x01\x04self\x11\0\x0f\x04\ -\0\x1f[method]output-stream.subscribe\x01\x16\x01@\x02\x04self\x11\x03lenw\0\x13\ -\x04\0\"[method]output-stream.write-zeroes\x01\x17\x04\05[method]output-stream.b\ -locking-write-zeroes-and-flush\x01\x17\x01@\x03\x04self\x11\x03src\x09\x03lenw\0\ -\x0d\x04\0\x1c[method]output-stream.splice\x01\x18\x04\0%[method]output-stream.b\ -locking-splice\x01\x18\x03\0\x15wasi:io/streams@0.2.0\x05\x0a\x02\x03\0\x02\x08d\ -uration\x02\x03\0\x04\x0cinput-stream\x02\x03\0\x04\x0doutput-stream\x01B\xc0\x01\ 
-\x02\x03\x02\x01\x0b\x04\0\x08duration\x03\0\0\x02\x03\x02\x01\x0c\x04\0\x0cinpu\ -t-stream\x03\0\x02\x02\x03\x02\x01\x0d\x04\0\x0doutput-stream\x03\0\x04\x02\x03\x02\ -\x01\x09\x04\0\x08io-error\x03\0\x06\x02\x03\x02\x01\x06\x04\0\x08pollable\x03\0\ -\x08\x01q\x0a\x03get\0\0\x04head\0\0\x04post\0\0\x03put\0\0\x06delete\0\0\x07con\ -nect\0\0\x07options\0\0\x05trace\0\0\x05patch\0\0\x05other\x01s\0\x04\0\x06metho\ -d\x03\0\x0a\x01q\x03\x04HTTP\0\0\x05HTTPS\0\0\x05other\x01s\0\x04\0\x06scheme\x03\ -\0\x0c\x01ks\x01k{\x01r\x02\x05rcode\x0e\x09info-code\x0f\x04\0\x11DNS-error-pay\ -load\x03\0\x10\x01k}\x01r\x02\x08alert-id\x12\x0dalert-message\x0e\x04\0\x1aTLS-\ -alert-received-payload\x03\0\x13\x01ky\x01r\x02\x0afield-name\x0e\x0afield-size\x15\ -\x04\0\x12field-size-payload\x03\0\x16\x01kw\x01k\x17\x01q'\x0bDNS-timeout\0\0\x09\ -DNS-error\x01\x11\0\x15destination-not-found\0\0\x17destination-unavailable\0\0\x19\ -destination-IP-prohibited\0\0\x19destination-IP-unroutable\0\0\x12connection-ref\ -used\0\0\x15connection-terminated\0\0\x12connection-timeout\0\0\x17connection-re\ -ad-timeout\0\0\x18connection-write-timeout\0\0\x18connection-limit-reached\0\0\x12\ -TLS-protocol-error\0\0\x15TLS-certificate-error\0\0\x12TLS-alert-received\x01\x14\ -\0\x13HTTP-request-denied\0\0\x1cHTTP-request-length-required\0\0\x16HTTP-reques\ -t-body-size\x01\x18\0\x1bHTTP-request-method-invalid\0\0\x18HTTP-request-URI-inv\ -alid\0\0\x19HTTP-request-URI-too-long\0\0\x20HTTP-request-header-section-size\x01\ -\x15\0\x18HTTP-request-header-size\x01\x19\0!HTTP-request-trailer-section-size\x01\ -\x15\0\x19HTTP-request-trailer-size\x01\x17\0\x18HTTP-response-incomplete\0\0!HT\ -TP-response-header-section-size\x01\x15\0\x19HTTP-response-header-size\x01\x17\0\ -\x17HTTP-response-body-size\x01\x18\0\"HTTP-response-trailer-section-size\x01\x15\ -\0\x1aHTTP-response-trailer-size\x01\x17\0\x1dHTTP-response-transfer-coding\x01\x0e\ 
-\0\x1cHTTP-response-content-coding\x01\x0e\0\x15HTTP-response-timeout\0\0\x13HTT\ -P-upgrade-failed\0\0\x13HTTP-protocol-error\0\0\x0dloop-detected\0\0\x13configur\ -ation-error\0\0\x0einternal-error\x01\x0e\0\x04\0\x0aerror-code\x03\0\x1a\x01q\x03\ -\x0einvalid-syntax\0\0\x09forbidden\0\0\x09immutable\0\0\x04\0\x0cheader-error\x03\ -\0\x1c\x01s\x04\0\x09field-key\x03\0\x1e\x01p}\x04\0\x0bfield-value\x03\0\x20\x04\ -\0\x06fields\x03\x01\x04\0\x07headers\x03\0\"\x04\0\x08trailers\x03\0\"\x04\0\x10\ -incoming-request\x03\x01\x04\0\x10outgoing-request\x03\x01\x04\0\x0frequest-opti\ -ons\x03\x01\x04\0\x11response-outparam\x03\x01\x01{\x04\0\x0bstatus-code\x03\0)\x04\ -\0\x11incoming-response\x03\x01\x04\0\x0dincoming-body\x03\x01\x04\0\x0ffuture-t\ -railers\x03\x01\x04\0\x11outgoing-response\x03\x01\x04\0\x0doutgoing-body\x03\x01\ -\x04\0\x18future-incoming-response\x03\x01\x01i\"\x01@\0\01\x04\0\x13[constructo\ -r]fields\x012\x01o\x02\x1f!\x01p3\x01j\x011\x01\x1d\x01@\x01\x07entries4\05\x04\0\ -\x18[static]fields.from-list\x016\x01h\"\x01p!\x01@\x02\x04self7\x04name\x1f\08\x04\ -\0\x12[method]fields.get\x019\x01@\x02\x04self7\x04name\x1f\0\x7f\x04\0\x12[meth\ -od]fields.has\x01:\x01j\0\x01\x1d\x01@\x03\x04self7\x04name\x1f\x05value8\0;\x04\ -\0\x12[method]fields.set\x01<\x01@\x02\x04self7\x04name\x1f\0;\x04\0\x15[method]\ -fields.delete\x01=\x01@\x03\x04self7\x04name\x1f\x05value!\0;\x04\0\x15[method]f\ -ields.append\x01>\x01@\x01\x04self7\04\x04\0\x16[method]fields.entries\x01?\x01@\ -\x01\x04self7\01\x04\0\x14[method]fields.clone\x01@\x01h%\x01@\x01\x04self\xc1\0\ -\0\x0b\x04\0\x1f[method]incoming-request.method\x01B\x01@\x01\x04self\xc1\0\0\x0e\ -\x04\0([method]incoming-request.path-with-query\x01C\x01k\x0d\x01@\x01\x04self\xc1\ -\0\0\xc4\0\x04\0\x1f[method]incoming-request.scheme\x01E\x04\0\"[method]incoming\ --request.authority\x01C\x01i#\x01@\x01\x04self\xc1\0\0\xc6\0\x04\0\x20[method]in\ 
-coming-request.headers\x01G\x01i,\x01j\x01\xc8\0\0\x01@\x01\x04self\xc1\0\0\xc9\0\ -\x04\0\x20[method]incoming-request.consume\x01J\x01i&\x01@\x01\x07headers\xc6\0\0\ -\xcb\0\x04\0\x1d[constructor]outgoing-request\x01L\x01h&\x01i/\x01j\x01\xce\0\0\x01\ -@\x01\x04self\xcd\0\0\xcf\0\x04\0\x1d[method]outgoing-request.body\x01P\x01@\x01\ -\x04self\xcd\0\0\x0b\x04\0\x1f[method]outgoing-request.method\x01Q\x01j\0\0\x01@\ -\x02\x04self\xcd\0\x06method\x0b\0\xd2\0\x04\0#[method]outgoing-request.set-meth\ -od\x01S\x01@\x01\x04self\xcd\0\0\x0e\x04\0([method]outgoing-request.path-with-qu\ -ery\x01T\x01@\x02\x04self\xcd\0\x0fpath-with-query\x0e\0\xd2\0\x04\0,[method]out\ -going-request.set-path-with-query\x01U\x01@\x01\x04self\xcd\0\0\xc4\0\x04\0\x1f[\ -method]outgoing-request.scheme\x01V\x01@\x02\x04self\xcd\0\x06scheme\xc4\0\0\xd2\ -\0\x04\0#[method]outgoing-request.set-scheme\x01W\x04\0\"[method]outgoing-reques\ -t.authority\x01T\x01@\x02\x04self\xcd\0\x09authority\x0e\0\xd2\0\x04\0&[method]o\ -utgoing-request.set-authority\x01X\x01@\x01\x04self\xcd\0\0\xc6\0\x04\0\x20[meth\ -od]outgoing-request.headers\x01Y\x01i'\x01@\0\0\xda\0\x04\0\x1c[constructor]requ\ -est-options\x01[\x01h'\x01k\x01\x01@\x01\x04self\xdc\0\0\xdd\0\x04\0'[method]req\ -uest-options.connect-timeout\x01^\x01@\x02\x04self\xdc\0\x08duration\xdd\0\0\xd2\ -\0\x04\0+[method]request-options.set-connect-timeout\x01_\x04\0*[method]request-\ -options.first-byte-timeout\x01^\x04\0.[method]request-options.set-first-byte-tim\ -eout\x01_\x04\0-[method]request-options.between-bytes-timeout\x01^\x04\01[method\ -]request-options.set-between-bytes-timeout\x01_\x01i(\x01i.\x01j\x01\xe1\0\x01\x1b\ -\x01@\x02\x05param\xe0\0\x08response\xe2\0\x01\0\x04\0\x1d[static]response-outpa\ -ram.set\x01c\x01h+\x01@\x01\x04self\xe4\0\0*\x04\0\x20[method]incoming-response.\ -status\x01e\x01@\x01\x04self\xe4\0\0\xc6\0\x04\0![method]incoming-response.heade\ 
-rs\x01f\x01@\x01\x04self\xe4\0\0\xc9\0\x04\0![method]incoming-response.consume\x01\ -g\x01h,\x01i\x03\x01j\x01\xe9\0\0\x01@\x01\x04self\xe8\0\0\xea\0\x04\0\x1c[metho\ -d]incoming-body.stream\x01k\x01i-\x01@\x01\x04this\xc8\0\0\xec\0\x04\0\x1c[stati\ -c]incoming-body.finish\x01m\x01h-\x01i\x09\x01@\x01\x04self\xee\0\0\xef\0\x04\0!\ -[method]future-trailers.subscribe\x01p\x01i$\x01k\xf1\0\x01j\x01\xf2\0\x01\x1b\x01\ -j\x01\xf3\0\0\x01k\xf4\0\x01@\x01\x04self\xee\0\0\xf5\0\x04\0\x1b[method]future-\ -trailers.get\x01v\x01@\x01\x07headers\xc6\0\0\xe1\0\x04\0\x1e[constructor]outgoi\ -ng-response\x01w\x01h.\x01@\x01\x04self\xf8\0\0*\x04\0%[method]outgoing-response\ -.status-code\x01y\x01@\x02\x04self\xf8\0\x0bstatus-code*\0\xd2\0\x04\0)[method]o\ -utgoing-response.set-status-code\x01z\x01@\x01\x04self\xf8\0\0\xc6\0\x04\0![meth\ -od]outgoing-response.headers\x01{\x01@\x01\x04self\xf8\0\0\xcf\0\x04\0\x1e[metho\ -d]outgoing-response.body\x01|\x01h/\x01i\x05\x01j\x01\xfe\0\0\x01@\x01\x04self\xfd\ -\0\0\xff\0\x04\0\x1b[method]outgoing-body.write\x01\x80\x01\x01j\0\x01\x1b\x01@\x02\ -\x04this\xce\0\x08trailers\xf2\0\0\x81\x01\x04\0\x1c[static]outgoing-body.finish\ -\x01\x82\x01\x01h0\x01@\x01\x04self\x83\x01\0\xef\0\x04\0*[method]future-incomin\ -g-response.subscribe\x01\x84\x01\x01i+\x01j\x01\x85\x01\x01\x1b\x01j\x01\x86\x01\ -\0\x01k\x87\x01\x01@\x01\x04self\x83\x01\0\x88\x01\x04\0$[method]future-incoming\ --response.get\x01\x89\x01\x01h\x07\x01k\x1b\x01@\x01\x03err\x8a\x01\0\x8b\x01\x04\ -\0\x0fhttp-error-code\x01\x8c\x01\x03\0\x15wasi:http/types@0.2.0\x05\x0e\x02\x03\ -\0\x05\x10outgoing-request\x02\x03\0\x05\x0frequest-options\x02\x03\0\x05\x18fut\ -ure-incoming-response\x02\x03\0\x05\x0aerror-code\x01B\x0f\x02\x03\x02\x01\x0f\x04\ -\0\x10outgoing-request\x03\0\0\x02\x03\x02\x01\x10\x04\0\x0frequest-options\x03\0\ -\x02\x02\x03\x02\x01\x11\x04\0\x18future-incoming-response\x03\0\x04\x02\x03\x02\ 
-\x01\x12\x04\0\x0aerror-code\x03\0\x06\x01i\x01\x01i\x03\x01k\x09\x01i\x05\x01j\x01\ -\x0b\x01\x07\x01@\x02\x07request\x08\x07options\x0a\0\x0c\x04\0\x06handle\x01\x0d\ -\x03\0\x20wasi:http/outgoing-handler@0.2.0\x05\x13\x02\x03\0\0\x10evm-chain-conf\ -ig\x02\x03\0\0\x13cosmos-chain-config\x02\x03\0\0\x09log-level\x01B\x11\x02\x03\x02\ -\x01\x14\x04\0\x10evm-chain-config\x03\0\0\x02\x03\x02\x01\x15\x04\0\x13cosmos-c\ -hain-config\x03\0\x02\x02\x03\x02\x01\x16\x04\0\x09log-level\x03\0\x04\x01k\x01\x01\ -@\x01\x0achain-names\0\x06\x04\0\x14get-evm-chain-config\x01\x07\x01k\x03\x01@\x01\ -\x0achain-names\0\x08\x04\0\x17get-cosmos-chain-config\x01\x09\x01ks\x01@\x01\x03\ -keys\0\x0a\x04\0\x0aconfig-var\x01\x0b\x01@\x02\x05level\x05\x07messages\x01\0\x04\ -\0\x03log\x01\x0c\x03\0\x04host\x05\x17\x01B\x0a\x01o\x02ss\x01p\0\x01@\0\0\x01\x04\ -\0\x0fget-environment\x01\x02\x01ps\x01@\0\0\x03\x04\0\x0dget-arguments\x01\x04\x01\ -ks\x01@\0\0\x05\x04\0\x0binitial-cwd\x01\x06\x03\0\x1awasi:cli/environment@0.2.0\ -\x05\x18\x01B\x03\x01j\0\0\x01@\x01\x06status\0\x01\0\x04\0\x04exit\x01\x01\x03\0\ -\x13wasi:cli/exit@0.2.0\x05\x19\x01B\x05\x02\x03\x02\x01\x0c\x04\0\x0cinput-stre\ -am\x03\0\0\x01i\x01\x01@\0\0\x02\x04\0\x09get-stdin\x01\x03\x03\0\x14wasi:cli/st\ -din@0.2.0\x05\x1a\x01B\x05\x02\x03\x02\x01\x0d\x04\0\x0doutput-stream\x03\0\0\x01\ -i\x01\x01@\0\0\x02\x04\0\x0aget-stdout\x01\x03\x03\0\x15wasi:cli/stdout@0.2.0\x05\ -\x1b\x01B\x05\x02\x03\x02\x01\x0d\x04\0\x0doutput-stream\x03\0\0\x01i\x01\x01@\0\ -\0\x02\x04\0\x0aget-stderr\x01\x03\x03\0\x15wasi:cli/stderr@0.2.0\x05\x1c\x01B\x01\ -\x04\0\x0eterminal-input\x03\x01\x03\0\x1dwasi:cli/terminal-input@0.2.0\x05\x1d\x01\ -B\x01\x04\0\x0fterminal-output\x03\x01\x03\0\x1ewasi:cli/terminal-output@0.2.0\x05\ -\x1e\x02\x03\0\x0d\x0eterminal-input\x01B\x06\x02\x03\x02\x01\x1f\x04\0\x0etermi\ -nal-input\x03\0\0\x01i\x01\x01k\x02\x01@\0\0\x03\x04\0\x12get-terminal-stdin\x01\ 
-\x04\x03\0\x1dwasi:cli/terminal-stdin@0.2.0\x05\x20\x02\x03\0\x0e\x0fterminal-ou\ -tput\x01B\x06\x02\x03\x02\x01!\x04\0\x0fterminal-output\x03\0\0\x01i\x01\x01k\x02\ -\x01@\0\0\x03\x04\0\x13get-terminal-stdout\x01\x04\x03\0\x1ewasi:cli/terminal-st\ -dout@0.2.0\x05\"\x01B\x06\x02\x03\x02\x01!\x04\0\x0fterminal-output\x03\0\0\x01i\ -\x01\x01k\x02\x01@\0\0\x03\x04\0\x13get-terminal-stderr\x01\x04\x03\0\x1ewasi:cl\ -i/terminal-stderr@0.2.0\x05#\x01B\x05\x01r\x02\x07secondsw\x0bnanosecondsy\x04\0\ -\x08datetime\x03\0\0\x01@\0\0\x01\x04\0\x03now\x01\x02\x04\0\x0aresolution\x01\x02\ -\x03\0\x1cwasi:clocks/wall-clock@0.2.0\x05$\x02\x03\0\x04\x05error\x02\x03\0\x12\ -\x08datetime\x01Br\x02\x03\x02\x01\x0c\x04\0\x0cinput-stream\x03\0\0\x02\x03\x02\ -\x01\x0d\x04\0\x0doutput-stream\x03\0\x02\x02\x03\x02\x01%\x04\0\x05error\x03\0\x04\ -\x02\x03\x02\x01&\x04\0\x08datetime\x03\0\x06\x01w\x04\0\x08filesize\x03\0\x08\x01\ -m\x08\x07unknown\x0cblock-device\x10character-device\x09directory\x04fifo\x0dsym\ -bolic-link\x0cregular-file\x06socket\x04\0\x0fdescriptor-type\x03\0\x0a\x01n\x06\ -\x04read\x05write\x13file-integrity-sync\x13data-integrity-sync\x14requested-wri\ -te-sync\x10mutate-directory\x04\0\x10descriptor-flags\x03\0\x0c\x01n\x01\x0esyml\ -ink-follow\x04\0\x0apath-flags\x03\0\x0e\x01n\x04\x06create\x09directory\x09excl\ -usive\x08truncate\x04\0\x0aopen-flags\x03\0\x10\x01w\x04\0\x0alink-count\x03\0\x12\ -\x01k\x07\x01r\x06\x04type\x0b\x0alink-count\x13\x04size\x09\x15data-access-time\ -stamp\x14\x1bdata-modification-timestamp\x14\x17status-change-timestamp\x14\x04\0\ -\x0fdescriptor-stat\x03\0\x15\x01q\x03\x09no-change\0\0\x03now\0\0\x09timestamp\x01\ -\x07\0\x04\0\x0dnew-timestamp\x03\0\x17\x01r\x02\x04type\x0b\x04names\x04\0\x0fd\ -irectory-entry\x03\0\x19\x01m%\x06access\x0bwould-block\x07already\x0ebad-descri\ -ptor\x04busy\x08deadlock\x05quota\x05exist\x0efile-too-large\x15illegal-byte-seq\ 
-uence\x0bin-progress\x0binterrupted\x07invalid\x02io\x0cis-directory\x04loop\x0e\ -too-many-links\x0cmessage-size\x0dname-too-long\x09no-device\x08no-entry\x07no-l\ -ock\x13insufficient-memory\x12insufficient-space\x0dnot-directory\x09not-empty\x0f\ -not-recoverable\x0bunsupported\x06no-tty\x0eno-such-device\x08overflow\x0dnot-pe\ -rmitted\x04pipe\x09read-only\x0cinvalid-seek\x0etext-file-busy\x0ccross-device\x04\ -\0\x0aerror-code\x03\0\x1b\x01m\x06\x06normal\x0asequential\x06random\x09will-ne\ -ed\x09dont-need\x08no-reuse\x04\0\x06advice\x03\0\x1d\x01r\x02\x05lowerw\x05uppe\ -rw\x04\0\x13metadata-hash-value\x03\0\x1f\x04\0\x0adescriptor\x03\x01\x04\0\x16d\ -irectory-entry-stream\x03\x01\x01h!\x01i\x01\x01j\x01$\x01\x1c\x01@\x02\x04self#\ -\x06offset\x09\0%\x04\0\"[method]descriptor.read-via-stream\x01&\x01i\x03\x01j\x01\ -'\x01\x1c\x01@\x02\x04self#\x06offset\x09\0(\x04\0#[method]descriptor.write-via-\ -stream\x01)\x01@\x01\x04self#\0(\x04\0$[method]descriptor.append-via-stream\x01*\ -\x01j\0\x01\x1c\x01@\x04\x04self#\x06offset\x09\x06length\x09\x06advice\x1e\0+\x04\ -\0\x19[method]descriptor.advise\x01,\x01@\x01\x04self#\0+\x04\0\x1c[method]descr\ -iptor.sync-data\x01-\x01j\x01\x0d\x01\x1c\x01@\x01\x04self#\0.\x04\0\x1c[method]\ -descriptor.get-flags\x01/\x01j\x01\x0b\x01\x1c\x01@\x01\x04self#\00\x04\0\x1b[me\ -thod]descriptor.get-type\x011\x01@\x02\x04self#\x04size\x09\0+\x04\0\x1b[method]\ -descriptor.set-size\x012\x01@\x03\x04self#\x15data-access-timestamp\x18\x1bdata-\ -modification-timestamp\x18\0+\x04\0\x1c[method]descriptor.set-times\x013\x01p}\x01\ -o\x024\x7f\x01j\x015\x01\x1c\x01@\x03\x04self#\x06length\x09\x06offset\x09\06\x04\ -\0\x17[method]descriptor.read\x017\x01j\x01\x09\x01\x1c\x01@\x03\x04self#\x06buf\ -fer4\x06offset\x09\08\x04\0\x18[method]descriptor.write\x019\x01i\"\x01j\x01:\x01\ -\x1c\x01@\x01\x04self#\0;\x04\0![method]descriptor.read-directory\x01<\x04\0\x17\ 
-[method]descriptor.sync\x01-\x01@\x02\x04self#\x04paths\0+\x04\0&[method]descrip\ -tor.create-directory-at\x01=\x01j\x01\x16\x01\x1c\x01@\x01\x04self#\0>\x04\0\x17\ -[method]descriptor.stat\x01?\x01@\x03\x04self#\x0apath-flags\x0f\x04paths\0>\x04\ -\0\x1a[method]descriptor.stat-at\x01@\x01@\x05\x04self#\x0apath-flags\x0f\x04pat\ -hs\x15data-access-timestamp\x18\x1bdata-modification-timestamp\x18\0+\x04\0\x1f[\ -method]descriptor.set-times-at\x01A\x01@\x05\x04self#\x0eold-path-flags\x0f\x08o\ -ld-paths\x0enew-descriptor#\x08new-paths\0+\x04\0\x1a[method]descriptor.link-at\x01\ -B\x01i!\x01j\x01\xc3\0\x01\x1c\x01@\x05\x04self#\x0apath-flags\x0f\x04paths\x0ao\ -pen-flags\x11\x05flags\x0d\0\xc4\0\x04\0\x1a[method]descriptor.open-at\x01E\x01j\ -\x01s\x01\x1c\x01@\x02\x04self#\x04paths\0\xc6\0\x04\0\x1e[method]descriptor.rea\ -dlink-at\x01G\x04\0&[method]descriptor.remove-directory-at\x01=\x01@\x04\x04self\ -#\x08old-paths\x0enew-descriptor#\x08new-paths\0+\x04\0\x1c[method]descriptor.re\ -name-at\x01H\x01@\x03\x04self#\x08old-paths\x08new-paths\0+\x04\0\x1d[method]des\ -criptor.symlink-at\x01I\x04\0![method]descriptor.unlink-file-at\x01=\x01@\x02\x04\ -self#\x05other#\0\x7f\x04\0![method]descriptor.is-same-object\x01J\x01j\x01\x20\x01\ -\x1c\x01@\x01\x04self#\0\xcb\0\x04\0\x20[method]descriptor.metadata-hash\x01L\x01\ -@\x03\x04self#\x0apath-flags\x0f\x04paths\0\xcb\0\x04\0#[method]descriptor.metad\ -ata-hash-at\x01M\x01h\"\x01k\x1a\x01j\x01\xcf\0\x01\x1c\x01@\x01\x04self\xce\0\0\ -\xd0\0\x04\03[method]directory-entry-stream.read-directory-entry\x01Q\x01h\x05\x01\ -k\x1c\x01@\x01\x03err\xd2\0\0\xd3\0\x04\0\x15filesystem-error-code\x01T\x03\0\x1b\ -wasi:filesystem/types@0.2.0\x05'\x02\x03\0\x13\x0adescriptor\x01B\x07\x02\x03\x02\ -\x01(\x04\0\x0adescriptor\x03\0\0\x01i\x01\x01o\x02\x02s\x01p\x03\x01@\0\0\x04\x04\ -\0\x0fget-directories\x01\x05\x03\0\x1ewasi:filesystem/preopens@0.2.0\x05)\x01B\x11\ 
-\x04\0\x07network\x03\x01\x01m\x15\x07unknown\x0daccess-denied\x0dnot-supported\x10\ -invalid-argument\x0dout-of-memory\x07timeout\x14concurrency-conflict\x0fnot-in-p\ -rogress\x0bwould-block\x0dinvalid-state\x10new-socket-limit\x14address-not-binda\ -ble\x0eaddress-in-use\x12remote-unreachable\x12connection-refused\x10connection-\ -reset\x12connection-aborted\x12datagram-too-large\x11name-unresolvable\x1atempor\ -ary-resolver-failure\x1apermanent-resolver-failure\x04\0\x0aerror-code\x03\0\x01\ -\x01m\x02\x04ipv4\x04ipv6\x04\0\x11ip-address-family\x03\0\x03\x01o\x04}}}}\x04\0\ -\x0cipv4-address\x03\0\x05\x01o\x08{{{{{{{{\x04\0\x0cipv6-address\x03\0\x07\x01q\ -\x02\x04ipv4\x01\x06\0\x04ipv6\x01\x08\0\x04\0\x0aip-address\x03\0\x09\x01r\x02\x04\ -port{\x07address\x06\x04\0\x13ipv4-socket-address\x03\0\x0b\x01r\x04\x04port{\x09\ -flow-infoy\x07address\x08\x08scope-idy\x04\0\x13ipv6-socket-address\x03\0\x0d\x01\ -q\x02\x04ipv4\x01\x0c\0\x04ipv6\x01\x0e\0\x04\0\x11ip-socket-address\x03\0\x0f\x03\ -\0\x1awasi:sockets/network@0.2.0\x05*\x02\x03\0\x15\x07network\x01B\x05\x02\x03\x02\ -\x01+\x04\0\x07network\x03\0\0\x01i\x01\x01@\0\0\x02\x04\0\x10instance-network\x01\ -\x03\x03\0#wasi:sockets/instance-network@0.2.0\x05,\x02\x03\0\x15\x0aerror-code\x02\ -\x03\0\x15\x11ip-socket-address\x02\x03\0\x15\x11ip-address-family\x01BD\x02\x03\ -\x02\x01\x06\x04\0\x08pollable\x03\0\0\x02\x03\x02\x01+\x04\0\x07network\x03\0\x02\ -\x02\x03\x02\x01-\x04\0\x0aerror-code\x03\0\x04\x02\x03\x02\x01.\x04\0\x11ip-soc\ -ket-address\x03\0\x06\x02\x03\x02\x01/\x04\0\x11ip-address-family\x03\0\x08\x01p\ -}\x01r\x02\x04data\x0a\x0eremote-address\x07\x04\0\x11incoming-datagram\x03\0\x0b\ -\x01k\x07\x01r\x02\x04data\x0a\x0eremote-address\x0d\x04\0\x11outgoing-datagram\x03\ -\0\x0e\x04\0\x0audp-socket\x03\x01\x04\0\x18incoming-datagram-stream\x03\x01\x04\ -\0\x18outgoing-datagram-stream\x03\x01\x01h\x10\x01h\x03\x01j\0\x01\x05\x01@\x03\ 
-\x04self\x13\x07network\x14\x0dlocal-address\x07\0\x15\x04\0\x1d[method]udp-sock\ -et.start-bind\x01\x16\x01@\x01\x04self\x13\0\x15\x04\0\x1e[method]udp-socket.fin\ -ish-bind\x01\x17\x01i\x11\x01i\x12\x01o\x02\x18\x19\x01j\x01\x1a\x01\x05\x01@\x02\ -\x04self\x13\x0eremote-address\x0d\0\x1b\x04\0\x19[method]udp-socket.stream\x01\x1c\ -\x01j\x01\x07\x01\x05\x01@\x01\x04self\x13\0\x1d\x04\0\x20[method]udp-socket.loc\ -al-address\x01\x1e\x04\0![method]udp-socket.remote-address\x01\x1e\x01@\x01\x04s\ -elf\x13\0\x09\x04\0![method]udp-socket.address-family\x01\x1f\x01j\x01}\x01\x05\x01\ -@\x01\x04self\x13\0\x20\x04\0$[method]udp-socket.unicast-hop-limit\x01!\x01@\x02\ -\x04self\x13\x05value}\0\x15\x04\0([method]udp-socket.set-unicast-hop-limit\x01\"\ -\x01j\x01w\x01\x05\x01@\x01\x04self\x13\0#\x04\0&[method]udp-socket.receive-buff\ -er-size\x01$\x01@\x02\x04self\x13\x05valuew\0\x15\x04\0*[method]udp-socket.set-r\ -eceive-buffer-size\x01%\x04\0#[method]udp-socket.send-buffer-size\x01$\x04\0'[me\ -thod]udp-socket.set-send-buffer-size\x01%\x01i\x01\x01@\x01\x04self\x13\0&\x04\0\ -\x1c[method]udp-socket.subscribe\x01'\x01h\x11\x01p\x0c\x01j\x01)\x01\x05\x01@\x02\ -\x04self(\x0bmax-resultsw\0*\x04\0([method]incoming-datagram-stream.receive\x01+\ -\x01@\x01\x04self(\0&\x04\0*[method]incoming-datagram-stream.subscribe\x01,\x01h\ -\x12\x01@\x01\x04self-\0#\x04\0+[method]outgoing-datagram-stream.check-send\x01.\ -\x01p\x0f\x01@\x02\x04self-\x09datagrams/\0#\x04\0%[method]outgoing-datagram-str\ -eam.send\x010\x01@\x01\x04self-\0&\x04\0*[method]outgoing-datagram-stream.subscr\ -ibe\x011\x03\0\x16wasi:sockets/udp@0.2.0\x050\x02\x03\0\x17\x0audp-socket\x01B\x0c\ -\x02\x03\x02\x01+\x04\0\x07network\x03\0\0\x02\x03\x02\x01-\x04\0\x0aerror-code\x03\ -\0\x02\x02\x03\x02\x01/\x04\0\x11ip-address-family\x03\0\x04\x02\x03\x02\x011\x04\ -\0\x0audp-socket\x03\0\x06\x01i\x07\x01j\x01\x08\x01\x03\x01@\x01\x0eaddress-fam\ 
-ily\x05\0\x09\x04\0\x11create-udp-socket\x01\x0a\x03\0$wasi:sockets/udp-create-s\ -ocket@0.2.0\x052\x01BT\x02\x03\x02\x01\x0c\x04\0\x0cinput-stream\x03\0\0\x02\x03\ -\x02\x01\x0d\x04\0\x0doutput-stream\x03\0\x02\x02\x03\x02\x01\x06\x04\0\x08polla\ -ble\x03\0\x04\x02\x03\x02\x01\x0b\x04\0\x08duration\x03\0\x06\x02\x03\x02\x01+\x04\ -\0\x07network\x03\0\x08\x02\x03\x02\x01-\x04\0\x0aerror-code\x03\0\x0a\x02\x03\x02\ -\x01.\x04\0\x11ip-socket-address\x03\0\x0c\x02\x03\x02\x01/\x04\0\x11ip-address-\ -family\x03\0\x0e\x01m\x03\x07receive\x04send\x04both\x04\0\x0dshutdown-type\x03\0\ -\x10\x04\0\x0atcp-socket\x03\x01\x01h\x12\x01h\x09\x01j\0\x01\x0b\x01@\x03\x04se\ -lf\x13\x07network\x14\x0dlocal-address\x0d\0\x15\x04\0\x1d[method]tcp-socket.sta\ -rt-bind\x01\x16\x01@\x01\x04self\x13\0\x15\x04\0\x1e[method]tcp-socket.finish-bi\ -nd\x01\x17\x01@\x03\x04self\x13\x07network\x14\x0eremote-address\x0d\0\x15\x04\0\ -\x20[method]tcp-socket.start-connect\x01\x18\x01i\x01\x01i\x03\x01o\x02\x19\x1a\x01\ -j\x01\x1b\x01\x0b\x01@\x01\x04self\x13\0\x1c\x04\0![method]tcp-socket.finish-con\ -nect\x01\x1d\x04\0\x1f[method]tcp-socket.start-listen\x01\x17\x04\0\x20[method]t\ -cp-socket.finish-listen\x01\x17\x01i\x12\x01o\x03\x1e\x19\x1a\x01j\x01\x1f\x01\x0b\ -\x01@\x01\x04self\x13\0\x20\x04\0\x19[method]tcp-socket.accept\x01!\x01j\x01\x0d\ -\x01\x0b\x01@\x01\x04self\x13\0\"\x04\0\x20[method]tcp-socket.local-address\x01#\ -\x04\0![method]tcp-socket.remote-address\x01#\x01@\x01\x04self\x13\0\x7f\x04\0\x1f\ -[method]tcp-socket.is-listening\x01$\x01@\x01\x04self\x13\0\x0f\x04\0![method]tc\ -p-socket.address-family\x01%\x01@\x02\x04self\x13\x05valuew\0\x15\x04\0*[method]\ -tcp-socket.set-listen-backlog-size\x01&\x01j\x01\x7f\x01\x0b\x01@\x01\x04self\x13\ -\0'\x04\0%[method]tcp-socket.keep-alive-enabled\x01(\x01@\x02\x04self\x13\x05val\ -ue\x7f\0\x15\x04\0)[method]tcp-socket.set-keep-alive-enabled\x01)\x01j\x01\x07\x01\ 
-\x0b\x01@\x01\x04self\x13\0*\x04\0'[method]tcp-socket.keep-alive-idle-time\x01+\x01\ -@\x02\x04self\x13\x05value\x07\0\x15\x04\0+[method]tcp-socket.set-keep-alive-idl\ -e-time\x01,\x04\0&[method]tcp-socket.keep-alive-interval\x01+\x04\0*[method]tcp-\ -socket.set-keep-alive-interval\x01,\x01j\x01y\x01\x0b\x01@\x01\x04self\x13\0-\x04\ -\0#[method]tcp-socket.keep-alive-count\x01.\x01@\x02\x04self\x13\x05valuey\0\x15\ -\x04\0'[method]tcp-socket.set-keep-alive-count\x01/\x01j\x01}\x01\x0b\x01@\x01\x04\ -self\x13\00\x04\0\x1c[method]tcp-socket.hop-limit\x011\x01@\x02\x04self\x13\x05v\ -alue}\0\x15\x04\0\x20[method]tcp-socket.set-hop-limit\x012\x01j\x01w\x01\x0b\x01\ -@\x01\x04self\x13\03\x04\0&[method]tcp-socket.receive-buffer-size\x014\x04\0*[me\ -thod]tcp-socket.set-receive-buffer-size\x01&\x04\0#[method]tcp-socket.send-buffe\ -r-size\x014\x04\0'[method]tcp-socket.set-send-buffer-size\x01&\x01i\x05\x01@\x01\ -\x04self\x13\05\x04\0\x1c[method]tcp-socket.subscribe\x016\x01@\x02\x04self\x13\x0d\ -shutdown-type\x11\0\x15\x04\0\x1b[method]tcp-socket.shutdown\x017\x03\0\x16wasi:\ -sockets/tcp@0.2.0\x053\x02\x03\0\x19\x0atcp-socket\x01B\x0c\x02\x03\x02\x01+\x04\ -\0\x07network\x03\0\0\x02\x03\x02\x01-\x04\0\x0aerror-code\x03\0\x02\x02\x03\x02\ -\x01/\x04\0\x11ip-address-family\x03\0\x04\x02\x03\x02\x014\x04\0\x0atcp-socket\x03\ -\0\x06\x01i\x07\x01j\x01\x08\x01\x03\x01@\x01\x0eaddress-family\x05\0\x09\x04\0\x11\ -create-tcp-socket\x01\x0a\x03\0$wasi:sockets/tcp-create-socket@0.2.0\x055\x02\x03\ -\0\x15\x0aip-address\x01B\x16\x02\x03\x02\x01\x06\x04\0\x08pollable\x03\0\0\x02\x03\ -\x02\x01+\x04\0\x07network\x03\0\x02\x02\x03\x02\x01-\x04\0\x0aerror-code\x03\0\x04\ -\x02\x03\x02\x016\x04\0\x0aip-address\x03\0\x06\x04\0\x16resolve-address-stream\x03\ -\x01\x01h\x08\x01k\x07\x01j\x01\x0a\x01\x05\x01@\x01\x04self\x09\0\x0b\x04\03[me\ -thod]resolve-address-stream.resolve-next-address\x01\x0c\x01i\x01\x01@\x01\x04se\ 
-lf\x09\0\x0d\x04\0([method]resolve-address-stream.subscribe\x01\x0e\x01h\x03\x01\ -i\x08\x01j\x01\x10\x01\x05\x01@\x02\x07network\x0f\x04names\0\x11\x04\0\x11resol\ -ve-addresses\x01\x12\x03\0!wasi:sockets/ip-name-lookup@0.2.0\x057\x01B\x05\x01p}\ -\x01@\x01\x03lenw\0\0\x04\0\x10get-random-bytes\x01\x01\x01@\0\0w\x04\0\x0eget-r\ -andom-u64\x01\x02\x03\0\x18wasi:random/random@0.2.0\x058\x01B\x05\x01p}\x01@\x01\ -\x03lenw\0\0\x04\0\x19get-insecure-random-bytes\x01\x01\x01@\0\0w\x04\0\x17get-i\ -nsecure-random-u64\x01\x02\x03\0\x1awasi:random/insecure@0.2.0\x059\x01B\x03\x01\ -o\x02ww\x01@\0\0\0\x04\0\x0dinsecure-seed\x01\x01\x03\0\x1fwasi:random/insecure-\ -seed@0.2.0\x05:\x01k\x04\x01j\x01;\x01s\x01@\x01\x0etrigger-action\x02\0<\x04\0\x03\ -run\x01=\x04\0%wavs:worker/layer-trigger-world@0.4.0\x04\0\x0b\x19\x01\0\x13laye\ -r-trigger-world\x03\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dwit-component\ -\x070.227.1\x10wit-bindgen-rust\x060.41.0"; +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 20560] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\xce\x9f\x01\x01A\x02\ +\x01A~\x01B\x0b\x01s\x04\0\x06digest\x03\0\0\x01r\x01\x05nanosw\x04\0\x09timesta\ +mp\x03\0\x02\x01r\x01\x04secsw\x04\0\x08duration\x03\0\x04\x01o\x02ww\x01r\x01\x05\ +value\x06\x04\0\x04u128\x03\0\x07\x01q\x05\x05error\0\0\x04warn\0\0\x04info\0\0\x05\ +debug\0\0\x05trace\0\0\x04\0\x09log-level\x03\0\x09\x03\0\x15wavs:types/core@1.2\ +.0\x05\0\x01B\x1c\x01s\x04\0\x09chain-key\x03\0\0\x01p}\x04\0\x0bevm-tx-hash\x03\ +\0\x02\x01s\x04\0\x0ecosmos-tx-hash\x03\0\x04\x01q\x02\x03evm\x01\x03\0\x06cosmo\ +s\x01\x05\0\x04\0\x0bany-tx-hash\x03\0\x06\x01r\x02\x0bbech32-addrs\x0aprefix-le\ +ny\x04\0\x0ecosmos-address\x03\0\x08\x01o\x02ss\x01p\x0a\x01r\x02\x02tys\x0aattr\ +ibutes\x0b\x04\0\x0ccosmos-event\x03\0\x0c\x01ks\x01r\x07\x08chain-ids\x0crpc-en\ +dpoint\x0e\x0dgrpc-endpoint\x0e\x11grpc-web-endpoint\x0e\x09gas-pricev\x09gas-de\ 
+noms\x0dbech32-prefixs\x04\0\x13cosmos-chain-config\x03\0\x0f\x01p}\x01r\x01\x09\ +raw-bytes\x11\x04\0\x0bevm-address\x03\0\x12\x01p\x11\x01r\x02\x06topics\x14\x04\ +data\x11\x04\0\x12evm-event-log-data\x03\0\x15\x01kw\x01r\x08\x07address\x13\x04\ +data\x16\x07tx-hash\x03\x0cblock-numberw\x09log-indexw\x0ablock-hash\x11\x0fbloc\ +k-timestamp\x17\x08tx-indexw\x04\0\x0devm-event-log\x03\0\x18\x01r\x03\x08chain-\ +ids\x0bws-endpoint\x0e\x0dhttp-endpoint\x0e\x04\0\x10evm-chain-config\x03\0\x1a\x03\ +\0\x16wavs:types/chain@1.2.0\x05\x01\x02\x03\0\0\x06digest\x02\x03\0\0\x09timest\ +amp\x02\x03\0\x01\x09chain-key\x02\x03\0\x01\x0bevm-address\x02\x03\0\x01\x0ecos\ +mos-address\x01BO\x02\x03\x02\x01\x02\x04\0\x06digest\x03\0\0\x02\x03\x02\x01\x03\ +\x04\0\x09timestamp\x03\0\x02\x02\x03\x02\x01\x04\x04\0\x09chain-key\x03\0\x04\x02\ +\x03\x02\x01\x05\x04\0\x0bevm-address\x03\0\x06\x02\x03\x02\x01\x06\x04\0\x0ecos\ +mos-address\x03\0\x08\x01s\x04\0\x0aservice-id\x03\0\x0a\x01s\x04\0\x0bworkflow-\ +id\x03\0\x0c\x01s\x04\0\x0bpackage-ref\x03\0\x0e\x01s\x04\0\x0esemver-version\x03\ +\0\x10\x01q\x02\x06active\0\0\x06paused\0\0\x04\0\x0eservice-status\x03\0\x12\x01\ +r\x02\x05chain\x05\x07address\x07\x04\0\x0bevm-manager\x03\0\x14\x01q\x01\x03evm\ +\x01\x15\0\x04\0\x0fservice-manager\x03\0\x16\x01r\x02\x03urls\x06digest\x01\x04\ +\0\x19component-source-download\x03\0\x18\x01ks\x01k\x11\x01r\x04\x06digest\x01\x06\ +domain\x1a\x07version\x1b\x03pkg\x0f\x04\0\x08registry\x03\0\x1c\x01q\x03\x08dow\ +nload\x01\x19\0\x08registry\x01\x1d\0\x06digest\x01\x01\0\x04\0\x10component-sou\ +rce\x03\0\x1e\x01ps\x01q\x03\x03all\0\0\x04only\x01\x20\0\x04none\0\0\x04\0\x17a\ +llowed-host-permission\x03\0!\x01r\x02\x12allowed-http-hosts\"\x0bfile-system\x7f\ +\x04\0\x0bpermissions\x03\0#\x01kw\x01o\x02ss\x01p&\x01r\x06\x06source\x1f\x0bpe\ +rmissions$\x0afuel-limit%\x12time-limit-seconds%\x06config'\x08env-keys\x20\x04\0\ 
+\x09component\x03\0(\x01p}\x01r\x03\x07address\x07\x05chain\x05\x0aevent-hash*\x04\ +\0\x1atrigger-evm-contract-event\x03\0+\x01r\x03\x07address\x09\x05chain\x05\x0a\ +event-types\x04\0\x1dtrigger-cosmos-contract-event\x03\0-\x01r\x04\x05chain\x05\x08\ +n-blocksy\x0bstart-block%\x09end-block%\x04\0\x16trigger-block-interval\x03\0/\x01\ +k\x03\x01r\x03\x08schedules\x0astart-time1\x08end-time1\x04\0\x0ctrigger-cron\x03\ +\02\x01q\x05\x12evm-contract-event\x01,\0\x15cosmos-contract-event\x01.\0\x0eblo\ +ck-interval\x010\0\x04cron\x013\0\x06manual\0\0\x04\0\x07trigger\x03\04\x01q\x01\ +\x09secp256k1\0\0\x04\0\x13signature-algorithm\x03\06\x01q\x01\x06eip191\0\0\x04\ +\0\x10signature-prefix\x03\08\x01k9\x01r\x02\x09algorithm7\x06prefix:\x04\0\x0es\ +ignature-kind\x03\0;\x01r\x03\x03urls\x09component)\x0esignature-kind<\x04\0\x11\ +aggregator-submit\x03\0=\x01q\x02\x04none\0\0\x0aaggregator\x01>\0\x04\0\x06subm\ +it\x03\0?\x01r\x03\x07trigger5\x09component)\x06submit\xc0\0\x04\0\x08workflow\x03\ +\0A\x01o\x02\x0d\xc2\0\x01p\xc3\0\x01r\x04\x04names\x09workflows\xc4\0\x06status\ +\x13\x07manager\x17\x04\0\x07service\x03\0E\x01r\x03\x05chain\x05\x07address\x07\ +\x07max-gas%\x04\0\x17evm-contract-submission\x03\0G\x01q\x01\x03evm\x01\xc8\0\0\ +\x04\0\x0aaggregator\x03\0I\x01r\x02\x07service\xc6\0\x0bworkflow-id\x0d\x04\0\x17\ +service-and-workflow-id\x03\0K\x01r\x02\x08workflow\xc2\0\x0bworkflow-id\x0d\x04\ +\0\x18workflow-and-workflow-id\x03\0M\x03\0\x18wavs:types/service@1.2.0\x05\x07\x02\ +\x03\0\x01\x0devm-event-log\x02\x03\0\x01\x0ccosmos-event\x01B\x19\x02\x03\x02\x01\ +\x04\x04\0\x09chain-key\x03\0\0\x02\x03\x02\x01\x05\x04\0\x0bevm-address\x03\0\x02\ +\x02\x03\x02\x01\x08\x04\0\x0devm-event-log\x03\0\x04\x02\x03\x02\x01\x06\x04\0\x0e\ +cosmos-address\x03\0\x06\x02\x03\x02\x01\x09\x04\0\x0ccosmos-event\x03\0\x08\x02\ +\x03\x02\x01\x03\x04\0\x09timestamp\x03\0\x0a\x01p}\x04\0\x08event-id\x03\0\x0c\x01\ 
+r\x02\x05chain\x01\x03log\x05\x04\0\x1ftrigger-data-evm-contract-event\x03\0\x0e\ +\x01r\x05\x10contract-address\x07\x05chain\x01\x05event\x09\x0bevent-indexw\x0cb\ +lock-heightw\x04\0\"trigger-data-cosmos-contract-event\x03\0\x10\x01r\x02\x05cha\ +in\x01\x0cblock-heightw\x04\0\x1btrigger-data-block-interval\x03\0\x12\x01r\x01\x0c\ +trigger-time\x0b\x04\0\x11trigger-data-cron\x03\0\x14\x01p}\x01q\x05\x12evm-cont\ +ract-event\x01\x0f\0\x15cosmos-contract-event\x01\x11\0\x0eblock-interval\x01\x13\ +\0\x04cron\x01\x15\0\x03raw\x01\x16\0\x04\0\x0ctrigger-data\x03\0\x17\x03\0\x17w\ +avs:types/events@1.2.0\x05\x0a\x02\x03\0\x02\x0aservice-id\x02\x03\0\x02\x0bwork\ +flow-id\x02\x03\0\x02\x07trigger\x02\x03\0\x03\x0ctrigger-data\x01B\x0c\x02\x03\x02\ +\x01\x0b\x04\0\x0aservice-id\x03\0\0\x02\x03\x02\x01\x0c\x04\0\x0bworkflow-id\x03\ +\0\x02\x02\x03\x02\x01\x0d\x04\0\x07trigger\x03\0\x04\x02\x03\x02\x01\x0e\x04\0\x0c\ +trigger-data\x03\0\x06\x01r\x03\x0aservice-id\x01\x0bworkflow-id\x03\x07trigger\x05\ +\x04\0\x0etrigger-config\x03\0\x08\x01r\x02\x06config\x09\x04data\x07\x04\0\x0et\ +rigger-action\x03\0\x0a\x03\0\x19wavs:operator/input@1.2.0\x05\x0f\x02\x03\0\x04\ +\x0etrigger-action\x03\0\x0etrigger-action\x03\0\x10\x01B\x04\x01p}\x01kw\x01r\x02\ +\x07payload\0\x08ordering\x01\x04\0\x0dwasm-response\x03\0\x02\x03\0\x1awavs:ope\ +rator/output@1.2.0\x05\x12\x02\x03\0\x05\x0dwasm-response\x03\0\x0dwasm-response\ +\x03\0\x13\x01B\x0a\x04\0\x08pollable\x03\x01\x01h\0\x01@\x01\x04self\x01\0\x7f\x04\ +\0\x16[method]pollable.ready\x01\x02\x01@\x01\x04self\x01\x01\0\x04\0\x16[method\ +]pollable.block\x01\x03\x01p\x01\x01py\x01@\x01\x02in\x04\0\x05\x04\0\x04poll\x01\ +\x06\x03\0\x12wasi:io/poll@0.2.0\x05\x15\x02\x03\0\x06\x08pollable\x01B\x0f\x02\x03\ +\x02\x01\x16\x04\0\x08pollable\x03\0\0\x01w\x04\0\x07instant\x03\0\x02\x01w\x04\0\ +\x08duration\x03\0\x04\x01@\0\0\x03\x04\0\x03now\x01\x06\x01@\0\0\x05\x04\0\x0ar\ 
+esolution\x01\x07\x01i\x01\x01@\x01\x04when\x03\0\x08\x04\0\x11subscribe-instant\ +\x01\x09\x01@\x01\x04when\x05\0\x08\x04\0\x12subscribe-duration\x01\x0a\x03\0!wa\ +si:clocks/monotonic-clock@0.2.0\x05\x17\x01B\x04\x04\0\x05error\x03\x01\x01h\0\x01\ +@\x01\x04self\x01\0s\x04\0\x1d[method]error.to-debug-string\x01\x02\x03\0\x13was\ +i:io/error@0.2.0\x05\x18\x02\x03\0\x08\x05error\x01B(\x02\x03\x02\x01\x19\x04\0\x05\ +error\x03\0\0\x02\x03\x02\x01\x16\x04\0\x08pollable\x03\0\x02\x01i\x01\x01q\x02\x15\ +last-operation-failed\x01\x04\0\x06closed\0\0\x04\0\x0cstream-error\x03\0\x05\x04\ +\0\x0cinput-stream\x03\x01\x04\0\x0doutput-stream\x03\x01\x01h\x07\x01p}\x01j\x01\ +\x0a\x01\x06\x01@\x02\x04self\x09\x03lenw\0\x0b\x04\0\x19[method]input-stream.re\ +ad\x01\x0c\x04\0\"[method]input-stream.blocking-read\x01\x0c\x01j\x01w\x01\x06\x01\ +@\x02\x04self\x09\x03lenw\0\x0d\x04\0\x19[method]input-stream.skip\x01\x0e\x04\0\ +\"[method]input-stream.blocking-skip\x01\x0e\x01i\x03\x01@\x01\x04self\x09\0\x0f\ +\x04\0\x1e[method]input-stream.subscribe\x01\x10\x01h\x08\x01@\x01\x04self\x11\0\ +\x0d\x04\0![method]output-stream.check-write\x01\x12\x01j\0\x01\x06\x01@\x02\x04\ +self\x11\x08contents\x0a\0\x13\x04\0\x1b[method]output-stream.write\x01\x14\x04\0\ +.[method]output-stream.blocking-write-and-flush\x01\x14\x01@\x01\x04self\x11\0\x13\ +\x04\0\x1b[method]output-stream.flush\x01\x15\x04\0$[method]output-stream.blocki\ +ng-flush\x01\x15\x01@\x01\x04self\x11\0\x0f\x04\0\x1f[method]output-stream.subsc\ +ribe\x01\x16\x01@\x02\x04self\x11\x03lenw\0\x13\x04\0\"[method]output-stream.wri\ +te-zeroes\x01\x17\x04\05[method]output-stream.blocking-write-zeroes-and-flush\x01\ +\x17\x01@\x03\x04self\x11\x03src\x09\x03lenw\0\x0d\x04\0\x1c[method]output-strea\ +m.splice\x01\x18\x04\0%[method]output-stream.blocking-splice\x01\x18\x03\0\x15wa\ +si:io/streams@0.2.0\x05\x1a\x02\x03\0\x07\x08duration\x02\x03\0\x09\x0cinput-str\ 
+eam\x02\x03\0\x09\x0doutput-stream\x01B\xc0\x01\x02\x03\x02\x01\x1b\x04\0\x08dur\ +ation\x03\0\0\x02\x03\x02\x01\x1c\x04\0\x0cinput-stream\x03\0\x02\x02\x03\x02\x01\ +\x1d\x04\0\x0doutput-stream\x03\0\x04\x02\x03\x02\x01\x19\x04\0\x08io-error\x03\0\ +\x06\x02\x03\x02\x01\x16\x04\0\x08pollable\x03\0\x08\x01q\x0a\x03get\0\0\x04head\ +\0\0\x04post\0\0\x03put\0\0\x06delete\0\0\x07connect\0\0\x07options\0\0\x05trace\ +\0\0\x05patch\0\0\x05other\x01s\0\x04\0\x06method\x03\0\x0a\x01q\x03\x04HTTP\0\0\ +\x05HTTPS\0\0\x05other\x01s\0\x04\0\x06scheme\x03\0\x0c\x01ks\x01k{\x01r\x02\x05\ +rcode\x0e\x09info-code\x0f\x04\0\x11DNS-error-payload\x03\0\x10\x01k}\x01r\x02\x08\ +alert-id\x12\x0dalert-message\x0e\x04\0\x1aTLS-alert-received-payload\x03\0\x13\x01\ +ky\x01r\x02\x0afield-name\x0e\x0afield-size\x15\x04\0\x12field-size-payload\x03\0\ +\x16\x01kw\x01k\x17\x01q'\x0bDNS-timeout\0\0\x09DNS-error\x01\x11\0\x15destinati\ +on-not-found\0\0\x17destination-unavailable\0\0\x19destination-IP-prohibited\0\0\ +\x19destination-IP-unroutable\0\0\x12connection-refused\0\0\x15connection-termin\ +ated\0\0\x12connection-timeout\0\0\x17connection-read-timeout\0\0\x18connection-\ +write-timeout\0\0\x18connection-limit-reached\0\0\x12TLS-protocol-error\0\0\x15T\ +LS-certificate-error\0\0\x12TLS-alert-received\x01\x14\0\x13HTTP-request-denied\0\ +\0\x1cHTTP-request-length-required\0\0\x16HTTP-request-body-size\x01\x18\0\x1bHT\ +TP-request-method-invalid\0\0\x18HTTP-request-URI-invalid\0\0\x19HTTP-request-UR\ +I-too-long\0\0\x20HTTP-request-header-section-size\x01\x15\0\x18HTTP-request-hea\ +der-size\x01\x19\0!HTTP-request-trailer-section-size\x01\x15\0\x19HTTP-request-t\ +railer-size\x01\x17\0\x18HTTP-response-incomplete\0\0!HTTP-response-header-secti\ +on-size\x01\x15\0\x19HTTP-response-header-size\x01\x17\0\x17HTTP-response-body-s\ +ize\x01\x18\0\"HTTP-response-trailer-section-size\x01\x15\0\x1aHTTP-response-tra\ 
+iler-size\x01\x17\0\x1dHTTP-response-transfer-coding\x01\x0e\0\x1cHTTP-response-\ +content-coding\x01\x0e\0\x15HTTP-response-timeout\0\0\x13HTTP-upgrade-failed\0\0\ +\x13HTTP-protocol-error\0\0\x0dloop-detected\0\0\x13configuration-error\0\0\x0ei\ +nternal-error\x01\x0e\0\x04\0\x0aerror-code\x03\0\x1a\x01q\x03\x0einvalid-syntax\ +\0\0\x09forbidden\0\0\x09immutable\0\0\x04\0\x0cheader-error\x03\0\x1c\x01s\x04\0\ +\x09field-key\x03\0\x1e\x01p}\x04\0\x0bfield-value\x03\0\x20\x04\0\x06fields\x03\ +\x01\x04\0\x07headers\x03\0\"\x04\0\x08trailers\x03\0\"\x04\0\x10incoming-reques\ +t\x03\x01\x04\0\x10outgoing-request\x03\x01\x04\0\x0frequest-options\x03\x01\x04\ +\0\x11response-outparam\x03\x01\x01{\x04\0\x0bstatus-code\x03\0)\x04\0\x11incomi\ +ng-response\x03\x01\x04\0\x0dincoming-body\x03\x01\x04\0\x0ffuture-trailers\x03\x01\ +\x04\0\x11outgoing-response\x03\x01\x04\0\x0doutgoing-body\x03\x01\x04\0\x18futu\ +re-incoming-response\x03\x01\x01i\"\x01@\0\01\x04\0\x13[constructor]fields\x012\x01\ +o\x02\x1f!\x01p3\x01j\x011\x01\x1d\x01@\x01\x07entries4\05\x04\0\x18[static]fiel\ +ds.from-list\x016\x01h\"\x01p!\x01@\x02\x04self7\x04name\x1f\08\x04\0\x12[method\ +]fields.get\x019\x01@\x02\x04self7\x04name\x1f\0\x7f\x04\0\x12[method]fields.has\ +\x01:\x01j\0\x01\x1d\x01@\x03\x04self7\x04name\x1f\x05value8\0;\x04\0\x12[method\ +]fields.set\x01<\x01@\x02\x04self7\x04name\x1f\0;\x04\0\x15[method]fields.delete\ +\x01=\x01@\x03\x04self7\x04name\x1f\x05value!\0;\x04\0\x15[method]fields.append\x01\ +>\x01@\x01\x04self7\04\x04\0\x16[method]fields.entries\x01?\x01@\x01\x04self7\01\ +\x04\0\x14[method]fields.clone\x01@\x01h%\x01@\x01\x04self\xc1\0\0\x0b\x04\0\x1f\ +[method]incoming-request.method\x01B\x01@\x01\x04self\xc1\0\0\x0e\x04\0([method]\ +incoming-request.path-with-query\x01C\x01k\x0d\x01@\x01\x04self\xc1\0\0\xc4\0\x04\ +\0\x1f[method]incoming-request.scheme\x01E\x04\0\"[method]incoming-request.autho\ 
+rity\x01C\x01i#\x01@\x01\x04self\xc1\0\0\xc6\0\x04\0\x20[method]incoming-request\ +.headers\x01G\x01i,\x01j\x01\xc8\0\0\x01@\x01\x04self\xc1\0\0\xc9\0\x04\0\x20[me\ +thod]incoming-request.consume\x01J\x01i&\x01@\x01\x07headers\xc6\0\0\xcb\0\x04\0\ +\x1d[constructor]outgoing-request\x01L\x01h&\x01i/\x01j\x01\xce\0\0\x01@\x01\x04\ +self\xcd\0\0\xcf\0\x04\0\x1d[method]outgoing-request.body\x01P\x01@\x01\x04self\xcd\ +\0\0\x0b\x04\0\x1f[method]outgoing-request.method\x01Q\x01j\0\0\x01@\x02\x04self\ +\xcd\0\x06method\x0b\0\xd2\0\x04\0#[method]outgoing-request.set-method\x01S\x01@\ +\x01\x04self\xcd\0\0\x0e\x04\0([method]outgoing-request.path-with-query\x01T\x01\ +@\x02\x04self\xcd\0\x0fpath-with-query\x0e\0\xd2\0\x04\0,[method]outgoing-reques\ +t.set-path-with-query\x01U\x01@\x01\x04self\xcd\0\0\xc4\0\x04\0\x1f[method]outgo\ +ing-request.scheme\x01V\x01@\x02\x04self\xcd\0\x06scheme\xc4\0\0\xd2\0\x04\0#[me\ +thod]outgoing-request.set-scheme\x01W\x04\0\"[method]outgoing-request.authority\x01\ +T\x01@\x02\x04self\xcd\0\x09authority\x0e\0\xd2\0\x04\0&[method]outgoing-request\ +.set-authority\x01X\x01@\x01\x04self\xcd\0\0\xc6\0\x04\0\x20[method]outgoing-req\ +uest.headers\x01Y\x01i'\x01@\0\0\xda\0\x04\0\x1c[constructor]request-options\x01\ +[\x01h'\x01k\x01\x01@\x01\x04self\xdc\0\0\xdd\0\x04\0'[method]request-options.co\ +nnect-timeout\x01^\x01@\x02\x04self\xdc\0\x08duration\xdd\0\0\xd2\0\x04\0+[metho\ +d]request-options.set-connect-timeout\x01_\x04\0*[method]request-options.first-b\ +yte-timeout\x01^\x04\0.[method]request-options.set-first-byte-timeout\x01_\x04\0\ +-[method]request-options.between-bytes-timeout\x01^\x04\01[method]request-option\ +s.set-between-bytes-timeout\x01_\x01i(\x01i.\x01j\x01\xe1\0\x01\x1b\x01@\x02\x05\ +param\xe0\0\x08response\xe2\0\x01\0\x04\0\x1d[static]response-outparam.set\x01c\x01\ +h+\x01@\x01\x04self\xe4\0\0*\x04\0\x20[method]incoming-response.status\x01e\x01@\ 
+\x01\x04self\xe4\0\0\xc6\0\x04\0![method]incoming-response.headers\x01f\x01@\x01\ +\x04self\xe4\0\0\xc9\0\x04\0![method]incoming-response.consume\x01g\x01h,\x01i\x03\ +\x01j\x01\xe9\0\0\x01@\x01\x04self\xe8\0\0\xea\0\x04\0\x1c[method]incoming-body.\ +stream\x01k\x01i-\x01@\x01\x04this\xc8\0\0\xec\0\x04\0\x1c[static]incoming-body.\ +finish\x01m\x01h-\x01i\x09\x01@\x01\x04self\xee\0\0\xef\0\x04\0![method]future-t\ +railers.subscribe\x01p\x01i$\x01k\xf1\0\x01j\x01\xf2\0\x01\x1b\x01j\x01\xf3\0\0\x01\ +k\xf4\0\x01@\x01\x04self\xee\0\0\xf5\0\x04\0\x1b[method]future-trailers.get\x01v\ +\x01@\x01\x07headers\xc6\0\0\xe1\0\x04\0\x1e[constructor]outgoing-response\x01w\x01\ +h.\x01@\x01\x04self\xf8\0\0*\x04\0%[method]outgoing-response.status-code\x01y\x01\ +@\x02\x04self\xf8\0\x0bstatus-code*\0\xd2\0\x04\0)[method]outgoing-response.set-\ +status-code\x01z\x01@\x01\x04self\xf8\0\0\xc6\0\x04\0![method]outgoing-response.\ +headers\x01{\x01@\x01\x04self\xf8\0\0\xcf\0\x04\0\x1e[method]outgoing-response.b\ +ody\x01|\x01h/\x01i\x05\x01j\x01\xfe\0\0\x01@\x01\x04self\xfd\0\0\xff\0\x04\0\x1b\ +[method]outgoing-body.write\x01\x80\x01\x01j\0\x01\x1b\x01@\x02\x04this\xce\0\x08\ +trailers\xf2\0\0\x81\x01\x04\0\x1c[static]outgoing-body.finish\x01\x82\x01\x01h0\ +\x01@\x01\x04self\x83\x01\0\xef\0\x04\0*[method]future-incoming-response.subscri\ +be\x01\x84\x01\x01i+\x01j\x01\x85\x01\x01\x1b\x01j\x01\x86\x01\0\x01k\x87\x01\x01\ +@\x01\x04self\x83\x01\0\x88\x01\x04\0$[method]future-incoming-response.get\x01\x89\ +\x01\x01h\x07\x01k\x1b\x01@\x01\x03err\x8a\x01\0\x8b\x01\x04\0\x0fhttp-error-cod\ +e\x01\x8c\x01\x03\0\x15wasi:http/types@0.2.0\x05\x1e\x02\x03\0\x0a\x10outgoing-r\ +equest\x02\x03\0\x0a\x0frequest-options\x02\x03\0\x0a\x18future-incoming-respons\ +e\x02\x03\0\x0a\x0aerror-code\x01B\x0f\x02\x03\x02\x01\x1f\x04\0\x10outgoing-req\ +uest\x03\0\0\x02\x03\x02\x01\x20\x04\0\x0frequest-options\x03\0\x02\x02\x03\x02\x01\ 
+!\x04\0\x18future-incoming-response\x03\0\x04\x02\x03\x02\x01\"\x04\0\x0aerror-c\ +ode\x03\0\x06\x01i\x01\x01i\x03\x01k\x09\x01i\x05\x01j\x01\x0b\x01\x07\x01@\x02\x07\ +request\x08\x07options\x0a\0\x0c\x04\0\x06handle\x01\x0d\x03\0\x20wasi:http/outg\ +oing-handler@0.2.0\x05#\x02\x03\0\x01\x10evm-chain-config\x02\x03\0\x01\x13cosmo\ +s-chain-config\x02\x03\0\x02\x17service-and-workflow-id\x02\x03\0\x02\x18workflo\ +w-and-workflow-id\x02\x03\0\0\x09log-level\x02\x03\0\x03\x08event-id\x01B\x1d\x02\ +\x03\x02\x01$\x04\0\x10evm-chain-config\x03\0\0\x02\x03\x02\x01%\x04\0\x13cosmos\ +-chain-config\x03\0\x02\x02\x03\x02\x01&\x04\0\x17service-and-workflow-id\x03\0\x04\ +\x02\x03\x02\x01'\x04\0\x18workflow-and-workflow-id\x03\0\x06\x02\x03\x02\x01(\x04\ +\0\x09log-level\x03\0\x08\x02\x03\x02\x01)\x04\0\x08event-id\x03\0\x0a\x01k\x01\x01\ +@\x01\x09chain-keys\0\x0c\x04\0\x14get-evm-chain-config\x01\x0d\x01k\x03\x01@\x01\ +\x09chain-keys\0\x0e\x04\0\x17get-cosmos-chain-config\x01\x0f\x01ks\x01@\x01\x03\ +keys\0\x10\x04\0\x0aconfig-var\x01\x11\x01@\x02\x05level\x09\x07messages\x01\0\x04\ +\0\x03log\x01\x12\x01@\0\0\x05\x04\0\x0bget-service\x01\x13\x01@\0\0\x07\x04\0\x0c\ +get-workflow\x01\x14\x01@\0\0\x0b\x04\0\x0cget-event-id\x01\x15\x03\0\x04host\x05\ +*\x01B\x0a\x01o\x02ss\x01p\0\x01@\0\0\x01\x04\0\x0fget-environment\x01\x02\x01ps\ +\x01@\0\0\x03\x04\0\x0dget-arguments\x01\x04\x01ks\x01@\0\0\x05\x04\0\x0binitial\ +-cwd\x01\x06\x03\0\x1awasi:cli/environment@0.2.0\x05+\x01B\x03\x01j\0\0\x01@\x01\ +\x06status\0\x01\0\x04\0\x04exit\x01\x01\x03\0\x13wasi:cli/exit@0.2.0\x05,\x01B\x05\ +\x02\x03\x02\x01\x1c\x04\0\x0cinput-stream\x03\0\0\x01i\x01\x01@\0\0\x02\x04\0\x09\ +get-stdin\x01\x03\x03\0\x14wasi:cli/stdin@0.2.0\x05-\x01B\x05\x02\x03\x02\x01\x1d\ +\x04\0\x0doutput-stream\x03\0\0\x01i\x01\x01@\0\0\x02\x04\0\x0aget-stdout\x01\x03\ +\x03\0\x15wasi:cli/stdout@0.2.0\x05.\x01B\x05\x02\x03\x02\x01\x1d\x04\0\x0doutpu\ 
+t-stream\x03\0\0\x01i\x01\x01@\0\0\x02\x04\0\x0aget-stderr\x01\x03\x03\0\x15wasi\ +:cli/stderr@0.2.0\x05/\x01B\x01\x04\0\x0eterminal-input\x03\x01\x03\0\x1dwasi:cl\ +i/terminal-input@0.2.0\x050\x01B\x01\x04\0\x0fterminal-output\x03\x01\x03\0\x1ew\ +asi:cli/terminal-output@0.2.0\x051\x02\x03\0\x12\x0eterminal-input\x01B\x06\x02\x03\ +\x02\x012\x04\0\x0eterminal-input\x03\0\0\x01i\x01\x01k\x02\x01@\0\0\x03\x04\0\x12\ +get-terminal-stdin\x01\x04\x03\0\x1dwasi:cli/terminal-stdin@0.2.0\x053\x02\x03\0\ +\x13\x0fterminal-output\x01B\x06\x02\x03\x02\x014\x04\0\x0fterminal-output\x03\0\ +\0\x01i\x01\x01k\x02\x01@\0\0\x03\x04\0\x13get-terminal-stdout\x01\x04\x03\0\x1e\ +wasi:cli/terminal-stdout@0.2.0\x055\x01B\x06\x02\x03\x02\x014\x04\0\x0fterminal-\ +output\x03\0\0\x01i\x01\x01k\x02\x01@\0\0\x03\x04\0\x13get-terminal-stderr\x01\x04\ +\x03\0\x1ewasi:cli/terminal-stderr@0.2.0\x056\x01B\x05\x01r\x02\x07secondsw\x0bn\ +anosecondsy\x04\0\x08datetime\x03\0\0\x01@\0\0\x01\x04\0\x03now\x01\x02\x04\0\x0a\ +resolution\x01\x02\x03\0\x1cwasi:clocks/wall-clock@0.2.0\x057\x02\x03\0\x09\x05e\ +rror\x02\x03\0\x17\x08datetime\x01Br\x02\x03\x02\x01\x1c\x04\0\x0cinput-stream\x03\ +\0\0\x02\x03\x02\x01\x1d\x04\0\x0doutput-stream\x03\0\x02\x02\x03\x02\x018\x04\0\ +\x05error\x03\0\x04\x02\x03\x02\x019\x04\0\x08datetime\x03\0\x06\x01w\x04\0\x08f\ +ilesize\x03\0\x08\x01m\x08\x07unknown\x0cblock-device\x10character-device\x09dir\ +ectory\x04fifo\x0dsymbolic-link\x0cregular-file\x06socket\x04\0\x0fdescriptor-ty\ +pe\x03\0\x0a\x01n\x06\x04read\x05write\x13file-integrity-sync\x13data-integrity-\ +sync\x14requested-write-sync\x10mutate-directory\x04\0\x10descriptor-flags\x03\0\ +\x0c\x01n\x01\x0esymlink-follow\x04\0\x0apath-flags\x03\0\x0e\x01n\x04\x06create\ +\x09directory\x09exclusive\x08truncate\x04\0\x0aopen-flags\x03\0\x10\x01w\x04\0\x0a\ +link-count\x03\0\x12\x01k\x07\x01r\x06\x04type\x0b\x0alink-count\x13\x04size\x09\ 
+\x15data-access-timestamp\x14\x1bdata-modification-timestamp\x14\x17status-chang\ +e-timestamp\x14\x04\0\x0fdescriptor-stat\x03\0\x15\x01q\x03\x09no-change\0\0\x03\ +now\0\0\x09timestamp\x01\x07\0\x04\0\x0dnew-timestamp\x03\0\x17\x01r\x02\x04type\ +\x0b\x04names\x04\0\x0fdirectory-entry\x03\0\x19\x01m%\x06access\x0bwould-block\x07\ +already\x0ebad-descriptor\x04busy\x08deadlock\x05quota\x05exist\x0efile-too-larg\ +e\x15illegal-byte-sequence\x0bin-progress\x0binterrupted\x07invalid\x02io\x0cis-\ +directory\x04loop\x0etoo-many-links\x0cmessage-size\x0dname-too-long\x09no-devic\ +e\x08no-entry\x07no-lock\x13insufficient-memory\x12insufficient-space\x0dnot-dir\ +ectory\x09not-empty\x0fnot-recoverable\x0bunsupported\x06no-tty\x0eno-such-devic\ +e\x08overflow\x0dnot-permitted\x04pipe\x09read-only\x0cinvalid-seek\x0etext-file\ +-busy\x0ccross-device\x04\0\x0aerror-code\x03\0\x1b\x01m\x06\x06normal\x0asequen\ +tial\x06random\x09will-need\x09dont-need\x08no-reuse\x04\0\x06advice\x03\0\x1d\x01\ +r\x02\x05lowerw\x05upperw\x04\0\x13metadata-hash-value\x03\0\x1f\x04\0\x0adescri\ +ptor\x03\x01\x04\0\x16directory-entry-stream\x03\x01\x01h!\x01i\x01\x01j\x01$\x01\ +\x1c\x01@\x02\x04self#\x06offset\x09\0%\x04\0\"[method]descriptor.read-via-strea\ +m\x01&\x01i\x03\x01j\x01'\x01\x1c\x01@\x02\x04self#\x06offset\x09\0(\x04\0#[meth\ +od]descriptor.write-via-stream\x01)\x01@\x01\x04self#\0(\x04\0$[method]descripto\ +r.append-via-stream\x01*\x01j\0\x01\x1c\x01@\x04\x04self#\x06offset\x09\x06lengt\ +h\x09\x06advice\x1e\0+\x04\0\x19[method]descriptor.advise\x01,\x01@\x01\x04self#\ +\0+\x04\0\x1c[method]descriptor.sync-data\x01-\x01j\x01\x0d\x01\x1c\x01@\x01\x04\ +self#\0.\x04\0\x1c[method]descriptor.get-flags\x01/\x01j\x01\x0b\x01\x1c\x01@\x01\ +\x04self#\00\x04\0\x1b[method]descriptor.get-type\x011\x01@\x02\x04self#\x04size\ +\x09\0+\x04\0\x1b[method]descriptor.set-size\x012\x01@\x03\x04self#\x15data-acce\ +ss-timestamp\x18\x1bdata-modification-timestamp\x18\0+\x04\0\x1c[method]descript\ 
+or.set-times\x013\x01p}\x01o\x024\x7f\x01j\x015\x01\x1c\x01@\x03\x04self#\x06len\ +gth\x09\x06offset\x09\06\x04\0\x17[method]descriptor.read\x017\x01j\x01\x09\x01\x1c\ +\x01@\x03\x04self#\x06buffer4\x06offset\x09\08\x04\0\x18[method]descriptor.write\ +\x019\x01i\"\x01j\x01:\x01\x1c\x01@\x01\x04self#\0;\x04\0![method]descriptor.rea\ +d-directory\x01<\x04\0\x17[method]descriptor.sync\x01-\x01@\x02\x04self#\x04path\ +s\0+\x04\0&[method]descriptor.create-directory-at\x01=\x01j\x01\x16\x01\x1c\x01@\ +\x01\x04self#\0>\x04\0\x17[method]descriptor.stat\x01?\x01@\x03\x04self#\x0apath\ +-flags\x0f\x04paths\0>\x04\0\x1a[method]descriptor.stat-at\x01@\x01@\x05\x04self\ +#\x0apath-flags\x0f\x04paths\x15data-access-timestamp\x18\x1bdata-modification-t\ +imestamp\x18\0+\x04\0\x1f[method]descriptor.set-times-at\x01A\x01@\x05\x04self#\x0e\ +old-path-flags\x0f\x08old-paths\x0enew-descriptor#\x08new-paths\0+\x04\0\x1a[met\ +hod]descriptor.link-at\x01B\x01i!\x01j\x01\xc3\0\x01\x1c\x01@\x05\x04self#\x0apa\ +th-flags\x0f\x04paths\x0aopen-flags\x11\x05flags\x0d\0\xc4\0\x04\0\x1a[method]de\ +scriptor.open-at\x01E\x01j\x01s\x01\x1c\x01@\x02\x04self#\x04paths\0\xc6\0\x04\0\ +\x1e[method]descriptor.readlink-at\x01G\x04\0&[method]descriptor.remove-director\ +y-at\x01=\x01@\x04\x04self#\x08old-paths\x0enew-descriptor#\x08new-paths\0+\x04\0\ +\x1c[method]descriptor.rename-at\x01H\x01@\x03\x04self#\x08old-paths\x08new-path\ +s\0+\x04\0\x1d[method]descriptor.symlink-at\x01I\x04\0![method]descriptor.unlink\ +-file-at\x01=\x01@\x02\x04self#\x05other#\0\x7f\x04\0![method]descriptor.is-same\ +-object\x01J\x01j\x01\x20\x01\x1c\x01@\x01\x04self#\0\xcb\0\x04\0\x20[method]des\ +criptor.metadata-hash\x01L\x01@\x03\x04self#\x0apath-flags\x0f\x04paths\0\xcb\0\x04\ +\0#[method]descriptor.metadata-hash-at\x01M\x01h\"\x01k\x1a\x01j\x01\xcf\0\x01\x1c\ +\x01@\x01\x04self\xce\0\0\xd0\0\x04\03[method]directory-entry-stream.read-direct\ 
+ory-entry\x01Q\x01h\x05\x01k\x1c\x01@\x01\x03err\xd2\0\0\xd3\0\x04\0\x15filesyst\ +em-error-code\x01T\x03\0\x1bwasi:filesystem/types@0.2.0\x05:\x02\x03\0\x18\x0ade\ +scriptor\x01B\x07\x02\x03\x02\x01;\x04\0\x0adescriptor\x03\0\0\x01i\x01\x01o\x02\ +\x02s\x01p\x03\x01@\0\0\x04\x04\0\x0fget-directories\x01\x05\x03\0\x1ewasi:files\ +ystem/preopens@0.2.0\x05<\x01B\x11\x04\0\x07network\x03\x01\x01m\x15\x07unknown\x0d\ +access-denied\x0dnot-supported\x10invalid-argument\x0dout-of-memory\x07timeout\x14\ +concurrency-conflict\x0fnot-in-progress\x0bwould-block\x0dinvalid-state\x10new-s\ +ocket-limit\x14address-not-bindable\x0eaddress-in-use\x12remote-unreachable\x12c\ +onnection-refused\x10connection-reset\x12connection-aborted\x12datagram-too-larg\ +e\x11name-unresolvable\x1atemporary-resolver-failure\x1apermanent-resolver-failu\ +re\x04\0\x0aerror-code\x03\0\x01\x01m\x02\x04ipv4\x04ipv6\x04\0\x11ip-address-fa\ +mily\x03\0\x03\x01o\x04}}}}\x04\0\x0cipv4-address\x03\0\x05\x01o\x08{{{{{{{{\x04\ +\0\x0cipv6-address\x03\0\x07\x01q\x02\x04ipv4\x01\x06\0\x04ipv6\x01\x08\0\x04\0\x0a\ +ip-address\x03\0\x09\x01r\x02\x04port{\x07address\x06\x04\0\x13ipv4-socket-addre\ +ss\x03\0\x0b\x01r\x04\x04port{\x09flow-infoy\x07address\x08\x08scope-idy\x04\0\x13\ +ipv6-socket-address\x03\0\x0d\x01q\x02\x04ipv4\x01\x0c\0\x04ipv6\x01\x0e\0\x04\0\ +\x11ip-socket-address\x03\0\x0f\x03\0\x1awasi:sockets/network@0.2.0\x05=\x02\x03\ +\0\x1a\x07network\x01B\x05\x02\x03\x02\x01>\x04\0\x07network\x03\0\0\x01i\x01\x01\ +@\0\0\x02\x04\0\x10instance-network\x01\x03\x03\0#wasi:sockets/instance-network@\ +0.2.0\x05?\x02\x03\0\x1a\x0aerror-code\x02\x03\0\x1a\x11ip-socket-address\x02\x03\ +\0\x1a\x11ip-address-family\x01BD\x02\x03\x02\x01\x16\x04\0\x08pollable\x03\0\0\x02\ +\x03\x02\x01>\x04\0\x07network\x03\0\x02\x02\x03\x02\x01@\x04\0\x0aerror-code\x03\ +\0\x04\x02\x03\x02\x01A\x04\0\x11ip-socket-address\x03\0\x06\x02\x03\x02\x01B\x04\ 
+\0\x11ip-address-family\x03\0\x08\x01p}\x01r\x02\x04data\x0a\x0eremote-address\x07\ +\x04\0\x11incoming-datagram\x03\0\x0b\x01k\x07\x01r\x02\x04data\x0a\x0eremote-ad\ +dress\x0d\x04\0\x11outgoing-datagram\x03\0\x0e\x04\0\x0audp-socket\x03\x01\x04\0\ +\x18incoming-datagram-stream\x03\x01\x04\0\x18outgoing-datagram-stream\x03\x01\x01\ +h\x10\x01h\x03\x01j\0\x01\x05\x01@\x03\x04self\x13\x07network\x14\x0dlocal-addre\ +ss\x07\0\x15\x04\0\x1d[method]udp-socket.start-bind\x01\x16\x01@\x01\x04self\x13\ +\0\x15\x04\0\x1e[method]udp-socket.finish-bind\x01\x17\x01i\x11\x01i\x12\x01o\x02\ +\x18\x19\x01j\x01\x1a\x01\x05\x01@\x02\x04self\x13\x0eremote-address\x0d\0\x1b\x04\ +\0\x19[method]udp-socket.stream\x01\x1c\x01j\x01\x07\x01\x05\x01@\x01\x04self\x13\ +\0\x1d\x04\0\x20[method]udp-socket.local-address\x01\x1e\x04\0![method]udp-socke\ +t.remote-address\x01\x1e\x01@\x01\x04self\x13\0\x09\x04\0![method]udp-socket.add\ +ress-family\x01\x1f\x01j\x01}\x01\x05\x01@\x01\x04self\x13\0\x20\x04\0$[method]u\ +dp-socket.unicast-hop-limit\x01!\x01@\x02\x04self\x13\x05value}\0\x15\x04\0([met\ +hod]udp-socket.set-unicast-hop-limit\x01\"\x01j\x01w\x01\x05\x01@\x01\x04self\x13\ +\0#\x04\0&[method]udp-socket.receive-buffer-size\x01$\x01@\x02\x04self\x13\x05va\ +luew\0\x15\x04\0*[method]udp-socket.set-receive-buffer-size\x01%\x04\0#[method]u\ +dp-socket.send-buffer-size\x01$\x04\0'[method]udp-socket.set-send-buffer-size\x01\ +%\x01i\x01\x01@\x01\x04self\x13\0&\x04\0\x1c[method]udp-socket.subscribe\x01'\x01\ +h\x11\x01p\x0c\x01j\x01)\x01\x05\x01@\x02\x04self(\x0bmax-resultsw\0*\x04\0([met\ +hod]incoming-datagram-stream.receive\x01+\x01@\x01\x04self(\0&\x04\0*[method]inc\ +oming-datagram-stream.subscribe\x01,\x01h\x12\x01@\x01\x04self-\0#\x04\0+[method\ +]outgoing-datagram-stream.check-send\x01.\x01p\x0f\x01@\x02\x04self-\x09datagram\ +s/\0#\x04\0%[method]outgoing-datagram-stream.send\x010\x01@\x01\x04self-\0&\x04\0\ 
+*[method]outgoing-datagram-stream.subscribe\x011\x03\0\x16wasi:sockets/udp@0.2.0\ +\x05C\x02\x03\0\x1c\x0audp-socket\x01B\x0c\x02\x03\x02\x01>\x04\0\x07network\x03\ +\0\0\x02\x03\x02\x01@\x04\0\x0aerror-code\x03\0\x02\x02\x03\x02\x01B\x04\0\x11ip\ +-address-family\x03\0\x04\x02\x03\x02\x01D\x04\0\x0audp-socket\x03\0\x06\x01i\x07\ +\x01j\x01\x08\x01\x03\x01@\x01\x0eaddress-family\x05\0\x09\x04\0\x11create-udp-s\ +ocket\x01\x0a\x03\0$wasi:sockets/udp-create-socket@0.2.0\x05E\x01BT\x02\x03\x02\x01\ +\x1c\x04\0\x0cinput-stream\x03\0\0\x02\x03\x02\x01\x1d\x04\0\x0doutput-stream\x03\ +\0\x02\x02\x03\x02\x01\x16\x04\0\x08pollable\x03\0\x04\x02\x03\x02\x01\x1b\x04\0\ +\x08duration\x03\0\x06\x02\x03\x02\x01>\x04\0\x07network\x03\0\x08\x02\x03\x02\x01\ +@\x04\0\x0aerror-code\x03\0\x0a\x02\x03\x02\x01A\x04\0\x11ip-socket-address\x03\0\ +\x0c\x02\x03\x02\x01B\x04\0\x11ip-address-family\x03\0\x0e\x01m\x03\x07receive\x04\ +send\x04both\x04\0\x0dshutdown-type\x03\0\x10\x04\0\x0atcp-socket\x03\x01\x01h\x12\ +\x01h\x09\x01j\0\x01\x0b\x01@\x03\x04self\x13\x07network\x14\x0dlocal-address\x0d\ +\0\x15\x04\0\x1d[method]tcp-socket.start-bind\x01\x16\x01@\x01\x04self\x13\0\x15\ +\x04\0\x1e[method]tcp-socket.finish-bind\x01\x17\x01@\x03\x04self\x13\x07network\ +\x14\x0eremote-address\x0d\0\x15\x04\0\x20[method]tcp-socket.start-connect\x01\x18\ +\x01i\x01\x01i\x03\x01o\x02\x19\x1a\x01j\x01\x1b\x01\x0b\x01@\x01\x04self\x13\0\x1c\ +\x04\0![method]tcp-socket.finish-connect\x01\x1d\x04\0\x1f[method]tcp-socket.sta\ +rt-listen\x01\x17\x04\0\x20[method]tcp-socket.finish-listen\x01\x17\x01i\x12\x01\ +o\x03\x1e\x19\x1a\x01j\x01\x1f\x01\x0b\x01@\x01\x04self\x13\0\x20\x04\0\x19[meth\ +od]tcp-socket.accept\x01!\x01j\x01\x0d\x01\x0b\x01@\x01\x04self\x13\0\"\x04\0\x20\ +[method]tcp-socket.local-address\x01#\x04\0![method]tcp-socket.remote-address\x01\ +#\x01@\x01\x04self\x13\0\x7f\x04\0\x1f[method]tcp-socket.is-listening\x01$\x01@\x01\ 
+\x04self\x13\0\x0f\x04\0![method]tcp-socket.address-family\x01%\x01@\x02\x04self\ +\x13\x05valuew\0\x15\x04\0*[method]tcp-socket.set-listen-backlog-size\x01&\x01j\x01\ +\x7f\x01\x0b\x01@\x01\x04self\x13\0'\x04\0%[method]tcp-socket.keep-alive-enabled\ +\x01(\x01@\x02\x04self\x13\x05value\x7f\0\x15\x04\0)[method]tcp-socket.set-keep-\ +alive-enabled\x01)\x01j\x01\x07\x01\x0b\x01@\x01\x04self\x13\0*\x04\0'[method]tc\ +p-socket.keep-alive-idle-time\x01+\x01@\x02\x04self\x13\x05value\x07\0\x15\x04\0\ ++[method]tcp-socket.set-keep-alive-idle-time\x01,\x04\0&[method]tcp-socket.keep-\ +alive-interval\x01+\x04\0*[method]tcp-socket.set-keep-alive-interval\x01,\x01j\x01\ +y\x01\x0b\x01@\x01\x04self\x13\0-\x04\0#[method]tcp-socket.keep-alive-count\x01.\ +\x01@\x02\x04self\x13\x05valuey\0\x15\x04\0'[method]tcp-socket.set-keep-alive-co\ +unt\x01/\x01j\x01}\x01\x0b\x01@\x01\x04self\x13\00\x04\0\x1c[method]tcp-socket.h\ +op-limit\x011\x01@\x02\x04self\x13\x05value}\0\x15\x04\0\x20[method]tcp-socket.s\ +et-hop-limit\x012\x01j\x01w\x01\x0b\x01@\x01\x04self\x13\03\x04\0&[method]tcp-so\ +cket.receive-buffer-size\x014\x04\0*[method]tcp-socket.set-receive-buffer-size\x01\ +&\x04\0#[method]tcp-socket.send-buffer-size\x014\x04\0'[method]tcp-socket.set-se\ +nd-buffer-size\x01&\x01i\x05\x01@\x01\x04self\x13\05\x04\0\x1c[method]tcp-socket\ +.subscribe\x016\x01@\x02\x04self\x13\x0dshutdown-type\x11\0\x15\x04\0\x1b[method\ +]tcp-socket.shutdown\x017\x03\0\x16wasi:sockets/tcp@0.2.0\x05F\x02\x03\0\x1e\x0a\ +tcp-socket\x01B\x0c\x02\x03\x02\x01>\x04\0\x07network\x03\0\0\x02\x03\x02\x01@\x04\ +\0\x0aerror-code\x03\0\x02\x02\x03\x02\x01B\x04\0\x11ip-address-family\x03\0\x04\ +\x02\x03\x02\x01G\x04\0\x0atcp-socket\x03\0\x06\x01i\x07\x01j\x01\x08\x01\x03\x01\ +@\x01\x0eaddress-family\x05\0\x09\x04\0\x11create-tcp-socket\x01\x0a\x03\0$wasi:\ +sockets/tcp-create-socket@0.2.0\x05H\x02\x03\0\x1a\x0aip-address\x01B\x16\x02\x03\ 
+\x02\x01\x16\x04\0\x08pollable\x03\0\0\x02\x03\x02\x01>\x04\0\x07network\x03\0\x02\ +\x02\x03\x02\x01@\x04\0\x0aerror-code\x03\0\x04\x02\x03\x02\x01I\x04\0\x0aip-add\ +ress\x03\0\x06\x04\0\x16resolve-address-stream\x03\x01\x01h\x08\x01k\x07\x01j\x01\ +\x0a\x01\x05\x01@\x01\x04self\x09\0\x0b\x04\03[method]resolve-address-stream.res\ +olve-next-address\x01\x0c\x01i\x01\x01@\x01\x04self\x09\0\x0d\x04\0([method]reso\ +lve-address-stream.subscribe\x01\x0e\x01h\x03\x01i\x08\x01j\x01\x10\x01\x05\x01@\ +\x02\x07network\x0f\x04names\0\x11\x04\0\x11resolve-addresses\x01\x12\x03\0!wasi\ +:sockets/ip-name-lookup@0.2.0\x05J\x01B\x05\x01p}\x01@\x01\x03lenw\0\0\x04\0\x10\ +get-random-bytes\x01\x01\x01@\0\0w\x04\0\x0eget-random-u64\x01\x02\x03\0\x18wasi\ +:random/random@0.2.0\x05K\x01B\x05\x01p}\x01@\x01\x03lenw\0\0\x04\0\x19get-insec\ +ure-random-bytes\x01\x01\x01@\0\0w\x04\0\x17get-insecure-random-u64\x01\x02\x03\0\ +\x1awasi:random/insecure@0.2.0\x05L\x01B\x03\x01o\x02ww\x01@\0\0\0\x04\0\x0dinse\ +cure-seed\x01\x01\x03\0\x1fwasi:random/insecure-seed@0.2.0\x05M\x01B\x1c\x01q\x03\ +\x0dno-such-store\0\0\x0daccess-denied\0\0\x05other\x01s\0\x04\0\x05error\x03\0\0\ +\x01ps\x01ks\x01r\x02\x04keys\x02\x06cursor\x03\x04\0\x0ckey-response\x03\0\x04\x04\ +\0\x06bucket\x03\x01\x01h\x06\x01p}\x01k\x08\x01j\x01\x09\x01\x01\x01@\x02\x04se\ +lf\x07\x03keys\0\x0a\x04\0\x12[method]bucket.get\x01\x0b\x01j\0\x01\x01\x01@\x03\ +\x04self\x07\x03keys\x05value\x08\0\x0c\x04\0\x12[method]bucket.set\x01\x0d\x01@\ +\x02\x04self\x07\x03keys\0\x0c\x04\0\x15[method]bucket.delete\x01\x0e\x01j\x01\x7f\ +\x01\x01\x01@\x02\x04self\x07\x03keys\0\x0f\x04\0\x15[method]bucket.exists\x01\x10\ +\x01j\x01\x05\x01\x01\x01@\x02\x04self\x07\x06cursor\x03\0\x11\x04\0\x18[method]\ +bucket.list-keys\x01\x12\x01i\x06\x01j\x01\x13\x01\x01\x01@\x01\x0aidentifiers\0\ +\x14\x04\0\x04open\x01\x15\x03\0\x20wasi:keyvalue/store@0.2.0-draft2\x05N\x02\x03\ 
+\0$\x06bucket\x02\x03\0$\x05error\x01B\x18\x02\x03\x02\x01O\x04\0\x06bucket\x03\0\ +\0\x02\x03\x02\x01P\x04\0\x05error\x03\0\x02\x04\0\x03cas\x03\x01\x01i\x04\x01q\x02\ +\x0bstore-error\x01\x03\0\x0acas-failed\x01\x05\0\x04\0\x09cas-error\x03\0\x06\x01\ +h\x01\x01j\x01\x05\x01\x03\x01@\x02\x06bucket\x08\x03keys\0\x09\x04\0\x0f[static\ +]cas.new\x01\x0a\x01h\x04\x01p}\x01k\x0c\x01j\x01\x0d\x01\x03\x01@\x01\x04self\x0b\ +\0\x0e\x04\0\x13[method]cas.current\x01\x0f\x01j\x01x\x01\x03\x01@\x03\x06bucket\ +\x08\x03keys\x05deltax\0\x10\x04\0\x09increment\x01\x11\x01j\0\x01\x07\x01@\x02\x03\ +cas\x05\x05value\x0c\0\x12\x04\0\x04swap\x01\x13\x03\0\"wasi:keyvalue/atomics@0.\ +2.0-draft2\x05Q\x01B\x13\x02\x03\x02\x01O\x04\0\x06bucket\x03\0\0\x02\x03\x02\x01\ +P\x04\0\x05error\x03\0\x02\x01h\x01\x01ps\x01p}\x01o\x02s\x06\x01k\x07\x01p\x08\x01\ +j\x01\x09\x01\x03\x01@\x02\x06bucket\x04\x04keys\x05\0\x0a\x04\0\x08get-many\x01\ +\x0b\x01p\x07\x01j\0\x01\x03\x01@\x02\x06bucket\x04\x0akey-values\x0c\0\x0d\x04\0\ +\x08set-many\x01\x0e\x01@\x02\x06bucket\x04\x04keys\x05\0\x0d\x04\0\x0bdelete-ma\ +ny\x01\x0f\x03\0\x20wasi:keyvalue/batch@0.2.0-draft2\x05R\x01k\x14\x01j\x01\xd3\0\ +\x01s\x01@\x01\x0etrigger-action\x11\0\xd4\0\x04\0\x03run\x01U\x04\0\x1ewavs:ope\ +rator/wavs-world@1.2.0\x04\0\x0b\x10\x01\0\x0awavs-world\x03\0\0\0G\x09producers\ +\x01\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41\ +.0"; #[inline(never)] #[doc(hidden)] pub fn __link_custom_section_describing_imports() { diff --git a/components/evm-price-oracle/src/lib.rs b/components/evm-price-oracle/src/lib.rs index ebd278fc..2b0554c6 100644 --- a/components/evm-price-oracle/src/lib.rs +++ b/components/evm-price-oracle/src/lib.rs @@ -1,36 +1,24 @@ +#[rustfmt::skip] +pub mod bindings; +pub mod solidity; mod trigger; + +use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; +use alloy_sol_types::SolValue; +use anyhow::Result; +use serde::{Deserialize, Serialize}; use 
trigger::{decode_trigger_event, encode_trigger_output, Destination}; use wavs_wasi_utils::{ evm::alloy_primitives::hex, http::{fetch_json, http_request_get}, }; -pub mod bindings; -use crate::bindings::{export, Guest, TriggerAction, WasmResponse}; -use alloy_sol_types::SolValue; -use serde::{Deserialize, Serialize}; use wstd::{http::HeaderValue, runtime::block_on}; struct Component; export!(Component with_types_in bindings); impl Guest for Component { - /// Main entry point for the price oracle component. - /// WAVS is subscribed to watch for events emitted by the blockchain. - /// When WAVS observes an event is emitted, it will internally route the event and its data to this function (component). - /// The processing then occurs before the output is returned back to WAVS to be submitted to the blockchain by the operator(s). - /// - /// This is why the `Destination::Ethereum` requires the encoded trigger output, it must be ABI encoded for the solidity contract. - /// Failure to do so will result in a failed submission as the signature will not match the saved output. - /// - /// After the data is properly set by the operator through WAVS, any user can query the price data from the blockchain in the solidity contract. - /// You can also return `None` as the output if nothing needs to be saved to the blockchain. (great for performing some off chain action) - /// - /// This function: - /// 1. Receives a trigger action containing encoded data - /// 2. Decodes the input to get a cryptocurrency ID (in hex) - /// 3. Fetches current price data from CoinMarketCap - /// 4. 
Returns the encoded response based on the destination - fn run(action: TriggerAction) -> std::result::Result, String> { + fn run(action: TriggerAction) -> Result, String> { let (trigger_id, req, dest) = decode_trigger_event(action.data).map_err(|e| e.to_string())?; @@ -68,28 +56,6 @@ impl Guest for Component { } } -/// Fetches cryptocurrency price data from CoinMarketCap's API -/// -/// # Arguments -/// * `id` - CoinMarketCap's unique identifier for the cryptocurrency -/// -/// # Returns -/// * `PriceFeedData` containing: -/// - symbol: The cryptocurrency's ticker symbol (e.g., "BTC") -/// - price: Current price in USD -/// - timestamp: Server timestamp of the price data -/// -/// # Implementation Details -/// - Uses CoinMarketCap's v3 API endpoint -/// - Includes necessary headers to avoid rate limiting: -/// * User-Agent to mimic a browser -/// * Random cookie with current timestamp -/// * JSON content type headers -/// -/// As of writing (Mar 31, 2025), the CoinMarketCap API is free to use and has no rate limits. -/// This may change in the future so be aware of issues that you may encounter going forward. -/// There is a more proper API for pro users that you can use -/// - async fn get_price_feed(id: u64) -> Result { let url = format!( "https://api.coinmarketcap.com/data-api/v3/cryptocurrency/detail?id={}&range=1h", @@ -118,19 +84,13 @@ async fn get_price_feed(id: u64) -> Result { Ok(PriceFeedData { symbol: json.data.symbol, price, timestamp: timestamp.to_string() }) } -/// Represents the price feed response data structure -/// This is the simplified version of the data that will be sent to the blockchain -/// via the Submission of the operator(s). 
-#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, Clone)] pub struct PriceFeedData { symbol: String, timestamp: String, price: f64, } -/// Root response structure from CoinMarketCap API -/// Generated from the API response using -/// Contains detailed cryptocurrency information including price statistics #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Root { pub data: Data, diff --git a/components/evm-price-oracle/src/solidity.rs b/components/evm-price-oracle/src/solidity.rs new file mode 100644 index 00000000..51c207ed --- /dev/null +++ b/components/evm-price-oracle/src/solidity.rs @@ -0,0 +1,13 @@ +use alloy_sol_macro::sol; + +// Import all types from ITypes.sol +pub use ITypes::*; + +// The objects here will be generated automatically into Rust types. +// If you update the .sol file, you must re-run `cargo build` to see the changes. +sol!("../../src/interfaces/ITypes.sol"); + +// Define a simple struct representing the function that encodes string input +sol! { + function addTrigger(string data) external; +} diff --git a/components/evm-price-oracle/src/trigger.rs b/components/evm-price-oracle/src/trigger.rs index db7cc8b1..546697ae 100644 --- a/components/evm-price-oracle/src/trigger.rs +++ b/components/evm-price-oracle/src/trigger.rs @@ -1,6 +1,6 @@ -use crate::bindings::wavs::worker::layer_types::{ - TriggerData, TriggerDataEvmContractEvent, WasmResponse, -}; +use crate::bindings::wavs::types::events::{TriggerData, TriggerDataEvmContractEvent}; +use crate::bindings::WasmResponse; +use crate::solidity; use alloy_sol_types::SolValue; use anyhow::Result; use wavs_wasi_utils::decode_event_log_data; @@ -34,7 +34,7 @@ pub enum Destination { pub fn decode_trigger_event(trigger_data: TriggerData) -> Result<(u64, Vec, Destination)> { match trigger_data { TriggerData::EvmContractEvent(TriggerDataEvmContractEvent { log, .. 
}) => { - let event: solidity::NewTrigger = decode_event_log_data!(log)?; + let event: solidity::NewTrigger = decode_event_log_data!(log.data)?; let trigger_info = ::abi_decode(&event._triggerInfo)?; Ok((trigger_info.triggerId, trigger_info.data.to_vec(), Destination::Ethereum)) @@ -62,30 +62,3 @@ pub fn encode_trigger_output(trigger_id: u64, output: impl AsRef<[u8]>) -> WasmR ordering: None, } } - -/// Private module containing Solidity type definitions -/// -/// The `sol!` macro from alloy_sol_macro reads a Solidity interface file -/// and generates corresponding Rust types and encoding/decoding functions. -/// -/// In this case, it reads "../../src/interfaces/ITypes.sol" which defines: -/// - NewTrigger event -/// - TriggerInfo struct -/// - DataWithId struct -/// -/// Documentation: -/// - -/// (You can also just sol! arbitrary solidity types like `event` or `struct` too) -pub mod solidity { - use alloy_sol_macro::sol; - pub use ITypes::*; - - // The objects here will be generated automatically into Rust types. - // If you update the .sol file, you must re-run `cargo build` to see the changes. - sol!("../../src/interfaces/ITypes.sol"); - - // Define a simple struct representing the function that encodes string input - sol! 
{ - function addTrigger(string data) external; - } -} diff --git a/config/components.json b/config/components.json new file mode 100644 index 00000000..3f20e6e8 --- /dev/null +++ b/config/components.json @@ -0,0 +1,27 @@ +{ + "components": [ + { + "filename": "evm_price_oracle.wasm", + "package_name": "evmpriceoracle", + "package_version": "0.1.0", + "trigger_event": "NewTrigger(bytes)", + "trigger_json_path": "evmpriceoracle_trigger.deployedTo", + "submit_json_path": "evmpriceoracle_submit.deployedTo", + "config_values": { + "chain_name": "${CHAIN_NAME}" + }, + "env_variables": [] + } + ], + "aggregator_components": [ + { + "filename": "aggregator.wasm", + "package_name": "aggregator", + "package_version": "0.3.4", + "config_values": { + "timer_delay_secs": "${AGGREGATOR_TIMER_DELAYER_SECS}" + }, + "env_variables": ["WAVS_ENV_ETHERSCAN_API_KEY"] + } + ] +} diff --git a/docker-compose.yml b/docker-compose.yml index 1e9be55f..05f7ad91 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,7 +24,7 @@ services: command: ["--rm"] ipfs: - image: ipfs/kubo:v0.34.1 + image: ipfs/kubo:v0.37.0 container_name: ipfs network_mode: host ports: diff --git a/foundry.toml b/foundry.toml index a7f57413..444d32e9 100644 --- a/foundry.toml +++ b/foundry.toml @@ -2,10 +2,20 @@ src = 'src' out = 'out' libs = ['lib'] -solidity_version = '0.8.22' -evm_version = 'shanghai' -# via_ir = true -ffi = false # false by default for security reasons -fs_permissions = [{ access = "read-write", path = "./" },] +auto_detect_solc = true +solc_version = '0.8.27' +evm_version = 'cancun' +optimizer = true +optimizer_runs = 200 +via_ir = true +ffi = false # false by default for security reasons +fs_permissions = [ + { access = "read-write", path = "./" }, + { access = "read-write", path = "../.docker" } +] + +[lint] +exclude_lints = ["asm-keccak256", "mixed-case-function", "mixed-case-variable"] + # See more config options https://github.com/foundry-rs/foundry/tree/master/config diff --git 
a/package-lock.json b/package-lock.json index b0bdd192..586a605a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ "@commitlint/config-conventional": "19.2.2", "@defi-wonderland/natspec-smells": "1.1.6", "@openzeppelin/contracts": "^5.2.0", - "@wavs/solidity": "0.4.0", + "@wavs/solidity": "0.5.0-beta.10", "forge-std": "github:foundry-rs/forge-std#v1.9.6", "lint-staged": ">=10", "solhint-community": "4.0.0", @@ -21,22 +21,24 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", - "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.25.9", + "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -277,6 +279,15 @@ "natspec-smells": "lib/main.js" } }, + "node_modules/@humanwhocodes/momoa": { + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-2.0.4.tgz", + "integrity": "sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==", + "license": "Apache-2.0", + "engines": { + "node": ">=10.10.0" + } + }, "node_modules/@noble/curves": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.2.tgz", @@ -336,6 +347,47 @@ "resolved": "https://registry.npmjs.org/@openzeppelin/contracts/-/contracts-5.3.0.tgz", "integrity": "sha512-zj/KGoW7zxWUE8qOI++rUM18v+VeLTTzKs/DJFkSzHpQFPD/jKKF0TrMxBfGLl3kpdELCNccvB3zmofSzm4nlA==" }, + "node_modules/@pnpm/config.env-replace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz", + "integrity": "sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==", + "license": "MIT", + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/network.ca-file": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", + "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", + "license": "MIT", + "dependencies": { + "graceful-fs": "4.2.10" + }, + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/network.ca-file/node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "license": "ISC" + }, + "node_modules/@pnpm/npm-conf": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-2.3.1.tgz", + "integrity": "sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw==", + "license": "MIT", + "dependencies": { + "@pnpm/config.env-replace": "^1.1.0", + 
"@pnpm/network.ca-file": "^1.0.1", + "config-chain": "^1.1.11" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/@scure/base": { "version": "1.1.9", "resolved": "https://registry.npmjs.org/@scure/base/-/base-1.1.9.tgz", @@ -369,6 +421,18 @@ "url": "https://paulmillr.com/funding/" } }, + "node_modules/@sindresorhus/is": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", + "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, "node_modules/@solidity-parser/parser": { "version": "0.16.2", "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.16.2.tgz", @@ -377,6 +441,18 @@ "antlr4ts": "^0.5.0-alpha.4" } }, + "node_modules/@szmarczak/http-timer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", + "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", + "license": "MIT", + "dependencies": { + "defer-to-connect": "^2.0.1" + }, + "engines": { + "node": ">=14.16" + } + }, "node_modules/@types/conventional-commits-parser": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@types/conventional-commits-parser/-/conventional-commits-parser-5.0.1.tgz", @@ -385,6 +461,12 @@ "@types/node": "*" } }, + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "license": "MIT" + }, "node_modules/@types/node": { "version": "22.15.3", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.3.tgz", @@ -394,10 +476,13 @@ } }, 
"node_modules/@wavs/solidity": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@wavs/solidity/-/solidity-0.4.0.tgz", - "integrity": "sha512-IZMxK/hShGORmQE0Eb5gxsMRLEQo16L2cMDvb0IwB3YvNIM02CsOyB4TDFSSqE3KpyR5ZT7vUo1iuVqlkaHoCQ==", - "license": "MIT" + "version": "0.5.0-beta.10", + "resolved": "https://registry.npmjs.org/@wavs/solidity/-/solidity-0.5.0-beta.10.tgz", + "integrity": "sha512-59TCS2waA/mw/W6EHMY5DxyJ6x5epmBQeoAa6ce9of/SdOijTSk8c9zYpwq44I4Lgzsw6bnNp8YO80/Q3igUVw==", + "license": "MIT", + "dependencies": { + "solhint": "^6.0.0" + } }, "node_modules/abitype": { "version": "0.7.1", @@ -428,6 +513,15 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ajv-errors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ajv-errors/-/ajv-errors-1.0.1.tgz", + "integrity": "sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==", + "license": "MIT", + "peerDependencies": { + "ajv": ">=5.0.0" + } + }, "node_modules/ansi-escapes": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", @@ -534,6 +628,56 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "node_modules/better-ajv-errors": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/better-ajv-errors/-/better-ajv-errors-2.0.2.tgz", + "integrity": "sha512-1cLrJXEq46n0hjV8dDYwg9LKYjDb3KbeW7nZTv4kvfoDD9c2DXHIE31nxM+Y/cIfXMggLUfmxbm6h/JoM/yotA==", + "license": "Apache-2.0", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@humanwhocodes/momoa": "^2.0.4", + "chalk": "^4.1.2", + "jsonpointer": "^5.0.1", + "leven": "^3.1.0 < 4" + }, + "engines": { + "node": ">= 18.20.6" + }, + "peerDependencies": { + "ajv": "4.11.8 - 8" + } + }, + "node_modules/better-ajv-errors/node_modules/ansi-styles": { + "version": "4.3.0", 
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/better-ajv-errors/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", @@ -553,6 +697,45 @@ "node": ">=8" } }, + "node_modules/cacheable-lookup": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", + "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", + "license": "MIT", + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request": { + "version": "10.2.14", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.14.tgz", + "integrity": "sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==", + "license": "MIT", + "dependencies": { + "@types/http-cache-semantics": "^4.0.2", + "get-stream": "^6.0.1", + "http-cache-semantics": "^4.1.1", + "keyv": "^4.5.3", + "mimic-response": "^4.0.0", + "normalize-url": "^8.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { 
+ "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/call-bind": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", @@ -787,6 +970,22 @@ "dot-prop": "^5.1.0" } }, + "node_modules/config-chain": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", + "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", + "license": "MIT", + "dependencies": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "node_modules/config-chain/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC" + }, "node_modules/conventional-changelog-angular": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz", @@ -912,6 +1111,51 @@ "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.5.0.tgz", "integrity": "sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw==" }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/define-data-property": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", @@ -1273,6 +1517,15 @@ "node": ">= 6" } }, + "node_modules/form-data-encoder": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", + "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", + "license": "MIT", + "engines": { + "node": ">= 14.17" + } + }, "node_modules/fs-extra": { "version": "11.3.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.0.tgz", @@ -1516,6 +1769,43 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/got": { + "version": "12.6.1", + "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", + "integrity": 
"sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", + "license": "MIT", + "dependencies": { + "@sindresorhus/is": "^5.2.0", + "@szmarczak/http-timer": "^5.0.1", + "cacheable-lookup": "^7.0.0", + "cacheable-request": "^10.2.8", + "decompress-response": "^6.0.0", + "form-data-encoder": "^2.1.2", + "get-stream": "^6.0.1", + "http2-wrapper": "^2.1.10", + "lowercase-keys": "^3.0.0", + "p-cancelable": "^3.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/got/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -1587,6 +1877,25 @@ "node": ">=0.10.0" } }, + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", + "license": "BSD-2-Clause" + }, + "node_modules/http2-wrapper": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", + "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", + "license": "MIT", + "dependencies": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.2.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, "node_modules/human-signals": { "version": "5.0.0", "resolved": 
"https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", @@ -1866,6 +2175,12 @@ "node": ">=0.10.0" } }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", @@ -1895,6 +2210,15 @@ "node >= 0.2.0" ] }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/JSONStream": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", @@ -1910,6 +2234,39 @@ "node": "*" } }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/latest-version": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", + "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", + "license": "MIT", + "dependencies": { + "package-json": "^8.1.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": 
"sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/lilconfig": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", @@ -2084,6 +2441,18 @@ "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, + "node_modules/lowercase-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", + "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -2177,6 +2546,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/mimic-response": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", + "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/minimatch": { "version": "5.1.6", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", @@ -2201,6 +2582,18 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, + "node_modules/normalize-url": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.1.0.tgz", + "integrity": "sha512-X06Mfd/5aKsRHc0O0J5CUedwnPmnDtLF2+nq+KN9KSDlJHkPuh0JUviWjEWMe0SW/9TDdSLVPuk7L5gGTIA1/w==", + 
"license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/npm-run-path": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", @@ -2256,6 +2649,15 @@ "node": ">=0.10.0" } }, + "node_modules/p-cancelable": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", + "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", + "license": "MIT", + "engines": { + "node": ">=12.20" + } + }, "node_modules/p-limit": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", @@ -2284,6 +2686,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/package-json": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-8.1.1.tgz", + "integrity": "sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==", + "license": "MIT", + "dependencies": { + "got": "^12.1.0", + "registry-auth-token": "^5.0.1", + "registry-url": "^6.0.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -2402,6 +2822,12 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", + "license": "ISC" + }, "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", @@ -2434,6 +2860,66 @@ } ] }, + 
"node_modules/quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/rc/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC" + }, + "node_modules/registry-auth-token": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.1.0.tgz", + "integrity": "sha512-GdekYuwLXLxMuFTwAPg5UKGLW/UXzQrZvH/Zj791BQif5T05T0RsaLfHc9q3ZOKi7n+BoprPD9mJ0O0k4xzUlw==", + "license": "MIT", + "dependencies": { + "@pnpm/npm-conf": "^2.1.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/registry-url": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz", + "integrity": "sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==", + "license": "MIT", + "dependencies": { + "rc": "1.2.8" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -2450,6 +2936,12 @@ "node": ">=0.10.0" } }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "license": "MIT" + }, "node_modules/resolve-dir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", @@ -2470,6 +2962,21 @@ "node": ">=8" } }, + "node_modules/responselike": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", + "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", + "license": "MIT", + "dependencies": { + "lowercase-keys": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/restore-cursor": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", @@ -2698,6 +3205,39 @@ "semver": "bin/semver" } }, + "node_modules/solhint": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/solhint/-/solhint-6.0.1.tgz", + "integrity": "sha512-Lew5nhmkXqHPybzBzkMzvvWkpOJSSLTkfTZwRriWvfR2naS4YW2PsjVGaoX9tZFmHh7SuS+e2GEGo5FPYYmJ8g==", + "license": "MIT", + "dependencies": { + "@solidity-parser/parser": "^0.20.2", + "ajv": "^6.12.6", + "ajv-errors": "^1.0.1", + "antlr4": "^4.13.1-patch-1", + "ast-parents": "^0.0.1", + "better-ajv-errors": "^2.0.2", + "chalk": "^4.1.2", + "commander": "^10.0.0", + "cosmiconfig": "^8.0.0", + "fast-diff": "^1.2.0", + "glob": "^8.0.3", + "ignore": "^5.2.4", + "js-yaml": "^4.1.0", + "latest-version": "^7.0.0", + "lodash": "^4.17.21", + "pluralize": "^8.0.0", + "semver": "^7.5.2", + "table": "^6.8.1", + "text-table": "^0.2.0" + }, + "bin": { + "solhint": 
"solhint.js" + }, + "optionalDependencies": { + "prettier": "^2.8.3" + } + }, "node_modules/solhint-community": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/solhint-community/-/solhint-community-4.0.0.tgz", @@ -2837,6 +3377,100 @@ "node": ">=8" } }, + "node_modules/solhint/node_modules/@solidity-parser/parser": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@solidity-parser/parser/-/parser-0.20.2.tgz", + "integrity": "sha512-rbu0bzwNvMcwAjH86hiEAcOeRI2EeK8zCkHDrFykh/Al8mvJeFmjy3UrE7GYQjNwOgbGUUtCn5/k8CB8zIu7QA==", + "license": "MIT" + }, + "node_modules/solhint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/solhint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/solhint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/solhint/node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/solhint/node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "license": "MIT", + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/solhint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/sort-object-keys": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", @@ -2922,6 +3556,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", diff --git a/script/Common.s.sol b/script/Common.s.sol index f861db50..66824090 100644 --- a/script/Common.s.sol +++ b/script/Common.s.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {Script} from "forge-std/Script.sol"; diff --git a/script/avs-signing-key.sh b/script/avs-signing-key.sh deleted file mode 100644 index 35b9afd7..00000000 --- a/script/avs-signing-key.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -export DEFAULT_ENV_FILE=${DEFAULT_ENV_FILE:-"infra/wavs-1/.env"} - -SERVICE_INDEX=${SERVICE_INDEX:-0} - -SERVICE_ID=`curl -s http://localhost:8000/app | jq -r ".services[${SERVICE_INDEX}].id"` -if [ -z "$SERVICE_ID" ] || [ "$SERVICE_ID" == "null" ]; then - echo "Error: SERVICE_ID is null or not found for index ${SERVICE_INDEX}." - return -fi - -HD_INDEX=`curl -s http://localhost:8000/service-key/${SERVICE_ID} | jq -rc '.secp256k1.hd_index'` - -source ${DEFAULT_ENV_FILE} -export OPERATOR_PRIVATE_KEY=`cast wallet private-key --mnemonic "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index 0` -export AVS_SIGNING_ADDRESS=`cast wallet address --mnemonic-path "$WAVS_SUBMISSION_MNEMONIC" --mnemonic-index ${HD_INDEX}` - -echo "HD_INDEX=${HD_INDEX}" -echo "SERVICE_ID=${SERVICE_ID}" -echo "OPERATOR_PRIVATE_KEY=*HIDDEN*" -echo "AVS_SIGNING_ADDRESS=${AVS_SIGNING_ADDRESS}" diff --git a/script/build-service.sh b/script/build-service.sh index f000d93b..d5002056 100644 --- a/script/build-service.sh +++ b/script/build-service.sh @@ -9,11 +9,10 @@ sh ./build_service.sh # Overrides: - FILE_LOCATION: The save location of the configuration file -- TRIGGER_ADDRESS: The address to trigger the service -- SUBMIT_ADDRESS: The address to submit the service - TRIGGER_EVENT: The event to trigger the service (e.g. 
"NewTrigger(bytes)") - FUEL_LIMIT: The fuel limit (wasm compute metering) for the service - MAX_GAS: The maximum chain gas for the submission Tx +- AGGREGATOR_URL: The URL of the aggregator service ''' # == Defaults == @@ -21,61 +20,264 @@ sh ./build_service.sh FUEL_LIMIT=${FUEL_LIMIT:-1000000000000} MAX_GAS=${MAX_GAS:-5000000} FILE_LOCATION=${FILE_LOCATION:-".docker/service.json"} -TRIGGER_EVENT=${TRIGGER_EVENT:-"NewTrigger(bytes)"} -TRIGGER_CHAIN=${TRIGGER_CHAIN:-"local"} -SUBMIT_CHAIN=${SUBMIT_CHAIN:-"local"} +TRIGGER_CHAIN=${TRIGGER_CHAIN:-"evm:31337"} +SUBMIT_CHAIN=${SUBMIT_CHAIN:-"evm:31337"} AGGREGATOR_URL=${AGGREGATOR_URL:-""} DEPLOY_ENV=${DEPLOY_ENV:-""} REGISTRY=${REGISTRY:-"wa.dev"} +WAVS_SERVICE_MANAGER_ADDRESS=${WAVS_SERVICE_MANAGER_ADDRESS:-`task config:service-manager-address`} -BASE_CMD="docker run --rm --network host -w /data -v $(pwd):/data ghcr.io/lay3rlabs/wavs:35c96a4 wavs-cli service --json true --home /data --file /data/${FILE_LOCATION}" +# Function to substitute variables in config values +substitute_config_vars() { + local config_str="$1" + + # Replace all ${VAR_NAME} patterns with their environment variable values + while [[ "$config_str" =~ \$\{([^}]+)\} ]]; do + var_name="${BASH_REMATCH[1]}" + var_value="${!var_name}" + if [ -z "$var_value" ]; then + echo "⚠️ Warning: Variable ${var_name} is not set, using empty string" >&2 + var_value="" + fi + config_str="${config_str//\$\{${var_name}\}/${var_value}}" + done + + echo "$config_str" +} + +# Function to build config arguments from JSON object +build_config_args() { + local config_json="$1" + local args="" + + if [ -n "$config_json" ] && [ "$config_json" != "null" ] && [ "$config_json" != "{}" ]; then + # Process each key-value pair + echo "$config_json" | jq -c 'to_entries[]' | while IFS= read -r line; do + key=$(echo "$line" | jq -r '.key') + value=$(echo "$line" | jq -r '.value') + # Substitute variables in the value + value=$(substitute_config_vars "$value") + args="${args} --values 
\"${key}=${value}\"" + done + fi + + echo "$args" +} + +# Function to build environment variable arguments +build_env_args() { + local env_json="$1" + local args="" + + if [ -n "$env_json" ] && [ "$env_json" != "null" ] && [ "$env_json" != "[]" ]; then + # Process each environment variable + echo "$env_json" | jq -c '.[]' | while IFS= read -r env_var; do + env_var=$(echo "$env_var" | jq -r '.') + args="${args} --values \"${env_var}\"" + done + fi + + echo "$args" +} + +BASE_CMD="docker run --rm --network host -w /data -v $(pwd):/data ghcr.io/lay3rlabs/wavs:1.4.1 wavs-cli service --json true --home /data --file /data/${FILE_LOCATION}" if [ -z "$WAVS_SERVICE_MANAGER_ADDRESS" ]; then - export WAVS_SERVICE_MANAGER_ADDRESS=$(jq -r .addresses.WavsServiceManager ./.nodes/avs_deploy.json) + export WAVS_SERVICE_MANAGER_ADDRESS=$(jq -r '.contract' .docker/poa_sm_deploy.json) if [ -z "$WAVS_SERVICE_MANAGER_ADDRESS" ]; then echo "WAVS_SERVICE_MANAGER_ADDRESS is not set. Please set it to the address of the service manager." return fi fi - -if [ -z "$TRIGGER_ADDRESS" ]; then - TRIGGER_ADDRESS=`make get-trigger-from-deploy` -fi -if [ -z "$SUBMIT_ADDRESS" ]; then - SUBMIT_ADDRESS=`make get-submit-from-deploy` -fi if [ -z "$DEPLOY_ENV" ]; then - DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) + DEPLOY_ENV=$(task get-deploy-status) fi + # === Core === -TRIGGER_EVENT_HASH=`cast keccak ${TRIGGER_EVENT}` +# Get PKG_NAMESPACE +if [ -z "$PKG_NAMESPACE" ]; then + export PKG_NAMESPACE=`task get-wasi-namespace` + if [ -z "$PKG_NAMESPACE" ]; then + echo "PKG_NAMESPACE is not set. Please set the PKG_NAMESPACE environment variable." + exit 1 + fi +fi -export SERVICE_ID=`eval "${BASE_CMD} init --name demo" | jq -r .service.id` -echo "Service ID: ${SERVICE_ID}" +eval "${BASE_CMD} init --name en0va" -WORKFLOW_ID=`eval "$BASE_CMD workflow add" | jq -r .workflow_id` -echo "Workflow ID: ${WORKFLOW_ID}" +# Process component configurations from JSON file +if [ -z "${COMPONENT_CONFIGS_FILE}" ] || [ ! 
-f "${COMPONENT_CONFIGS_FILE}" ]; then + # Try default location + COMPONENT_CONFIGS_FILE="config/components.json" + if [ ! -f "${COMPONENT_CONFIGS_FILE}" ]; then + # Try .docker location + COMPONENT_CONFIGS_FILE=".docker/components-config.json" + if [ ! -f "${COMPONENT_CONFIGS_FILE}" ]; then + echo "❌ Component configuration file not found" + echo "Please specify COMPONENT_CONFIGS_FILE or ensure config/components.json or .docker/components-config.json exists" + exit 1 + fi + fi +fi -eval "$BASE_CMD workflow trigger --id ${WORKFLOW_ID} set-evm --address ${TRIGGER_ADDRESS} --chain-name ${TRIGGER_CHAIN} --event-hash ${TRIGGER_EVENT_HASH}" > /dev/null +echo "Reading component configurations from: ${COMPONENT_CONFIGS_FILE}" -# If no aggregator is set, use the default -SUB_CMD="set-evm" -if [ -n "$AGGREGATOR_URL" ]; then - SUB_CMD="set-aggregator --url ${AGGREGATOR_URL}" -fi -eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} ${SUB_CMD} --address ${SUBMIT_ADDRESS} --chain-name ${SUBMIT_CHAIN} --max-gas ${MAX_GAS}" > /dev/null +# Function to get aggregator component configuration +get_aggregator_config() { + local config_file="$1" + local config_json="{}" + + if [ -f "$config_file" ]; then + config_json=$(jq '.aggregator_components[0] // {}' "$config_file") + fi + + echo "$config_json" +} + +# Get aggregator component configuration +AGGREGATOR_COMPONENT=$(get_aggregator_config "${COMPONENT_CONFIGS_FILE}") +AGG_PKG_NAME=$(echo "$AGGREGATOR_COMPONENT" | jq -r '.package_name // "aggregator"') +AGG_PKG_VERSION=$(echo "$AGGREGATOR_COMPONENT" | jq -r '.package_version // "0.1.0"') +AGG_CONFIG_VALUES=$(echo "$AGGREGATOR_COMPONENT" | jq '.config_values // {}') +AGG_ENV_VARIABLES=$(echo "$AGGREGATOR_COMPONENT" | jq '.env_variables // []') + +# Export all required variables that might be used in config value substitutions +# These should be set by deploy-script.sh before calling this script +# echo "📋 Available configuration variables:" +# [ -n "${CHAIN_NAME}" ] && echo " 
CHAIN_NAME: ${CHAIN_NAME}" +echo "" + +jq -c '.components[]' "${COMPONENT_CONFIGS_FILE}" | while IFS= read -r component; do + COMP_DISABLED=$(echo "$component" | jq -r '.disabled // false') + if [ "$COMP_DISABLED" = "true" ]; then + continue + fi + + COMP_FILENAME=$(echo "$component" | jq -r '.filename') + COMP_PKG_NAME=$(echo "$component" | jq -r '.package_name') + COMP_PKG_VERSION=$(echo "$component" | jq -r '.package_version') + COMP_SUBMIT_JSON_PATH=$(echo "$component" | jq -r '.submit_json_path') + COMP_TRIGGER_BLOCK_INTERVAL=$(echo "$component" | jq -r '.trigger_block_interval // ""') + COMP_TRIGGER_CRON_SCHEDULE=$(echo "$component" | jq -r '.trigger_cron.schedule // ""') + COMP_TRIGGER_CRON_START_TIME=$(echo "$component" | jq -r '.trigger_cron.start_time // ""') + COMP_TRIGGER_CRON_END_TIME=$(echo "$component" | jq -r '.trigger_cron.end_time // ""') + + # Extract component-specific config values and env variables + COMP_CONFIG_VALUES=$(echo "$component" | jq '.config_values // {}') + COMP_ENV_VARIABLES=$(echo "$component" | jq '.env_variables // []') + + echo "Creating workflow for component: ${COMP_FILENAME}" + WORKFLOW_ID=`eval "$BASE_CMD workflow add" | jq -r .workflow_id` + + echo " Workflow ID: ${WORKFLOW_ID}" + echo " Package: ${PKG_NAMESPACE}:${COMP_PKG_NAME}@${COMP_PKG_VERSION}" + echo " Submit: ${COMP_SUBMIT_ADDRESS}" + + if [ -n "$COMP_TRIGGER_BLOCK_INTERVAL" ]; then + eval "$BASE_CMD workflow trigger --id ${WORKFLOW_ID} set-block-interval --chain ${TRIGGER_CHAIN} --n-blocks ${COMP_TRIGGER_BLOCK_INTERVAL}" > /dev/null + + echo " Trigger block interval: ${COMP_TRIGGER_BLOCK_INTERVAL}" + elif [ -n "$COMP_TRIGGER_CRON_SCHEDULE" ]; then + # Build cron command arguments + CRON_CMD_ARGS="--schedule '${COMP_TRIGGER_CRON_SCHEDULE}'" + + if [ "$COMP_TRIGGER_CRON_START_TIME" != "null" ] && [ -n "$COMP_TRIGGER_CRON_START_TIME" ]; then + CRON_CMD_ARGS="${CRON_CMD_ARGS} --start-time ${COMP_TRIGGER_CRON_START_TIME}" + fi + + if [ "$COMP_TRIGGER_CRON_END_TIME" != 
"null" ] && [ -n "$COMP_TRIGGER_CRON_END_TIME" ]; then + CRON_CMD_ARGS="${CRON_CMD_ARGS} --end-time ${COMP_TRIGGER_CRON_END_TIME}" + fi + + eval "$BASE_CMD workflow trigger --id ${WORKFLOW_ID} set-cron ${CRON_CMD_ARGS}" > /dev/null + + echo " Trigger cron: ${COMP_TRIGGER_CRON_SCHEDULE}" + if [ "$COMP_TRIGGER_CRON_START_TIME" != "null" ] && [ -n "$COMP_TRIGGER_CRON_START_TIME" ]; then + echo " Start time: ${COMP_TRIGGER_CRON_START_TIME}" + fi + if [ "$COMP_TRIGGER_CRON_END_TIME" != "null" ] && [ -n "$COMP_TRIGGER_CRON_END_TIME" ]; then + echo " End time: ${COMP_TRIGGER_CRON_END_TIME}" + fi + else + COMP_TRIGGER_EVENT=$(echo "$component" | jq -r '.trigger_event') + COMP_TRIGGER_JSON_PATH=$(echo "$component" | jq -r '.trigger_json_path') + + # Extract addresses from JSON paths + COMP_TRIGGER_ADDRESS=`jq -r ".${COMP_TRIGGER_JSON_PATH}" .docker/deployment_summary.json` + COMP_SUBMIT_ADDRESS=`jq -r ".${COMP_SUBMIT_JSON_PATH}" .docker/deployment_summary.json` + + # Validate addresses + if [ -z "$COMP_TRIGGER_ADDRESS" ] || [ "$COMP_TRIGGER_ADDRESS" = "null" ]; then + echo "❌ Trigger address not found for component: ${COMP_FILENAME} at path: ${COMP_TRIGGER_JSON_PATH}" + exit 1 + fi + if [ -z "$COMP_SUBMIT_ADDRESS" ] || [ "$COMP_SUBMIT_ADDRESS" = "null" ]; then + echo "❌ Submit address not found for component: ${COMP_FILENAME} at path: ${COMP_SUBMIT_JSON_PATH}" + exit 1 + fi + + COMP_TRIGGER_EVENT_HASH=`cast keccak ${COMP_TRIGGER_EVENT}` + + echo " Trigger: ${COMP_TRIGGER_ADDRESS} (${COMP_TRIGGER_EVENT})" + + eval "$BASE_CMD workflow trigger --id ${WORKFLOW_ID} set-evm --address ${COMP_TRIGGER_ADDRESS} --chain ${TRIGGER_CHAIN} --event-hash ${COMP_TRIGGER_EVENT_HASH}" > /dev/null + fi + + # Set submit to use aggregator component + if [ -n "$AGGREGATOR_URL" ]; then + eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} set-aggregator --url ${AGGREGATOR_URL}" > /dev/null + + # Configure aggregator component for this workflow + echo " 📋 Configuring aggregator component" + eval 
"$BASE_CMD workflow submit --id ${WORKFLOW_ID} component set-source-registry --domain ${REGISTRY} --package ${PKG_NAMESPACE}:${AGG_PKG_NAME} --version ${AGG_PKG_VERSION}" > /dev/null + eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} component permissions --http-hosts '*' --file-system true" > /dev/null + + # Set aggregator component environment variables + AGG_ENV_ARGS=$(build_env_args "$AGG_ENV_VARIABLES") + if [ -n "$AGG_ENV_ARGS" ]; then + eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} component env ${AGG_ENV_ARGS}" > /dev/null + fi + + # Set aggregator component configuration routing + # (( --values is found in ${AGG_CONFIG_ARGS} already )) + AGG_CONFIG_ARGS=$(build_config_args "$AGG_CONFIG_VALUES") + if [ -n "$AGG_CONFIG_ARGS" ]; then + echo " 📋 Configuring aggregator (vars: ${AGG_CONFIG_ARGS})" + fi + eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} component config --values \"${SUBMIT_CHAIN}=${COMP_SUBMIT_ADDRESS}\" ${AGG_CONFIG_ARGS}" > /dev/null + else + eval "$BASE_CMD workflow submit --id ${WORKFLOW_ID} set-none" > /dev/null + fi + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} set-source-registry --domain ${REGISTRY} --package ${PKG_NAMESPACE}:${COMP_PKG_NAME} --version ${COMP_PKG_VERSION}" + + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} permissions --http-hosts '*' --file-system true" > /dev/null + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} time-limit --seconds 30" > /dev/null + + # Set component-specific environment variables + ENV_ARGS=$(build_env_args "$COMP_ENV_VARIABLES") + if [ -n "$ENV_ARGS" ]; then + echo " 📋 Setting environment variables" + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} env ${ENV_ARGS}" > /dev/null + fi + + # Set component-specific config values + CONFIG_ARGS=$(build_config_args "$COMP_CONFIG_VALUES") + if [ -n "$CONFIG_ARGS" ]; then + echo " 📋 Configuring component" + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} config ${CONFIG_ARGS}" > /dev/null + # else + # echo " 
⚠️ No configuration values specified for ${COMP_FILENAME}" + fi -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} set-source-registry --domain ${REGISTRY} --package ${PKG_NAMESPACE}:${PKG_NAME} --version ${PKG_VERSION}" + eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} fuel-limit --fuel ${FUEL_LIMIT}" > /dev/null -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} permissions --http-hosts '*' --file-system true" > /dev/null -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} time-limit --seconds 30" > /dev/null -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} env --values WAVS_ENV_SOME_SECRET" > /dev/null -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} config --values 'key=value,key2=value2'" > /dev/null -eval "$BASE_CMD workflow component --id ${WORKFLOW_ID} fuel-limit --fuel ${FUEL_LIMIT}" > /dev/null + echo " ✅ Workflow configured for ${COMP_FILENAME}" + echo "" +done -eval "$BASE_CMD manager set-evm --chain-name ${SUBMIT_CHAIN} --address `cast --to-checksum ${WAVS_SERVICE_MANAGER_ADDRESS}`" > /dev/null +eval "$BASE_CMD manager set-evm --chain ${SUBMIT_CHAIN} --address `cast --to-checksum ${WAVS_SERVICE_MANAGER_ADDRESS}`" > /dev/null eval "$BASE_CMD validate" > /dev/null echo "Configuration file created ${FILE_LOCATION}. Watching events from '${TRIGGER_CHAIN}' & submitting to '${SUBMIT_CHAIN}'." diff --git a/script/build_components.sh b/script/build_components.sh deleted file mode 100755 index a3f69105..00000000 --- a/script/build_components.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# Called from the Makefile to build all (or some) components -# -# ./script/build_components.sh [WASI_BUILD_DIR] -# -# WASI_BUILD_DIR: the directory to build the component in -# e.g. 
./script/build_components.sh components/golang-evm-price-oracle -# - -# Extract arguments -WASI_BUILD_DIR="$1" - -RECIPE="wasi-build" -MAKEFILE_DIRS=`find components/* -maxdepth 1 -name "Makefile" -o -name "makefile"` - -for makefile_path in $MAKEFILE_DIRS; do - if grep -q "^${RECIPE}:" "$makefile_path" 2>/dev/null; then - if [ "$WASI_BUILD_DIR" != "" ] && [[ "$makefile_path" != *"$WASI_BUILD_DIR"* ]]; then - continue - fi; - parent_dir=$(dirname "$makefile_path") - make -s -C "$parent_dir" $RECIPE - else - echo "Recipe '$RECIPE' not found in $dir" - fi; -done diff --git a/script/create-aggregator.sh b/script/create-aggregator.sh index 073e36e5..91dad30c 100644 --- a/script/create-aggregator.sh +++ b/script/create-aggregator.sh @@ -12,10 +12,10 @@ if [ -z "$AGGREGATOR_INDEX" ]; then fi if [ -z "$DEPLOY_ENV" ]; then - DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) + DEPLOY_ENV=$(task get-deploy-status) fi if [ -z "$RPC_URL" ]; then - RPC_URL=`sh ./script/get-rpc.sh` + RPC_URL=`task get-rpc` fi SP=""; if [[ "$(uname)" == *"Darwin"* ]]; then SP=" "; fi @@ -29,10 +29,29 @@ TEMP_FILENAME=".docker/tmp.json" cast wallet new-mnemonic --json > ${TEMP_FILENAME} export AGG_MNEMONIC=`jq -r .mnemonic ${TEMP_FILENAME}` export AGG_PK=`jq -r .accounts[0].private_key ${TEMP_FILENAME}` + +# if its not a LOCAL deploy, we will see if the user wants to override. if they do, we do. +if [ "$DEPLOY_ENV" != "LOCAL" ]; then + read -p "Enter aggregator mnemonic (leave blank to generate a new one): " INPUT_MNEMONIC + if [ ! -z "$INPUT_MNEMONIC" ]; then + export AGG_MNEMONIC="$INPUT_MNEMONIC" + else + echo "Generating new mnemonic..." 
+ fi + + export AGG_PK=$(cast wallet private-key --mnemonic "$AGG_MNEMONIC") +fi AGGREGATOR_ADDR=`cast wallet address $AGG_PK` # == infra files == AGG_LOC=infra/aggregator-${AGGREGATOR_INDEX} + +if [ -d "${AGG_LOC}" ] && [ "$(ls -A ${AGG_LOC})" ]; then + echo -e "\nRemoving ${AGG_LOC}" + docker kill wavs-${AGG_LOC} > /dev/null 2>&1 || true + echo "Removing dir ${AGG_LOC} ((may prompt for password))" + sudo rm -rf ${AGG_LOC} +fi mkdir -p ${AGG_LOC} ENV_FILENAME="${AGG_LOC}/.env" @@ -45,14 +64,14 @@ cat > "${AGG_LOC}/start.sh" << EOF #!/bin/bash cd \$(dirname "\$0") || return -IMAGE=ghcr.io/lay3rlabs/wavs:35c96a4 +IMAGE=ghcr.io/lay3rlabs/wavs:1.4.1 INSTANCE=wavs-aggregator-${AGGREGATOR_INDEX} IPFS_GATEWAY=\${IPFS_GATEWAY:-"https://gateway.pinata.cloud/ipfs/"} docker kill \${INSTANCE} > /dev/null 2>&1 || true docker rm \${INSTANCE} > /dev/null 2>&1 || true -docker run -d --name \${INSTANCE} --network host --stop-signal SIGKILL --env-file .env --user 1000:1000 -v .:/wavs \\ +docker run -d --name \${INSTANCE} --network host --stop-signal SIGKILL --env-file .env -v .:/wavs \\ \${IMAGE} wavs-aggregator --log-level debug --host 0.0.0.0 --port 8001 --ipfs-gateway \${IPFS_GATEWAY} # give it a chance to start up diff --git a/script/create-deployer.sh b/script/create-deployer.sh index fb003b71..5e4f8738 100644 --- a/script/create-deployer.sh +++ b/script/create-deployer.sh @@ -2,12 +2,12 @@ # set -e SP=""; if [[ "$(uname)" == *"Darwin"* ]]; then SP=" "; fi -# if DEPLOY_ENV is not set, grab it from the ./script/get-deploy-status.sh +# if DEPLOY_ENV is not set, grab it from the `task get-deploy-status` if [ -z "$DEPLOY_ENV" ]; then - DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) + DEPLOY_ENV=$(task get-deploy-status) fi if [ -z "$RPC_URL" ]; then - RPC_URL=`sh ./script/get-rpc.sh` + RPC_URL=`task get-rpc` fi if [ ! 
-f .env ]; then @@ -21,32 +21,56 @@ fi mkdir -p .docker -# Create new deployer -cast wallet new-mnemonic --json > .docker/deployer.json -export DEPLOYER_PK=`jq -r .accounts[0].private_key .docker/deployer.json` -export DEPLOYER_ADDRESS=`cast wallet address $DEPLOYER_PK` -sed -i${SP}'' -e "s/^FUNDED_KEY=.*$/FUNDED_KEY=$DEPLOYER_PK/" .env +# Create new deployer (if required) +create_funded_key() { + echo "Creating new FUNDED_KEY..." + export FUNDED_KEY=$(cast wallet new-mnemonic --json | jq -r '.accounts[0].private_key') + sed -i${SP}'' -e "s/^FUNDED_KEY=.*$/FUNDED_KEY=$FUNDED_KEY/" .env +} +# Setup deployer key based on environment +if [ "$DEPLOY_ENV" = "LOCAL" ]; then + echo "Setting up LOCAL environment deployer" + create_funded_key +else + # Check for existing key in non-local environments + export FUNDED_KEY=$(task config:funded-key) + + if [ -z "$FUNDED_KEY" ]; then + echo "No FUNDED_KEY found in .env, creating new one" + create_funded_key + else + echo "Using existing FUNDED_KEY from .env" + fi +fi +# Get deployer address +export DEPLOYER_ADDRESS=$(cast wallet address "$FUNDED_KEY") + +# Fund deployer based on environment if [ "$DEPLOY_ENV" = "LOCAL" ]; then - # Good DevEx, auto fund the deployer - cast rpc anvil_setBalance "${DEPLOYER_ADDRESS}" '15000000000000000000' --rpc-url ${RPC_URL} > /dev/null + # Auto-fund deployer in local environment + echo "Funding local deployer..." + cast rpc anvil_setBalance "$DEPLOYER_ADDRESS" '15000000000000000000' --rpc-url "$RPC_URL" > /dev/null - BAL=`cast balance --ether $DEPLOYER_ADDRESS --rpc-url=${RPC_URL}` - echo "Local deployer \`${DEPLOYER_ADDRESS}\` funded with ${BAL}ether" + BALANCE=$(cast balance --ether "$DEPLOYER_ADDRESS" --rpc-url="$RPC_URL") + echo "Local deployer $DEPLOYER_ADDRESS funded with ${BALANCE} ETH" else - # New account on testnet, must be funded externally (i.e. 
metamask) - echo "Fund deployer ${DEPLOYER_ADDRESS} with some ETH, or change this value in the .env" + # Wait for external funding in testnet/mainnet + echo "Please fund deployer $DEPLOYER_ADDRESS with ETH" + echo "You can change this address in the .env file if needed" sleep 5 + echo "Waiting for funding..." while true; do - BALANCE=`cast balance --ether $DEPLOYER_ADDRESS --rpc-url=${RPC_URL}` + BALANCE=$(cast balance --ether "$DEPLOYER_ADDRESS" --rpc-url="$RPC_URL") + if [ "$BALANCE" != "0.000000000000000000" ]; then - echo "Deployer balance is now $BALANCE" + echo "Deployer funded! Balance: $BALANCE ETH for $DEPLOYER_ADDRESS" break fi - echo " [!] Waiting for balance to be funded by another account to this deployer..." + + echo " [!] Waiting for balance increase $DEPLOYER_ADDRESS... (current: $BALANCE ETH)" sleep 5 done fi - diff --git a/script/create-operator.sh b/script/create-operator.sh index ae492247..7f75e3a3 100644 --- a/script/create-operator.sh +++ b/script/create-operator.sh @@ -17,17 +17,17 @@ OPERATOR_LOC=infra/wavs-${OPERATOR_INDEX} if [ -d "${OPERATOR_LOC}" ] && [ "$(ls -A ${OPERATOR_LOC})" ]; then - read -p "Directory ${OPERATOR_LOC} already exists and is not empty. Do you want to remove it? (y/n): " -n 1 -r - if [[ $REPLY =~ ^[Yy]$ ]]; then + # read -p "Directory ${OPERATOR_LOC} already exists and is not empty. Do you want to remove it? (y/n): " -n 1 -r + # if [[ $REPLY =~ ^[Yy]$ ]]; then echo -e "\nRemoving ${OPERATOR_LOC}" docker kill wavs-${OPERATOR_INDEX} > /dev/null 2>&1 || true echo "Removing dir ${OPERATOR_LOC} ((may prompt for password))" sudo rm -rf ${OPERATOR_LOC} - else - echo -e "\nExiting without changes." - return - fi + # else + # echo -e "\nExiting without changes." 
+ # return + # fi fi mkdir -p ${OPERATOR_LOC} @@ -39,22 +39,34 @@ cp ./script/template/.env.example.operator ${ENV_FILENAME} TEMP_FILENAME=".docker/tmp.json" +# creates a new wallet no matter what cast wallet new-mnemonic --json > ${TEMP_FILENAME} export OPERATOR_MNEMONIC=`jq -r .mnemonic ${TEMP_FILENAME}` export OPERATOR_PK=`jq -r .accounts[0].private_key ${TEMP_FILENAME}` +# if its not a LOCAL deploy, we will see if the user wants to override. if they do, we do. +if [ "$(task get-deploy-status)" != "LOCAL" ]; then + read -p "Enter operator mnemonic (leave blank to generate a new one): " INPUT_MNEMONIC + if [ ! -z "$INPUT_MNEMONIC" ]; then + export OPERATOR_MNEMONIC="$INPUT_MNEMONIC" + else + echo "Generating new mnemonic..." + fi + + export OPERATOR_PK=$(cast wallet private-key --mnemonic "$OPERATOR_MNEMONIC") +fi + sed -i${SP}'' -e "s/^WAVS_SUBMISSION_MNEMONIC=.*$/WAVS_SUBMISSION_MNEMONIC=\"$OPERATOR_MNEMONIC\"/" ${ENV_FILENAME} sed -i${SP}'' -e "s/^WAVS_CLI_EVM_CREDENTIAL=.*$/WAVS_CLI_EVM_CREDENTIAL=\"$OPERATOR_PK\"/" ${ENV_FILENAME} rm ${TEMP_FILENAME} - # Create startup script cat > "${OPERATOR_LOC}/start.sh" << EOF #!/bin/bash cd \$(dirname "\$0") || return -IMAGE=ghcr.io/lay3rlabs/wavs:35c96a4 +IMAGE=ghcr.io/lay3rlabs/wavs:1.4.1 WAVS_INSTANCE=wavs-${OPERATOR_INDEX} IPFS_GATEWAY=\${IPFS_GATEWAY:-"https://gateway.pinata.cloud/ipfs/"} diff --git a/script/deploy-contracts.sh b/script/deploy-contracts.sh deleted file mode 100644 index 13e41939..00000000 --- a/script/deploy-contracts.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -if [ -z "$WAVS_SERVICE_MANAGER_ADDRESS" ]; then - if [ -f .nodes/avs_deploy.json ]; then - echo "Using WAVS_SERVICE_MANAGER_ADDRESS from .nodes/avs_deploy.json" - export WAVS_SERVICE_MANAGER_ADDRESS=$(jq -r '.addresses.WavsServiceManager' .nodes/avs_deploy.json) - else - echo "WAVS_SERVICE_MANAGER_ADDRESS is not set." - return - fi -fi - -forge build -if [ $? -ne 0 ]; then - echo "Forge build failed. 
Running 'npm install' and deleting the 'out/' and 'cache/' directory." - npm install - rm -rf out/ cache/ - forge build -fi - -export DEPLOYER_PK=$(cat .nodes/deployer) - -forge create SimpleSubmit --json --broadcast -r ${RPC_URL} --private-key "${DEPLOYER_PK}" --constructor-args "${WAVS_SERVICE_MANAGER_ADDRESS}" > .docker/submit.json -export SERVICE_SUBMISSION_ADDR=`jq -r '.deployedTo' .docker/submit.json` - -forge create SimpleTrigger --json --broadcast -r ${RPC_URL} --private-key "${DEPLOYER_PK}" > .docker/trigger.json -export SERVICE_TRIGGER_ADDR=`jq -r '.deployedTo' .docker/trigger.json` - -echo "RPC_URL=${RPC_URL}" -echo "WAVS_SERVICE_MANAGER_ADDRESS=${WAVS_SERVICE_MANAGER_ADDRESS}" -echo "SERVICE_SUBMISSION_ADDR=${SERVICE_SUBMISSION_ADDR}" -echo "SERVICE_TRIGGER_ADDR=${SERVICE_TRIGGER_ADDR}" - diff --git a/script/deploy-script.sh b/script/deploy-script.sh index 08c334a9..fc2b6711 100644 --- a/script/deploy-script.sh +++ b/script/deploy-script.sh @@ -1,20 +1,38 @@ #!/bin/bash -# set -e +set -e +# set -x -if [ ! -d compiled/ ] || [ -z "$(find compiled/ -name '*.wasm')" ]; then - echo "No WASM files found in compiled/. Building components." - make wasi-build -fi +STATUS_FILE=".docker/component-upload-status" -if git status --porcelain | grep -q "^.* components/"; then - echo "Found pending changes in components/*, building" - WASI_BUILD_DIR=components/evm-price-oracle make wasi-build +# Store the PID of the background process +if [[ "${SKIP_COMPONENT_UPLOAD}" != "true" ]]; then + bash script/upload-components-background.sh & + UPLOAD_PID=$! fi -### === Deploy Eigenlayer === +# Function to clean up on exit +cleanup() { + echo "Cleaning up..." + # Kill the background upload process if it's still running + if [ -n "$UPLOAD_PID" ] && kill -0 $UPLOAD_PID 2>/dev/null; then + echo "Terminating background upload process (PID: $UPLOAD_PID)..." 
+ kill -TERM $UPLOAD_PID 2>/dev/null + # Give it a moment to terminate gracefully, then force kill if needed + sleep 1 + kill -9 $UPLOAD_PID 2>/dev/null || true + fi + # Clean up the status file + rm -f "$STATUS_FILE" + echo "Cleanup complete" + exit 1 +} + +# Set up trap to handle Ctrl+C (SIGINT) and other termination signals +trap cleanup INT TERM EXIT + # if RPC_URL is not set, use default by calling command if [ -z "$RPC_URL" ]; then - export RPC_URL=$(bash ./script/get-rpc-url.sh) + export RPC_URL=$(task get-rpc) fi if [ -z "$AGGREGATOR_URL" ]; then export AGGREGATOR_URL=http://127.0.0.1:8001 @@ -22,75 +40,176 @@ fi # local: create deployer & auto fund. testnet: create & iterate check balance bash ./script/create-deployer.sh -export DEPLOYER_PK=$(cat .nodes/deployer) -sleep 1 +export FUNDED_KEY=$(task config:funded-key) -## Deploy Eigenlayer from Deployer -COMMAND=deploy make wavs-middleware -sleep 1 +if [[ "${SKIP_CONTRACT_UPLOAD}" != "true" ]]; then + echo "🟢 Deploying POA Service Manager..." 
+ POA_MIDDLEWARE="docker run --rm --network host -v ./.nodes:/root/.nodes --env-file .env ghcr.io/lay3rlabs/poa-middleware:1.0.1" + $POA_MIDDLEWARE deploy + sleep 1 # for Base + $POA_MIDDLEWARE owner_operation updateStakeThreshold 1000 + sleep 1 # for Base + $POA_MIDDLEWARE owner_operation updateQuorum 2 3 +fi -### === Deploy Service === ### +if [ "$(task get-deploy-status)" = "LOCAL" ]; then + # required for the checkpoint stuff, ref: aurtur / https://github.com/Lay3rLabs/EN0VA/pull/31/commits/d205e9c65f91fb5b0b5bca672d8d28d6c7f672f9#diff-e3d8246ec3421fa3a204fe7a8f0586acfad4888ae82f5b8c6d130cb907705c80R75-R78 + cast rpc anvil_mine --rpc-url $(task get-rpc) +fi -# Forge deploy SimpleSubmit & SimpleTrigger -source script/deploy-contracts.sh -sleep 1 +WAVS_SERVICE_MANAGER_ADDRESS=`task config:service-manager-address` +echo "ℹ️ Using WAVS Service Manager address: ${WAVS_SERVICE_MANAGER_ADDRESS}" -### === Deploy Service === -export COMPONENT_FILENAME=evm_price_oracle.wasm -if [ "$(sh ./script/get-deploy-status.sh)" = "TESTNET" ]; then - read -p "Enter the component filename (default: ${COMPONENT_FILENAME}): " input_filename - if [ -n "$input_filename" ]; then - export COMPONENT_FILENAME="$input_filename" - fi -fi +### === Deploy Contract === ### -export PKG_NAME="evmrustoracle" -if [ "$(sh ./script/get-deploy-status.sh)" = "TESTNET" ]; then - read -p "Enter the package name (default: ${PKG_NAME}): " input_pkg_name - if [ -n "$input_pkg_name" ]; then - export PKG_NAME="$input_pkg_name" - fi +# TODO: remove deploy-contracts.sh once this works +export DEPLOY_ENV=`task get-deploy-status` +if [[ "${SKIP_CONTRACT_UPLOAD}" != "true" ]]; then + echo "🚀 Starting contract deployment..." + + # Build the contracts + echo "Building contracts..." + forge build + + echo "🔧 Configuration:" + echo " RPC_URL: ${RPC_URL}" + echo " WAVS_SERVICE_MANAGER_ADDRESS: ${WAVS_SERVICE_MANAGER_ADDRESS}" + + mkdir -p .docker + echo "📦 Deploying contracts..." 
+ + # TODO: we can probably put this in the components.json to simple and make it scale right? + forge create SimpleSubmit --json --broadcast \ + --rpc-url $RPC_URL \ + --private-key $FUNDED_KEY \ + --constructor-args "${WAVS_SERVICE_MANAGER_ADDRESS}" > .docker/submit.json + + forge create SimpleTrigger --json --broadcast \ + --rpc-url $RPC_URL \ + --private-key $FUNDED_KEY \ + --constructor-args "${WAVS_SERVICE_MANAGER_ADDRESS}" > .docker/trigger.json + + # Create simplified deployment summary + jq -n \ + --arg service_id "" \ + --arg rpc_url "${RPC_URL}" \ + --arg wavs_service_manager "${WAVS_SERVICE_MANAGER_ADDRESS}" \ + --slurpfile submit .docker/submit.json \ + --slurpfile trigger .docker/trigger.json \ + '{ + service_id: $service_id, + rpc_url: $rpc_url, + wavs_service_manager: $wavs_service_manager, + evmpriceoracle_submit: $submit[0], + evmpriceoracle_trigger: $trigger[0], + }' \ + > .docker/deployment_summary.json + + sleep 1 fi -export PKG_VERSION="0.1.0" -if [ "$(sh ./script/get-deploy-status.sh)" = "TESTNET" ]; then - read -p "Enter the package version (default: ${PKG_VERSION}): " input_pkg_version - if [ -n "$input_pkg_version" ]; then - export PKG_VERSION="$input_pkg_version" - fi +### === Deploy Services === + +# Require component configuration file +COMPONENT_CONFIGS_FILE="config/components.json" + +if [ ! -f "$COMPONENT_CONFIGS_FILE" ]; then + echo "❌ Component configuration file not found: $COMPONENT_CONFIGS_FILE" + echo "Please run 'script/configure-components.sh init' to create the configuration." 
+ exit 1 fi -# ** Testnet Setup: https://wa.dev/account/credentials/new -> warg login -source script/upload-to-wasi-registry.sh || true -sleep 1 +echo "Using component configuration from: $COMPONENT_CONFIGS_FILE" # Testnet: set values (default: local if not set) -if [ "$(sh ./script/get-deploy-status.sh)" = "TESTNET" ]; then - export TRIGGER_CHAIN=holesky - export SUBMIT_CHAIN=holesky +if [ "$(task get-deploy-status)" = "TESTNET" ]; then + export TRIGGER_CHAIN=evm:$(task get-chain-id) + export SUBMIT_CHAIN=evm:$(task get-chain-id) +fi + +# Determine chain name based on deployment environment +if [ "$(task get-deploy-status)" = "TESTNET" ]; then + export CHAIN_NAME=evm:$(task get-chain-id) + export AGGREGATOR_TIMER_DELAYER_SECS=3 # base wait ~1 block +else + export CHAIN_NAME=evm:31337 # local + export AGGREGATOR_TIMER_DELAYER_SECS=0 +fi +echo "✅ Chain Name: ${CHAIN_NAME}" + +echo "📋 All configuration variables exported for component-specific substitution" + +# wait for STATUS_FILE to contain the status COMPLETED in its content, check every 0.5 seconds for up to 60 seconds then error +if [[ "${SKIP_COMPONENT_UPLOAD}" != "true" ]]; then + echo "Waiting for component uploads to complete..." + timeout 300 bash -c " + trap 'exit 130' INT TERM + while ! grep -q 'COMPLETED' '$STATUS_FILE' 2>/dev/null; do + sleep 0.5 + done + " + if [ $? -ne 0 ]; then + echo "❌ Component uploads did not complete in time or failed." + exit 1 + fi + echo "✅ All components uploaded successfully" + # clear tmp file + rm -f $STATUS_FILE + + seconds=2 + echo "Waiting for ${seconds} seconds for registry to update..." + sleep ${seconds} fi -# Package not found with wa.dev? -- make sure it is public -REGISTRY=${REGISTRY} source ./script/build-service.sh +# Create service with multiple workflows +echo "Creating service with multiple component workflows..." 
+export COMPONENT_CONFIGS_FILE="$COMPONENT_CONFIGS_FILE" +# All required variables are now exported for component-specific substitution +REGISTRY=$(task get-registry) source ./script/build-service.sh sleep 1 # === Upload service.json to IPFS === # local: 127.0.0.1:5001 | testnet: https://app.pinata.cloud/. set PINATA_API_KEY to JWT token in .env echo "Uploading to IPFS..." + +export PINATA_API_KEY=$(grep ^WAVS_ENV_PINATA_API_KEY= .env | cut -d '=' -f2-) +# if not LOCAL, ensure PINATA_API_KEY is set or PINATA_API_KEY. If neither, require input +if [ "$(task get-deploy-status)" != "LOCAL" ]; then + if [ -z "$PINATA_API_KEY" ]; then + read -p "Enter your Pinata JWT API Key (or set WAVS_ENV_PINATA_API_KEY in .env): " PINATA_API_KEY + if [ -z "$PINATA_API_KEY" ]; then + echo "❌ Pinata API Key is required for TESTNET deployments." + exit 1 + fi + export PINATA_API_KEY + fi + + read -p "Make any changes you want to the service.json now. Press [Enter] to continue upload to IPFS..." +fi + export ipfs_cid=`SERVICE_FILE=.docker/service.json make upload-to-ipfs` # LOCAL: http://127.0.0.1:8080 | TESTNET: https://gateway.pinata.cloud/ -export IPFS_GATEWAY="$(bash script/get-ipfs-gateway.sh)" +export IPFS_GATEWAY="$(task get-ipfs-gateway)" export IPFS_URI="ipfs://${ipfs_cid}" IPFS_URL="${IPFS_GATEWAY}${ipfs_cid}" echo "IPFS_URL=${IPFS_URL}" echo "Querying to verify IPFS upload... (120 second timeout)" curl ${IPFS_URL} --connect-timeout 120 --max-time 120 --show-error --fail +while [ $? -ne 0 ]; do + echo "IPFS upload not yet available. Please ensure the CID is correct and try again." 
+ read -p "Enter the IPFS URI (e.g., ipfs://bafkreicglpmavzsomzghbmemauv4i4jkxgaxsqefruxtplulul7o2sg33e): " IPFS_URI + ipfs_cid=$(echo $IPFS_URI | sed 's|ipfs://||') + IPFS_URL="${IPFS_GATEWAY}${ipfs_cid}" + curl ${IPFS_URL} --connect-timeout 120 --max-time 120 --show-error --fail +done -if [ "$DEPLOYER_PK" ]; then + +if [ "$FUNDED_KEY" ]; then echo "" echo "Setting service URI on WAVS Service Manager..." - cast send ${WAVS_SERVICE_MANAGER_ADDRESS} 'setServiceURI(string)' "${IPFS_URI}" -r ${RPC_URL} --private-key ${DEPLOYER_PK} + # if ` Error: Failed to estimate gas: server returned an error response: error code 3: execution reverted, data: "0x"`, then ServiceManager upload failed. retry + cast send ${WAVS_SERVICE_MANAGER_ADDRESS} 'setServiceURI(string)' "${IPFS_URI}" -r ${RPC_URL} --private-key ${FUNDED_KEY} fi echo "IPFS_GATEWAY=${IPFS_GATEWAY}" @@ -102,42 +221,62 @@ sleep 1 bash ./script/create-aggregator.sh 1 IPFS_GATEWAY=${IPFS_GATEWAY} bash ./infra/aggregator-1/start.sh -sleep 1 -wget -q --header="Content-Type: application/json" --post-data="{\"uri\": \"${IPFS_URI}\"}" ${AGGREGATOR_URL}/register-service -O - +sleep 3 +curl -s -X POST -H "Content-Type: application/json" -d "{ + \"service_manager\": { + \"evm\": { + \"chain\": \"${CHAIN_NAME}\", + \"address\": \"${WAVS_SERVICE_MANAGER_ADDRESS}\" + } + } +}" ${AGGREGATOR_URL}/services ### === Start WAVS === bash ./script/create-operator.sh 1 IPFS_GATEWAY=${IPFS_GATEWAY} bash ./infra/wavs-1/start.sh -sleep 5 +sleep 3 # Deploy the service JSON to WAVS so it now watches and submits. 
# 'opt in' for WAVS to watch (this is before we register to Eigenlayer) WAVS_ENDPOINT=http://127.0.0.1:8000 SERVICE_URL=${IPFS_URI} IPFS_GATEWAY=${IPFS_GATEWAY} make deploy-service +sleep 3 + +export SERVICE_ID=${SERVICE_ID:-`task config:service-id`} +if [ -z "$SERVICE_ID" ]; then + echo "❌ Failed to retrieve service ID" + exit 1 +fi +echo "✅ Service ID: ${SERVICE_ID}" + +# Update the deployment summary with the service ID +jq ".service_id = \"${SERVICE_ID}\"" .docker/deployment_summary.json > .docker/deployment_summary.json.tmp +mv .docker/deployment_summary.json.tmp .docker/deployment_summary.json ### === Register service specific operator === # OPERATOR_PRIVATE_KEY, AVS_SIGNING_ADDRESS -SERVICE_INDEX=0 source ./script/avs-signing-key.sh - -# TODO: move this check into the middleware (?) -if [ "$(sh ./script/get-deploy-status.sh)" = "TESTNET" ]; then - export OPERATOR_ADDRESS=$(cast wallet address --private-key ${OPERATOR_PRIVATE_KEY}) - while true; do - BALANCE=$(cast balance ${OPERATOR_ADDRESS} --rpc-url ${RPC_URL} --ether) - if [ "$BALANCE" != "0" ]; then - echo "OPERATOR_ADDRESS has balance: $BALANCE" - break - else - echo "Waiting for ${OPERATOR_ADDRESS} (operator) to have a balance..." - sleep 5 - fi - done -fi +eval "$(task setup-avs-signing HD_INDEX=1 | tail -4)" -export WAVS_SERVICE_MANAGER_ADDRESS=$(jq -r .addresses.WavsServiceManager ./.nodes/avs_deploy.json) -COMMAND="register ${OPERATOR_PRIVATE_KEY} ${AVS_SIGNING_ADDRESS} 0.001ether" make wavs-middleware +# Reset registry after deployment is complete +echo "Cleaning up registry data..." 
+REGISTRY=$(task get-registry) +if [ -n "$REGISTRY" ]; then + PROTOCOL="https" + if [[ "$REGISTRY" == *"localhost"* ]] || [[ "$REGISTRY" == *"127.0.0.1"* ]]; then + PROTOCOL="http" + fi + warg reset --registry ${PROTOCOL}://${REGISTRY} || echo "Registry reset failed (non-critical)" +fi -# Verify registration -COMMAND="list_operators" PAST_BLOCKS=500 make wavs-middleware +# Remove trap for normal exit +trap - INT TERM EXIT echo "✅ Deployment complete!" + +# if post-deploy.sh exists, run it +if [ -f "script/post-deploy.sh" ]; then + echo "Running post-deploy.sh..." + bash script/post-deploy.sh + + echo "✅ post-deploy.sh completed!" +fi diff --git a/script/get-deploy-status.sh b/script/get-deploy-status.sh deleted file mode 100644 index a2e3cc71..00000000 --- a/script/get-deploy-status.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -cd `git rev-parse --show-toplevel` || exit - -if [ ! -f .env ]; then - cp .env.example .env - if [ $? -ne 0 ]; then - echo "Failed to copy .env.example to .env" - return - fi -fi - -# Extract DEPLOY_ENV from the file -DEPLOY_ENV=$(grep "^DEPLOY_ENV=" .env | cut -d '=' -f2) - -DEPLOY_ENV=$(echo "$DEPLOY_ENV" | tr '[:lower:]' '[:upper:]') - -echo "$DEPLOY_ENV" diff --git a/script/get-ipfs-gateway.sh b/script/get-ipfs-gateway.sh deleted file mode 100644 index 9d26f9fb..00000000 --- a/script/get-ipfs-gateway.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -cd `git rev-parse --show-toplevel` || exit - -DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) - -if [ "$DEPLOY_ENV" = "LOCAL" ]; then - IPFS_GATEWAY=http://127.0.0.1:8080/ipfs/ -elif [ "$DEPLOY_ENV" = "TESTNET" ]; then - IPFS_GATEWAY=https://gateway.pinata.cloud/ipfs/ -else - echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" - return -fi - -echo "${IPFS_GATEWAY}" diff --git a/script/get-registry.sh b/script/get-registry.sh deleted file mode 100644 index 2471c444..00000000 --- a/script/get-registry.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -cd `git rev-parse --show-toplevel` || exit - 
-DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) - -if [ "$DEPLOY_ENV" = "LOCAL" ]; then - REGISTRY=localhost:8090 -elif [ "$DEPLOY_ENV" = "TESTNET" ]; then - REGISTRY=wa.dev -else - echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" - return -fi - -echo "${REGISTRY}" diff --git a/script/get-rpc.sh b/script/get-rpc.sh deleted file mode 100644 index e0ad65c3..00000000 --- a/script/get-rpc.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -cd `git rev-parse --show-toplevel` || exit - -DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) - -if [ "$DEPLOY_ENV" = "LOCAL" ]; then - RPC_URL=$(grep "^LOCAL_ETHEREUM_RPC_URL=" .env) -elif [ "$DEPLOY_ENV" = "TESTNET" ]; then - RPC_URL=$(grep "^TESTNET_RPC_URL=" .env) -else - echo "Unknown DEPLOY_ENV: $DEPLOY_ENV" - return -fi - -echo "${RPC_URL}" | cut -d '=' -f2 diff --git a/script/get-wasi-namespace.sh b/script/get-wasi-namespace.sh deleted file mode 100644 index 20b8670d..00000000 --- a/script/get-wasi-namespace.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -if [ -z "$REGISTRY" ]; then - echo "REGISTRY is not set. Please set the REGISTRY environment variable." 
&& return -fi - -# === - -cd `git rev-parse --show-toplevel` || exit - -DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) - -if [ "$DEPLOY_ENV" = "LOCAL" ]; then - export PKG_NAMESPACE="example" - echo ${PKG_NAMESPACE} - exit 0 -else - read -p "Enter the PKG_NAMESPACE for ${REGISTRY}: " namespace - - export PKG_NAMESPACE="${namespace}" - echo "${PKG_NAMESPACE}" -fi diff --git a/script/start_all.sh b/script/start_all.sh deleted file mode 100644 index b4d076c3..00000000 --- a/script/start_all.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -set -e - -if [ -f .env ] && grep -q '^TESTNET_RPC_URL=' .env; then - TESTNET_RPC_URL=$(grep -E '^TESTNET_RPC_URL=' .env | cut -d '=' -f2- | tr -d '"') -else - rpc_url="https://holesky.drpc.org" - echo "No TESTNET_RPC_URL found in .env, using default ${rpc_url}" - TESTNET_RPC_URL=${rpc_url} -fi - -PORT=8545 -MIDDLEWARE_IMAGE=ghcr.io/lay3rlabs/wavs-middleware:0.4.0 -FORK_RPC_URL=${FORK_RPC_URL:-"${TESTNET_RPC_URL}"} -DEPLOY_ENV=$(sh ./script/get-deploy-status.sh) - -## == Start watcher == -rm $LOG_FILE 2> /dev/null || true - - -## == Base Anvil Testnet Fork == -if [ "$DEPLOY_ENV" = "TESTNET" ]; then - echo "Running in testnet mode, nothing to do" - exit 0 -fi - -if [ "$DEPLOY_ENV" = "LOCAL" ]; then - anvil --fork-url ${FORK_RPC_URL} --port ${PORT} & - anvil_pid=$! - trap "kill -9 $anvil_pid && echo -e '\nKilled anvil'" EXIT - while ! cast block-number --rpc-url http://localhost:${PORT} > /dev/null 2>&1 - do - sleep 0.25 - done - - FILES="-f docker-compose.yml -f telemetry/docker-compose.yml" - docker compose ${FILES} pull - docker compose ${FILES} up --force-recreate -d - trap "docker compose ${FILES} down --remove-orphans && docker kill wavs-1 wavs-aggregator-1 > /dev/null 2>&1 && echo -e '\nKilled IPFS + Local WARG, and wavs instances'" EXIT - - echo "Started..." 
- wait -fi diff --git a/script/upload-components-background.sh b/script/upload-components-background.sh new file mode 100755 index 00000000..ab6c20db --- /dev/null +++ b/script/upload-components-background.sh @@ -0,0 +1,209 @@ +#!/bin/bash + +log() { + echo "🔧 [Upload] $1" +} + +log "Starting component upload..." + +REPO_ROOT=$(git rev-parse --show-toplevel) || exit 1 +cd "$REPO_ROOT" || exit 1 + +STATUS_FILE=${STATUS_FILE:-".docker/component-upload-status"} +COMPONENT_CONFIGS_FILE="config/components.json" + +mkdir -p .docker +echo "UPLOADING" > "$STATUS_FILE" + +cleanup_on_error() { + log "❌ Upload failed" + echo "ERROR" > "$STATUS_FILE" + # Clean up any remaining temp directories + find /tmp -maxdepth 1 -name "warg_home_*" -type d -exec rm -rf {} + 2>/dev/null || true + # Kill background jobs + jobs -p | xargs -r kill 2>/dev/null || true + exit 1 +} + +trap cleanup_on_error INT TERM + +if [ ! -f "$COMPONENT_CONFIGS_FILE" ]; then + log "❌ Config file not found: $COMPONENT_CONFIGS_FILE" + echo "ERROR" > "$STATUS_FILE" + exit 1 +fi + +log "📦 Building components if needed..." + +# Function to get modified components from git status +get_modified_components() { + git status --porcelain | grep -v "bindings.rs" | grep "^.* components/" | \ + sed 's|^.* components/||' | cut -d'/' -f1 | sort -u +} + +# Function to build specific component +build_component() { + local component="$1" + local component_dir="components/$component" + + if [ -f "$component_dir/Makefile" ] && grep -q "^wasi-build:" "$component_dir/Makefile" 2>/dev/null; then + log "Building component: $component" + make -s -C "$component_dir" wasi-build + return $? + else + log "⚠️ No wasi-build target found for component : $component" + return 1 + fi +} + +# Check if compiled directory exists or is empty +if [ ! -d compiled/ ] || [ -z "$(find compiled/ -name '*.wasm')" ]; then + log "Building all components (compiled/ missing or empty)..." 
+ warg reset || echo "warg reset failed (warg server not started), continuing..." + task build:wasi +else + # Check for modified components + modified_components=$(get_modified_components) + + if [ -n "$modified_components" ]; then + log "Changes detected in components: $(echo $modified_components | tr '\n' ' ')" + warg reset || echo "warg reset failed (warg server not started), continuing..." + + # Build only modified components + build_failed=0 + for component in $modified_components; do + if ! build_component "$component"; then + build_failed=1 + log "❌ Failed to build component: $component" + fi + done + + if [ $build_failed -eq 1 ]; then + log "⚠️ Some components failed to build, falling back to full build..." + task build:wasi + fi + else + log "✅ No component changes detected, skipping build" + fi +fi + +upload_package() { + local component_json="$1" + local num="$2" + + local DISABLED=$(echo "$component_json" | jq -r '.disabled // false') + local PKG_NAME=$(echo "$component_json" | jq -r '.package_name') + + if [ "$DISABLED" = "true" ]; then + log "[$num] ⚠️ ${PKG_NAME} is disabled, skipping upload" + return 0 + fi + + local COMPONENT_FILENAME=$(echo "$component_json" | jq -r '.filename') + local PKG_VERSION=$(echo "$component_json" | jq -r '.package_version') + local component_file="./compiled/${COMPONENT_FILENAME}" + + if [ ! 
-f "$component_file" ]; then + log "[$num] ❌ File not found: $component_file" + return 1 + fi + + local REGISTRY=$(task get-registry) + local PKG_NAMESPACE=$(task get-wasi-namespace REGISTRY="$REGISTRY") + + if [ -z "$REGISTRY" ] || [ -z "$PKG_NAMESPACE" ]; then + log "[$num] ❌ Registry config missing for ${PKG_NAME}" + return 1 + fi + + local PROTOCOL="https" + if [[ "$REGISTRY" == *"localhost"* ]] || [[ "$REGISTRY" == *"127.0.0.1"* ]]; then + PROTOCOL="http" + fi + + local REGISTRY_URL="${PROTOCOL}://${REGISTRY}" + local FULL_PKG_NAME="${PKG_NAMESPACE}:${PKG_NAME}" + + # Create unique temp directory for warg storage (bypasses locking) + local temp_home=$(mktemp -d -t warg_home_XXXXXX) + + log "[$num] 🚀 Uploading ${PKG_NAME} (${FULL_PKG_NAME}@${PKG_VERSION}) to ${REGISTRY_URL}..." + + # Set environment to use isolated storage (avoids .lock files) + export WARG_HOME="${temp_home}" + local output=$(warg config --registry "${REGISTRY_URL}" 2>&1 && \ + warg publish release --name "${FULL_PKG_NAME}" --version "${PKG_VERSION}" "${component_file}" --no-wait 2>&1) + local exit_code=$? + rm -rf "${temp_home}" + + if [ $exit_code -eq 0 ] || [[ "$output" =~ "already released" ]] || [[ "$output" =~ "failed to prove inclusion" ]]; then + log "[$num] ✅ ${PKG_NAME} uploaded" + return 0 + else + log "[$num] ❌ ${PKG_NAME} failed: ${output}" + return 1 + fi +} + +log "📤 Starting component uploads..." + +if ! 
command -v warg >/dev/null 2>&1; then + log "❌ warg command not found" + echo "ERROR" > "$STATUS_FILE" + exit 1 +fi + +TOTAL_COMPONENTS=$(jq -r '.components | length' "$COMPONENT_CONFIGS_FILE") +TOTAL_AGGREGATOR_COMPONENTS=$(jq -r '.aggregator_components | length' "$COMPONENT_CONFIGS_FILE") + +if [ $TOTAL_COMPONENTS -eq 0 ] && [ $TOTAL_AGGREGATOR_COMPONENTS -eq 0 ]; then + log "❌ No components found" + echo "ERROR" > "$STATUS_FILE" + exit 1 +fi + +log "📊 Found $TOTAL_COMPONENTS components and $TOTAL_AGGREGATOR_COMPONENTS aggregator components" + +# Upload components in parallel +pids=() +component_num=0 + +# Upload regular components +while IFS= read -r component; do + component_num=$((component_num + 1)) + upload_package "$component" "$component_num" & + pids+=($!) +done < <(jq -r '.components | unique_by(.filename)[] | @json' "$COMPONENT_CONFIGS_FILE") + +# Upload aggregator components +while IFS= read -r component; do + component_num=$((component_num + 1)) + upload_package "$component" "$component_num" & + pids+=($!) +done < <(jq -r '.aggregator_components | unique_by(.filename)[] | @json' "$COMPONENT_CONFIGS_FILE") + +# Wait for all uploads +successful=0 +failed=0 + +for pid in "${pids[@]}"; do + if wait $pid 2>/dev/null; then + successful=$((successful + 1)) + else + failed=$((failed + 1)) + fi +done + +log "" +log "📊 Results: ✅ ${successful} success, ❌ ${failed} failed" + +if [ $failed -eq 0 ]; then + log "🎉 All uploads completed!" + log "(( if this hangs it's the solidity, not the uploads! Wait a lil bit buddy... 
))" + echo "COMPLETED" > "$STATUS_FILE" + exit 0 +else + log "⚠️ Some uploads failed" + echo "ERROR" > "$STATUS_FILE" + exit 1 +fi diff --git a/script/upload-to-wasi-registry.sh b/script/upload-to-wasi-registry.sh deleted file mode 100644 index 2e20c9a4..00000000 --- a/script/upload-to-wasi-registry.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash - -export REGISTRY=`bash ./script/get-registry.sh` -if [ -z "$REGISTRY" ]; then - echo "REGISTRY is not set. Please set the REGISTRY environment variable." && return -fi -export PKG_NAMESPACE=`bash ./script/get-wasi-namespace.sh` -if [ -z "$PKG_NAMESPACE" ]; then - echo "PKG_NAMESPACE is not set. Please set the PKG_NAMESPACE environment variable." && return -fi - - -if [ -z "$PKG_NAME" ]; then - echo "PKG_NAME is not set. Please set the PKG_NAME environment variable." && return -fi -if [ -z "$PKG_VERSION" ]; then - echo "PKG_VERSION is not set. Please set the PKG_VERSION environment variable." && return -fi -if [ -z "$COMPONENT_FILENAME" ]; then - echo "COMPONENT_FILENAME is not set. Please set the COMPONENT_FILENAME environment variable." && return -fi - -# === - -cd `git rev-parse --show-toplevel` || exit - -PROTOCOL="https" -if [[ "$REGISTRY" == *"localhost"* ]] || [[ "$REGISTRY" == *"127.0.0.1"* ]]; then - PROTOCOL="http" -fi -echo "Publishing to registry (${PROTOCOL}://${REGISTRY})..." - - -output=$(warg publish release --registry ${PROTOCOL}://${REGISTRY} --name ${PKG_NAMESPACE}:${PKG_NAME} --version ${PKG_VERSION} ./compiled/${COMPONENT_FILENAME} 2>&1) -exit_code=$? -warg reset --registry ${PROTOCOL}://${REGISTRY} - -# Check for specific error conditions in the output -if [[ $exit_code -ne 0 ]]; then - if [[ "$output" =~ "failed to prove inclusion" ]]; then - echo "Package uploaded to local registry successfully..." 
- elif [[ "$output" =~ "error sending request for url" ]]; then - echo "NOTE: Check to make sure you are running the registry locally" - echo "${output}" - else - echo "Unknown error occurred ${output}" - fi -fi diff --git a/src/common/CmdRunner.sol b/src/common/CmdRunner.sol deleted file mode 100644 index f783b471..00000000 --- a/src/common/CmdRunner.sol +++ /dev/null @@ -1,29 +0,0 @@ - - -// SPDX-License-Identifier: MIT -pragma solidity 0.8.22; - -import "forge-std/Script.sol"; - -contract CmdRunner is Script{ - // `ffi = true` must be set in foundry.toml - // - // Run a command and return the output by creating a temporary script with - // the entire command and running it via bash. This gets around the limits - // of FFI, such as not being able to pipe between two commands. - // string memory entry = runCmd(string.concat("curl -s ", url, " | jq -c .tree[0]")); - function runCmd(string memory cmd) external returns (string memory) { - string memory script = string.concat(vm.projectRoot(), "/.ffirun.sh"); - // Save the cmd to a file - vm.writeFile(script, cmd); - // Run the cmd - string[] memory exec = new string[](2); - exec[0] = "bash"; - exec[1] = script; - string memory result = string(vm.ffi(exec)); - // Delete the file - vm.removeFile(script); - // Return the result - return result; - } -} diff --git a/src/contracts/WavsSubmit.sol b/src/contracts/WavsSubmit.sol index 95bb1999..4174c6ea 100644 --- a/src/contracts/WavsSubmit.sol +++ b/src/contracts/WavsSubmit.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; -import {IWavsServiceManager} from "@wavs/interfaces/IWavsServiceManager.sol"; -import {IWavsServiceHandler} from "@wavs/interfaces/IWavsServiceHandler.sol"; +import {IWavsServiceManager} from "@wavs/src/eigenlayer/ecdsa/interfaces/IWavsServiceManager.sol"; +import {IWavsServiceHandler} from "@wavs/src/eigenlayer/ecdsa/interfaces/IWavsServiceHandler.sol"; import {ITypes} from "interfaces/ITypes.sol"; 
contract SimpleSubmit is ITypes, IWavsServiceHandler { diff --git a/src/contracts/WavsTrigger.sol b/src/contracts/WavsTrigger.sol index 58cd0c83..39cf6c9d 100644 --- a/src/contracts/WavsTrigger.sol +++ b/src/contracts/WavsTrigger.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {ISimpleTrigger} from "interfaces/IWavsTrigger.sol"; diff --git a/src/interfaces/ITypes.sol b/src/interfaces/ITypes.sol index 816aa6c5..80a9fa1b 100644 --- a/src/interfaces/ITypes.sol +++ b/src/interfaces/ITypes.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; interface ITypes { /** diff --git a/src/interfaces/IWavsTrigger.sol b/src/interfaces/IWavsTrigger.sol index 1bd9e36d..b7ad88bb 100644 --- a/src/interfaces/IWavsTrigger.sol +++ b/src/interfaces/IWavsTrigger.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {ITypes} from "interfaces/ITypes.sol"; diff --git a/script/ShowResult.s.sol b/src/script/ShowResult.s.sol similarity index 98% rename from script/ShowResult.s.sol rename to src/script/ShowResult.s.sol index 837f27ba..4394de3b 100644 --- a/script/ShowResult.s.sol +++ b/src/script/ShowResult.s.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {SimpleTrigger} from "contracts/WavsTrigger.sol"; import {SimpleSubmit} from "contracts/WavsSubmit.sol"; diff --git a/script/Trigger.s.sol b/src/script/Trigger.s.sol similarity index 96% rename from script/Trigger.s.sol rename to src/script/Trigger.s.sol index 7a592a97..6844e42a 100644 --- a/script/Trigger.s.sol +++ b/src/script/Trigger.s.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {SimpleTrigger} from "contracts/WavsTrigger.sol"; import {ITypes} from "interfaces/ITypes.sol"; diff --git a/test/unit/WavsTrigger.t.sol b/test/unit/WavsTrigger.t.sol index 
624fa3d1..9c9a5e1f 100644 --- a/test/unit/WavsTrigger.t.sol +++ b/test/unit/WavsTrigger.t.sol @@ -1,5 +1,5 @@ // SPDX-License-Identifier: MIT -pragma solidity 0.8.22; +pragma solidity 0.8.27; import {Test} from "forge-std/Test.sol"; import {SimpleTrigger} from "contracts/WavsTrigger.sol"; diff --git a/wavs.toml b/wavs.toml index c12a1f86..a97b3970 100644 --- a/wavs.toml +++ b/wavs.toml @@ -29,67 +29,50 @@ log_level = ["info", "wavs=debug"] # Chain configurations # ---------------------------- -# == Ethereum chains == - -# Mainnet -[default.chains.evm.ethereum] -chain_id = "1" -ws_endpoint = "wss://eth.drpc.org" -http_endpoint = "https://eth.drpc.org" - -[default.chains.evm.base] -chain_id = "8453" -ws_endpoint = "wss://base.drpc.org" -http_endpoint = "https://base.drpc.org" - -[default.chains.evm.optimism] -chain_id = "10" -ws_endpoint = "wss://optimism.drpc.org" -http_endpoint = "https://optimism.drpc.org" - -[default.chains.evm.arbitrum] -chain_id = "42161" -ws_endpoint = "wss://arbitrum.drpc.org" -http_endpoint = "https://arbitrum.drpc.org" - -# Local / Testnet -[default.chains.evm.local] -chain_id = "31337" +# Cosmos chains +# [default.chains.cosmos.layer-local] +# bech32_prefix = "layer" +# rpc_endpoint = "http://localhost:26657" +# grpc_endpoint = "http://localhost:9090" +# gas_price = 0.025 +# gas_denom = "uslay" +# faucet_endpoint = "http://localhost:8000" + +# [default.chains.cosmos.pion-1] +# bech32_prefix = "neutron" +# rpc_endpoint = "https://rpc-falcron.pion-1.ntrn.tech" +# grpc_endpoint = "http://grpc-falcron.pion-1.ntrn.tech:80" +# gas_price = 0.0053 +# gas_denom = "untrn" + +# EVM chains +[default.chains.evm.31337] # Local: anvil ws_endpoint = "ws://localhost:8545" http_endpoint = "http://localhost:8545" poll_interval_ms = 7000 -[default.chains.evm.local2] -chain_id = "31338" -ws_endpoint = "ws://localhost:8645" -http_endpoint = "http://localhost:8645" -poll_interval_ms = 7000 - -[default.chains.evm.sepolia] -chain_id = "11155111" -ws_endpoint = 
"wss://ethereum-sepolia-rpc.publicnode.com" -http_endpoint = "https://ethereum-sepolia-rpc.publicnode.com" - -[default.chains.evm.holesky] -chain_id = "17000" -ws_endpoint = "wss://ethereum-holesky-rpc.publicnode.com" -http_endpoint = "https://ethereum-holesky-rpc.publicnode.com" - -[default.chains.evm.holesky-fork] -chain_id = "17000" -ws_endpoint = "ws://localhost:8545" -http_endpoint = "http://localhost:8545" -poll_interval_ms = 7000 - -# == Cosmos chains == - -[default.chains.cosmos.neutron] -chain_id = "pion-1" -bech32_prefix = "neutron" -rpc_endpoint = "https://rpc-falcron.pion-1.ntrn.tech" -grpc_endpoint = "http://grpc-falcron.pion-1.ntrn.tech:80" -gas_price = 0.0053 -gas_denom = "untrn" +# [default.chains.evm.17000] # Testnet: holesky | https://holesky-faucet.pk910.de/ +# ws_endpoint = "wss://ethereum-holesky-rpc.publicnode.com" +# http_endpoint = "https://ethereum-holesky-rpc.publicnode.com" +# poll_interval_ms = 7000 + +# [default.chains.evm.11155111] # Testnet: Sepolia | https://sepolia-faucet.pk910.de/ +# ws_endpoint = "wss://ethereum-sepolia-rpc.publicnode.com" +# http_endpoint = "https://ethereum-sepolia-rpc.publicnode.com" +# poll_interval_ms = 7000 + +# [default.chains.evm.8453] # Base L2 Mainnet | https://superbridge.app/base +# ws_endpoint = "wss://base-rpc.publicnode.com" +# http_endpoint = "https://base-rpc.publicnode.com" +# poll_interval_ms = 7000 +# event_channel_size = 100000 + +# [default.chains.dev.holesky-fork] +# type = "evm" +# chain_id = "17000" +# ws_endpoint = "ws://localhost:8545" +# http_endpoint = "http://localhost:8545" +# poll_interval_ms = 7000 # ---------------------------- # WAVS specific settings @@ -99,6 +82,11 @@ gas_denom = "untrn" # The directory to store the data. 
Default is "/var/wavs" data = "~/wavs" +# Optional bearer token to protect mutating HTTP endpoints +# If set here or via env var `WAVS_BEARER_TOKEN`, POST/DELETE endpoints require `Authorization: Bearer ` +# Generate with `openssl rand -hex 32` +# bearer_token = "change-me" + cors_allowed_origins = [ "https://lay3rlabs.github.io/*", "http://localhost:*", @@ -140,6 +128,11 @@ cors_allowed_origins = [ "http://127.0.0.1:*", ] +# Optional bearer token to protect mutating HTTP endpoints +# If set here or via env var `WAVS_AGGREGATOR_BEARER_TOKEN`, POST endpoints require `Authorization: Bearer ` +# Generate with `openssl rand -hex 32` +# bearer_token = "change-me" + # Aggregator-specific chain overrides (if needed) # Example: # [aggregator.chains.evm.local] From f400b52338bfa54ef311ad389864f5b69cfe0675 Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 13:10:14 -0500 Subject: [PATCH 5/6] remove go & js from v1 (will add back later) --- components/golang-evm-price-oracle/Makefile | 29 - components/golang-evm-price-oracle/README.md | 80 - .../golang-evm-price-oracle/config.json | 14 - components/golang-evm-price-oracle/go.mod | 30 - components/golang-evm-price-oracle/go.sum | 35 - components/golang-evm-price-oracle/src/cmc.go | 30 - .../golang-evm-price-oracle/src/main.go | 157 -- components/golang-evm-price-oracle/wkg.lock | 12 - components/js-evm-price-oracle/Makefile | 36 - components/js-evm-price-oracle/README.md | 49 - components/js-evm-price-oracle/config.json | 14 - components/js-evm-price-oracle/index.ts | 177 -- .../js-evm-price-oracle/package-lock.json | 2067 ----------------- components/js-evm-price-oracle/package.json | 18 - components/js-evm-price-oracle/trigger.ts | 97 - 15 files changed, 2845 deletions(-) delete mode 100644 components/golang-evm-price-oracle/Makefile delete mode 100644 components/golang-evm-price-oracle/README.md delete mode 100644 components/golang-evm-price-oracle/config.json delete mode 100644 
components/golang-evm-price-oracle/go.mod delete mode 100644 components/golang-evm-price-oracle/go.sum delete mode 100644 components/golang-evm-price-oracle/src/cmc.go delete mode 100644 components/golang-evm-price-oracle/src/main.go delete mode 100644 components/golang-evm-price-oracle/wkg.lock delete mode 100644 components/js-evm-price-oracle/Makefile delete mode 100644 components/js-evm-price-oracle/README.md delete mode 100644 components/js-evm-price-oracle/config.json delete mode 100644 components/js-evm-price-oracle/index.ts delete mode 100644 components/js-evm-price-oracle/package-lock.json delete mode 100644 components/js-evm-price-oracle/package.json delete mode 100644 components/js-evm-price-oracle/trigger.ts diff --git a/components/golang-evm-price-oracle/Makefile b/components/golang-evm-price-oracle/Makefile deleted file mode 100644 index d981b359..00000000 --- a/components/golang-evm-price-oracle/Makefile +++ /dev/null @@ -1,29 +0,0 @@ -WAVS_PACKAGE=wavs:worker@0.4.0 -WAVS_WIT_WORLD=wavs:worker/layer-trigger-world -# get the first line of the go.mod and get the last element after the last /, then replace - with _ -GO_MOD_NAME?=$(shell head -n 1 go.mod | cut -d' ' -f2 | rev | cut -d'/' -f1 | rev | tr '-' '_') - -OUTPUT_DIR?=../../compiled - -check-package: - @if [ ! 
-f $(WAVS_PACKAGE).wasm ]; then \ - echo "Downloading WAVS package: $(WAVS_PACKAGE)"; \ - wkg get ${WAVS_PACKAGE} --overwrite --format wasm --output ${WAVS_PACKAGE}.wasm; \ - fi - -## wasi-build: building the WAVS wasi component -wasi-build: check-package - @echo "Building component: $(GO_MOD_NAME) [takes a few seconds...]" - @go mod tidy - @mkdir -p $(OUTPUT_DIR) - @tinygo build -target=wasip2 -o $(OUTPUT_DIR)/$(GO_MOD_NAME).wasm --wit-package $(WAVS_PACKAGE).wasm --wit-world $(WAVS_WIT_WORLD) ./src - @echo "WASI component built: $(GO_MOD_NAME).wasm" -.PHONY: wasi-build - -.PHONY: help -help: Makefile - @echo - @echo " Choose a command run" - @echo - @sed -n 's/^##//p' $< | column -t -s ':' | sed -e 's/^/ /' - @echo diff --git a/components/golang-evm-price-oracle/README.md b/components/golang-evm-price-oracle/README.md deleted file mode 100644 index b7122b0b..00000000 --- a/components/golang-evm-price-oracle/README.md +++ /dev/null @@ -1,80 +0,0 @@ -# Golang Ethereum Price Oracle - -A WAVS component that fetches the price of a crypto currency from CoinMarketCap and returns it to the Ethereum contract, in Go. 
- -## System Setup - -### Mac - -```bash docci-os=mac -brew tap tinygo-org/tools -brew install tinygo -``` - -### Arch Linux - -```bash docci-ignore -sudo pacman -Sy tinygo -``` - -### Ubuntu Linux - -```bash docci-os=linux docci-if-not-installed="tinygo" -# https://tinygo.org/getting-started/install/linux/ -wget --quiet https://github.com/tinygo-org/tinygo/releases/download/v0.37.0/tinygo_0.37.0_amd64.deb -sudo dpkg -i tinygo_0.37.0_amd64.deb && rm tinygo_0.37.0_amd64.deb -``` - -## Core Packages - -```bash docci-if-not-installed="cast" -curl -L https://foundry.paradigm.xyz | bash && $HOME/.foundry/bin/foundryup -``` - -```bash -make setup -``` - -```bash docci-if-not-installed="cargo-binstall" -cargo install cargo-binstall -``` - -```bash docci-if-not-installed="wasm-tools" -cargo binstall wasm-tools --no-confirm -``` - - -```bash occi-if-not-installed="wit-bindgen-go" -go install go.bytecodealliance.org/cmd/wit-bindgen-go@ecfa620df5beee882fb7be0740959e5dfce9ae26 -wit-bindgen-go --version -``` - -## Verify installs - -```bash -tinygo version -wkg --version -``` - -## Build Component - -Build all wasi components from the root of the repo. You can also run this command within each component directory. - -```bash -# Builds only this component, not all. -WASI_BUILD_DIR=golang-evm-price-oracle make wasi-build -``` - -## Execute Component - -Run the component with the `wasi-exec` command in the root of the repo - -```bash docci-output-contains="LTC" -COMPONENT_FILENAME=golang_evm_price_oracle.wasm INPUT_DATA=2 make wasi-exec-fixed -``` - ---- - -## Run main README - -Run through the main readme, but use `export COMPONENT_FILENAME=golang_evm_price_oracle.wasm` instead of the default. 
diff --git a/components/golang-evm-price-oracle/config.json b/components/golang-evm-price-oracle/config.json deleted file mode 100644 index 86a49bce..00000000 --- a/components/golang-evm-price-oracle/config.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "__tool": "github.com/reecepbcups/docci", - "paths": [ - "components/golang-evm-price-oracle/README.md" - ], - "env_vars": {}, - "pre_cmds": [], - "log_level": "ERROR", - "cleanup_cmds": [ - "killall anvil || true", - "docker compose rm --stop --force --volumes || true", - "docker rm -f wavs wavs-aggregator wavs-deploy-service-manager wavs-deploy-eigenlayer || true" - ] - } diff --git a/components/golang-evm-price-oracle/go.mod b/components/golang-evm-price-oracle/go.mod deleted file mode 100644 index 29fddb4a..00000000 --- a/components/golang-evm-price-oracle/go.mod +++ /dev/null @@ -1,30 +0,0 @@ -module github.com/Lay3rLabs/wavs-foundry-template/components/golang-evm-price-oracle - -go 1.24.3 - -replace ( - // fix: tinygo >0.35 support - github.com/defiweb/go-eth => github.com/Reecepbcups/go-eth v0.7.1 - // fix: assignment mismatch: 3 variables but rlp.Decode returns 2 values - github.com/defiweb/go-rlp => github.com/defiweb/go-rlp v0.3.0 - // namespace import fix - github.com/dev-wasm/dev-wasm-go/lib => github.com/Reecepbcups/dev-wasm-go/lib v1.0.0 -) - -require ( - github.com/Lay3rLabs/wavs-wasi/go v0.4.0 - github.com/dev-wasm/dev-wasm-go/lib v0.0.0-20240907000152-b653306ed695 - go.bytecodealliance.org/cm v0.2.2 -) - -require ( - github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect - github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect - github.com/defiweb/go-anymapper v0.3.0 // indirect - github.com/defiweb/go-eth v0.7.0 // indirect - github.com/defiweb/go-rlp v0.3.0 // indirect - github.com/defiweb/go-sigparser v0.6.0 // indirect - github.com/stretchr/testify v1.10.0 // indirect - golang.org/x/crypto v0.36.0 // indirect - golang.org/x/sys v0.31.0 // indirect -) diff --git 
a/components/golang-evm-price-oracle/go.sum b/components/golang-evm-price-oracle/go.sum deleted file mode 100644 index 3d259d89..00000000 --- a/components/golang-evm-price-oracle/go.sum +++ /dev/null @@ -1,35 +0,0 @@ -github.com/Lay3rLabs/wavs-wasi/go v0.4.0 h1:2GFt3coPnonCY7fa5nY5kErZIBKZ725zlXnoH1NVIik= -github.com/Lay3rLabs/wavs-wasi/go v0.4.0/go.mod h1:+YuQa+Asl1vAeeXewN3ZAnLaP19aQMGrQU0bWWmvSpw= -github.com/Reecepbcups/dev-wasm-go/lib v1.0.0 h1:wehTQiW+QP71bX24iVZB5xxZdpbg9qcVb81zYvrvf4Y= -github.com/Reecepbcups/dev-wasm-go/lib v1.0.0/go.mod h1:IAZDQaELmHlCjhQkmfnFesp2pAymqgvlPak5JEdNu4c= -github.com/Reecepbcups/go-eth v0.7.1 h1:dH1E85arpYZAG33m1FPvFsEClgwPF3qXzWQW3oCmV1I= -github.com/Reecepbcups/go-eth v0.7.1/go.mod h1:3WyudW93MqSWCPn69jWe4fbmKNIx1Q9hEp2kxY24Alo= -github.com/btcsuite/btcd v0.24.0 h1:gL3uHE/IaFj6fcZSu03SvqPMSx7s/dPzfpG/atRwWdo= -github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= -github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= -github.com/btcsuite/btcd/chaincfg/chainhash v1.1.0 h1:59Kx4K6lzOW5w6nFlA0v5+lk/6sjybR934QNHSJZPTQ= -github.com/btcsuite/btcd/chaincfg/chainhash v1.1.0/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/decred/dcrd/crypto/blake256 v1.1.0 h1:zPMNGQCm0g4QTY27fOCorQW7EryeQ/U0x++OzVrdms8= -github.com/decred/dcrd/crypto/blake256 v1.1.0/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= -github.com/defiweb/go-anymapper v0.3.0 h1:sWbTvhpdBaCHQGn+kuKYDnb+mPmeDNzzEXnC+CPhe6k= -github.com/defiweb/go-anymapper v0.3.0/go.mod 
h1:EeQDyOsFd63Pt2uu9Yb8NFrChuZ9JBChjGKbDhRPHAQ= -github.com/defiweb/go-rlp v0.3.0 h1:0q+EuR5SdSDu7XLx5Cu68EwVSaNA+CkRCFcE+17HNxA= -github.com/defiweb/go-rlp v0.3.0/go.mod h1:nLGzk10jAgynPvN2hL+tLnnyZ5Fcshv0wmpWDRtV0PA= -github.com/defiweb/go-sigparser v0.6.0 h1:HSNAZSUl8xyV+nKfWNKYVAPWLwTuASas6ohtarBbOT4= -github.com/defiweb/go-sigparser v0.6.0/go.mod h1:R1wkfsnASR2M38ZupKHoqqIfv+8HgRbZaFQI9Inr4k8= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -go.bytecodealliance.org/cm v0.2.2 h1:M9iHS6qs884mbQbIjtLX1OifgyPG9DuMs2iwz8G4WQA= -go.bytecodealliance.org/cm v0.2.2/go.mod h1:JD5vtVNZv7sBoQQkvBvAAVKJPhR/bqBH7yYXTItMfZI= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/components/golang-evm-price-oracle/src/cmc.go b/components/golang-evm-price-oracle/src/cmc.go deleted file mode 100644 index e222b3c4..00000000 --- a/components/golang-evm-price-oracle/src/cmc.go +++ /dev/null @@ -1,30 +0,0 @@ -package main - -// Root represents the top-level JSON response from CoinMarketCap API -type Root struct { - Status Status `json:"status"` - Data Data `json:"data"` -} - -// Status contains API response status information -type Status struct { - Timestamp string `json:"timestamp"` -} - -// Data contains cryptocurrency data -type Data struct { - Symbol 
string `json:"symbol"` - Statistics Statistics `json:"statistics"` -} - -// Statistics contains price statistics -type Statistics struct { - Price float64 `json:"price"` -} - -// PriceFeedData is the output structure with essential price information -type PriceFeedData struct { - Symbol string `json:"symbol"` - Price float64 `json:"price"` - Timestamp string `json:"timestamp"` -} diff --git a/components/golang-evm-price-oracle/src/main.go b/components/golang-evm-price-oracle/src/main.go deleted file mode 100644 index 6a041b9d..00000000 --- a/components/golang-evm-price-oracle/src/main.go +++ /dev/null @@ -1,157 +0,0 @@ -package main - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "math" - "net/http" - "strconv" - "strings" - "time" - - "github.com/Lay3rLabs/wavs-wasi/go/types" - wavs "github.com/Lay3rLabs/wavs-wasi/go/wavs/worker/layer-trigger-world" - trigger "github.com/Lay3rLabs/wavs-wasi/go/wavs/worker/layer-types" - - wasiclient "github.com/dev-wasm/dev-wasm-go/lib/http/client" - "go.bytecodealliance.org/cm" -) - -func init() { - wavs.Exports.Run = func(triggerAction wavs.TriggerAction) types.TriggerResult { - triggerID, requestInput, dest := decodeTriggerEvent(triggerAction.Data) - - result, err := compute(requestInput.Slice(), dest) - if err != nil { - return cm.Err[types.TriggerResult](err.Error()) - } - fmt.Printf("Computation Result: %v\n", string(result)) - - return routeResult(triggerID, result, dest) - } -} - -// compute is the main function that computes the price of the crypto currency -func compute(input []uint8, dest types.Destination) ([]byte, error) { - if dest == types.CliOutput { - input = bytes.TrimRight(input, "\x00") - } - - id, err := strconv.Atoi(string(input)) - if err != nil { - return nil, fmt.Errorf("failed to parse input: %w", err) - } - - priceFeed, err := fetchCryptoPrice(id) - if err != nil { - return nil, fmt.Errorf("failed to fetch price: %w", err) - } - - priceJson, err := json.Marshal(priceFeed) - if err != nil { - 
return nil, fmt.Errorf("failed to marshal JSON: %w", err) - } - - return priceJson, nil -} - -// routeResult sends the computation result to the appropriate destination -func routeResult(triggerID uint64, result []byte, dest types.Destination) types.TriggerResult { - switch dest { - case types.CliOutput: - return types.Ok(result, cm.None[uint64]()) - case types.Ethereum: - // WAVS & the contract expects abi encoded data - encoded := types.EncodeTriggerOutput(triggerID, result) - fmt.Printf("Encoded output (raw): %x\n", encoded) - return types.Ok(encoded, cm.None[uint64]()) - default: - return cm.Err[types.TriggerResult](fmt.Sprintf("unsupported destination: %s", dest)) - } -} - -// decodeTriggerEvent is the function that decodes the trigger event from the chain event to Go. -func decodeTriggerEvent(triggerAction trigger.TriggerData) (trigger_id uint64, req cm.List[uint8], dest types.Destination) { - // Handle CLI input case - if triggerAction.Raw() != nil { - raw := *triggerAction.Raw() - fmt.Printf("Raw input: %s\n", string(raw.Slice())) - return 0, raw, types.CliOutput - } - - // Handle Ethereum event case - ethEvent := triggerAction.EvmContractEvent() - if ethEvent == nil { - panic("triggerAction.EthContractEvent() is nil") - } - - // if you modify the contract trigger from the default event, you will need to create a custom `DecodeTriggerInfo` function - // to match the solidity contract data types. - triggerInfo := types.DecodeTriggerInfo(ethEvent.Log.Data.Slice()) - - fmt.Printf("Trigger ID: %v\n", triggerInfo.TriggerID) - fmt.Printf("Creator: %s\n", triggerInfo.Creator.String()) - fmt.Printf("Input Data: %v\n", string(triggerInfo.Data)) - - return triggerInfo.TriggerID, cm.NewList(&triggerInfo.Data[0], len(triggerInfo.Data)), types.Ethereum -} - -// fetchCryptoPrice fetches the price of the crypto currency from the CoinMarketCap API by their ID. 
-func fetchCryptoPrice(id int) (*PriceFeedData, error) { - // Create a new HTTP client with WASI transport - client := &http.Client{ - Transport: wasiclient.WasiRoundTripper{}, - } - - // Prepare the URL - url := fmt.Sprintf("https://api.coinmarketcap.com/data-api/v3/cryptocurrency/detail?id=%d&range=1h", id) - - // Create a new HTTP request - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return nil, err - } - - // Set the headers - currentTime := time.Now().Unix() - req.Header.Set("Accept", "application/json") - req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36") - req.Header.Set("Cookie", fmt.Sprintf("myrandom_cookie=%d", currentTime)) - - // Make the request - resp, err := client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - // Read and parse the response - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - // Parse the JSON - var root Root - if err := json.Unmarshal(body, &root); err != nil { - return nil, err - } - - // round to 2 decimal places - price := math.Round(root.Data.Statistics.Price*100) / 100 - - // timestamp is 2025-04-30T19:59:44.161Z, becomes 2025-04-30T19:59:44 - timestamp := strings.Split(root.Status.Timestamp, ".")[0] - - return &PriceFeedData{ - Symbol: root.Data.Symbol, - Price: price, - Timestamp: timestamp, - }, nil -} - -// empty main function to satisfy wasm-ld (wit) -func main() {} diff --git a/components/golang-evm-price-oracle/wkg.lock b/components/golang-evm-price-oracle/wkg.lock deleted file mode 100644 index 2100058e..00000000 --- a/components/golang-evm-price-oracle/wkg.lock +++ /dev/null @@ -1,12 +0,0 @@ -# This file is automatically generated. -# It is not intended for manual editing. 
-version = 1 - -[[packages]] -name = "wasi:cli" -registry = "wa.dev" - -[[packages.versions]] -requirement = "=0.2.0" -version = "0.2.0" -digest = "sha256:e7e85458e11caf76554b724ebf4f113259decf0f3b1ee2e2930de096f72114a7" diff --git a/components/js-evm-price-oracle/Makefile b/components/js-evm-price-oracle/Makefile deleted file mode 100644 index c404e077..00000000 --- a/components/js-evm-price-oracle/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -WAVS_PACKAGE=wavs:worker@0.4.0 -WAVS_WIT_WORLD=wavs:worker/layer-trigger-world -OUTPUT_DIR?=../../compiled - -check-package: - @if [ ! -f $(WAVS_PACKAGE).wasm ]; then \ - echo "Downloading WAVS package: $(WAVS_PACKAGE)"; \ - wkg get ${WAVS_PACKAGE} --overwrite --format wasm --output $(WAVS_PACKAGE).wasm; \ - fi - -# if @bytecodealliance/jco is not installed, then run npm i here -check-jco: - @npx jco --version || npm i - -# converts the entire .wasm package into a single .wit file, easily consumable by the jco command -convert-wasm-to-wit: - @wasm-tools component wit $(WAVS_PACKAGE).wasm -o $(WAVS_PACKAGE).wit - -## build-bindings: building the WAVS bindings -build-bindings: check-jco check-package - @npx jco types $(WAVS_PACKAGE).wasm --out-dir out/ - -## wasi-build: building the WAVS wasi component -wasi-build: check-jco build-bindings convert-wasm-to-wit - @echo "Building component: js_evm_price_oracle" - @npx tsc --outDir out/ --target es6 --strict --module preserve index.ts - @npx esbuild ./index.js --bundle --outfile=out/out.js --platform=node --format=esm - @npx jco componentize out/out.js --wit $(WAVS_PACKAGE).wit --world-name $(WAVS_WIT_WORLD) --out ../../compiled/js_evm_price_oracle.wasm - -.PHONY: help -help: Makefile - @echo - @echo " Choose a command run" - @echo - @sed -n 's/^##//p' $< | column -t -s ':' | sed -e 's/^/ /' - @echo diff --git a/components/js-evm-price-oracle/README.md b/components/js-evm-price-oracle/README.md deleted file mode 100644 index d0e0dfa4..00000000 --- 
a/components/js-evm-price-oracle/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Typescript Ethereum Price Oracle - -A WAVS component that fetches the price of a crypto currency from CoinMarketCap and returns it to the Ethereum contract, in Typescript. - -## System Setup - -Follow the main [README.md](../../README.md) to install all the necessary dependencies. - -## Core Packages - -```bash docci-if-not-installed="cast" -curl -L https://foundry.paradigm.xyz | bash && $HOME/.foundry/bin/foundryup -``` - -```bash docci-if-not-installed="wasm-tools" -cargo binstall wasm-tools --no-confirm -``` - -```bash -make setup -``` - -```bash -npm --prefix ./components/js-evm-price-oracle/ install -``` - -## Build Component - -Build all wasi components from the root of the repo. You can also run this command within each component directory. - -```bash docci-output-contains="Successfully written" -# Builds only this component, not all. -warg reset -WASI_BUILD_DIR=js-evm-price-oracle make wasi-build -``` - -## Execute Component - -Run the component with the `wasi-exec` command in the root of the repo - -```bash docci-output-contains="LTC" -COMPONENT_FILENAME=js_evm_price_oracle.wasm INPUT_DATA=2 make wasi-exec -``` - ---- - -## Run main README - -Run through the main readme, but use `export COMPONENT_FILENAME=js_evm_price_oracle.wasm` instead of the default. 
diff --git a/components/js-evm-price-oracle/config.json b/components/js-evm-price-oracle/config.json deleted file mode 100644 index d4d599c0..00000000 --- a/components/js-evm-price-oracle/config.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "__tool": "github.com/reecepbcups/docci", - "paths": [ - "components/js-evm-price-oracle/README.md" - ], - "env_vars": {}, - "pre_cmds": [], - "log_level": "ERROR", - "cleanup_cmds": [ - "killall anvil || true", - "docker compose rm --stop --force --volumes || true", - "docker rm -f wavs wavs-aggregator wavs-deploy-service-manager wavs-deploy-eigenlayer || true" - ] - } diff --git a/components/js-evm-price-oracle/index.ts b/components/js-evm-price-oracle/index.ts deleted file mode 100644 index 605b01e9..00000000 --- a/components/js-evm-price-oracle/index.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { TriggerAction, WasmResponse, } from "./out/wavs:worker@0.4.0"; -import { TriggerSource, TriggerSourceManual } from "./out/interfaces/wavs-worker-layer-types"; -import { decodeTriggerEvent, encodeOutput, Destination } from "./trigger"; -import { AbiCoder } from "ethers"; - -async function run(triggerAction: TriggerAction): Promise { - let event = decodeTriggerEvent(triggerAction.data); - let triggerId = event[0].triggerId; - - let num = processInput(event[0].data, triggerAction.config.triggerSource); - let result = await compute(num); - - switch (event[1]) { - case Destination.Cli: - return { - payload: result, - ordering: undefined, - } as WasmResponse; // return raw bytes back - case Destination.Ethereum: - return { - payload: encodeOutput(triggerId, result), - ordering: undefined, - } as WasmResponse; // return encoded bytes back - case Destination.Cosmos: - break; - } - - throw new Error( - "Unknown destination: " + event[1] + " for trigger ID: " + triggerId - ); -} - -async function compute(num: number): Promise { - const priceFeed = await fetchCryptoPrice(num); - const priceJson = priceFeedToJson(priceFeed); - - return new 
TextEncoder().encode(priceJson); -} - -function processInput(input: Uint8Array, triggerSource: { tag: string }): number { - // Prepare the input data based on trigger type - const processedInput = prepareInputData(input, triggerSource.tag); - - // Single ABI decoding step - const abiCoder = new AbiCoder(); - const res = abiCoder.decode(["string"], processedInput); - const decodedString = res[0] as string; - - console.log("Decoded input:", decodedString, "triggerSource.tag:", triggerSource.tag); - - // Validate the decoded string is a valid number - const num = decodedString.trim(); - if (isNaN(parseInt(num))) { - throw new Error(`Input is not a valid number: ${num}`); - } - - return parseInt(num); // Return the validated number -} - - -function prepareInputData(input: Uint8Array, triggerTag: string): Uint8Array { - if (triggerTag === "manual") { - return input; // Use input directly for manual triggers - } - - // For evm-contract-event: handle potential hex string conversion - try { - const inputStr = new TextDecoder().decode(input); - if (!inputStr.startsWith("0x")) { - throw new Error("Input is not a valid hex string: " + inputStr); - } - - // Convert hex string to bytes - const hexString = inputStr.slice(2); // Remove "0x" prefix - return new Uint8Array( - hexString.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16)) - ); - } catch { - return input; // If UTF-8 decode fails, assume it's already binary - } -} - -// ======================== CMC ======================== - -// Define the types for the CMC API response -interface Root { - status: Status; - data: Data; -} - -interface Status { - timestamp: string; -} - -interface Data { - symbol: string; - statistics: Statistics; -} - -interface Statistics { - price: number; -} - -// Output structure with essential price information -interface PriceFeedData { - symbol: string; - price: number; - timestamp: string; -} - -/** - * Fetches the price of a cryptocurrency from the CoinMarketCap API by their ID. 
- * @param id The CoinMarketCap ID of the cryptocurrency - * @returns A Promise that resolves to PriceFeedData - */ -async function fetchCryptoPrice(id: number): Promise { - // Prepare the URL - const url = `https://api.coinmarketcap.com/data-api/v3/cryptocurrency/detail?id=${id}&range=1h`; - - // Set the headers - const currentTime = Math.floor(Date.now() / 1000); - const headers = { - Accept: "application/json", - "Content-Type": "application/json", - "User-Agent": - "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36", - Cookie: `myrandom_cookie=${currentTime}`, - }; - - try { - // Make the request - const response = await fetch(url, { - method: "GET", - headers, - }); - - if (!response.ok) { - throw new Error(`HTTP error! Status: ${response.status}`); - } - - // Parse the JSON response - const root: Root = await response.json(); - - // round to 2 decimal places on root.data.statistics.price - let price = Math.round(root.data.statistics.price * 100) / 100; - - // timestamp is 2025-04-30T19:59:44.161Z, becomes 2025-04-30T19:59:44 - let timestamp = root.status.timestamp.split(".")[0]; - - return { - symbol: root.data.symbol, - price: price, - timestamp: timestamp, - }; - } catch (error) { - throw new Error( - `Failed to fetch crypto price: ${ - error instanceof Error ? error.message : String(error) - }` - ); - } -} - -// Example of how to convert the PriceFeedData to JSON -function priceFeedToJson(priceFeed: PriceFeedData): string { - try { - return JSON.stringify(priceFeed); - } catch (error) { - throw new Error( - `Failed to marshal JSON: ${ - error instanceof Error ? 
error.message : String(error) - }` - ); - } -} - -export { run }; diff --git a/components/js-evm-price-oracle/package-lock.json b/components/js-evm-price-oracle/package-lock.json deleted file mode 100644 index c6064cad..00000000 --- a/components/js-evm-price-oracle/package-lock.json +++ /dev/null @@ -1,2067 +0,0 @@ -{ - "name": "js-evm-price-oracle", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "js-evm-price-oracle", - "version": "1.0.0", - "license": "ISC", - "devDependencies": { - "@bytecodealliance/componentize-js": "^0.18.0", - "@bytecodealliance/jco": "^1.10.2", - "esbuild": "^0.25.0", - "ethers": "^6.13.5" - } - }, - "node_modules/@adraffy/ens-normalize": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", - "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==", - "dev": true - }, - "node_modules/@bytecodealliance/componentize-js": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/@bytecodealliance/componentize-js/-/componentize-js-0.18.0.tgz", - "integrity": "sha512-yQTbGUjkmWADDCgP4y5hH7im9u2oFRvIjm3QM7z0To9OmDclD9T13yjTnKffWOPkyjJsFS1D0EY3Se5fgYnqtw==", - "dev": true, - "workspaces": [ - "." 
- ], - "dependencies": { - "@bytecodealliance/jco": "^1.9.1", - "@bytecodealliance/weval": "^0.3.3", - "@bytecodealliance/wizer": "^7.0.5", - "es-module-lexer": "^1.6.0" - }, - "bin": { - "componentize-js": "src/cli.js" - } - }, - "node_modules/@bytecodealliance/jco": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/@bytecodealliance/jco/-/jco-1.10.2.tgz", - "integrity": "sha512-ShBb9Jul4CYo4asmfDdjmYE2+4TgJLSHhsRMyVDvROA5j6s+cAKMmlsUjOrS6QM5TL8GJb48G9jsxif6na6qYg==", - "dev": true, - "workspaces": [ - "packages/preview2-shim" - ], - "dependencies": { - "@bytecodealliance/componentize-js": "^0.17.0", - "@bytecodealliance/preview2-shim": "^0.17.2", - "binaryen": "^122.0.0", - "chalk-template": "^1", - "commander": "^12", - "mkdirp": "^3", - "ora": "^8", - "terser": "^5" - }, - "bin": { - "jco": "src/jco.js" - } - }, - "node_modules/@bytecodealliance/jco/node_modules/@bytecodealliance/componentize-js": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/@bytecodealliance/componentize-js/-/componentize-js-0.17.0.tgz", - "integrity": "sha512-FDgO5UPipfjyq5OghSB4JW313LkQJK3Sl647WH1jvIuYAyCq1j+bMt+Q66c3UF6IVs6PneNTGfGSjYgzID/k0w==", - "dev": true, - "workspaces": [ - "." 
- ], - "dependencies": { - "@bytecodealliance/jco": "^1.9.1", - "@bytecodealliance/weval": "^0.3.3", - "@bytecodealliance/wizer": "^7.0.5", - "es-module-lexer": "^1.6.0" - } - }, - "node_modules/@bytecodealliance/preview2-shim": { - "version": "0.17.2", - "resolved": "https://registry.npmjs.org/@bytecodealliance/preview2-shim/-/preview2-shim-0.17.2.tgz", - "integrity": "sha512-mNm/lblgES8UkVle8rGImXOz4TtL3eU3inHay/7TVchkKrb/lgcVvTK0+VAw8p5zQ0rgQsXm1j5dOlAAd+MeoA==", - "dev": true - }, - "node_modules/@bytecodealliance/weval": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@bytecodealliance/weval/-/weval-0.3.3.tgz", - "integrity": "sha512-hrQI47O1l3ilFscixu0uuSJTj5tbQW0QmCATQWWNW0E8wJxbKH4yo8y57O5gqpRSKk/T+da1sH/GJNrnGHTFNA==", - "dev": true, - "dependencies": { - "@napi-rs/lzma": "^1.1.2", - "decompress": "^4.2.1", - "decompress-tar": "^4.1.1", - "decompress-unzip": "^4.0.1" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/@bytecodealliance/wizer": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer/-/wizer-7.0.5.tgz", - "integrity": "sha512-xIbLzKxmUNaPwDWorcGtdxh1mcgDiXI8fe9KiDaSICKfCl9VtUKVyXIc3ix+VpwFczBbdhek+TlMiiCf+9lpOQ==", - "dev": true, - "bin": { - "wizer": "wizer.js" - }, - "engines": { - "node": ">=16" - }, - "optionalDependencies": { - "@bytecodealliance/wizer-darwin-arm64": "7.0.5", - "@bytecodealliance/wizer-darwin-x64": "7.0.5", - "@bytecodealliance/wizer-linux-arm64": "7.0.5", - "@bytecodealliance/wizer-linux-s390x": "7.0.5", - "@bytecodealliance/wizer-linux-x64": "7.0.5", - "@bytecodealliance/wizer-win32-x64": "7.0.5" - } - }, - "node_modules/@bytecodealliance/wizer-darwin-arm64": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer-darwin-arm64/-/wizer-darwin-arm64-7.0.5.tgz", - "integrity": "sha512-Tp0SgVQR568SVPvSfyWDT00yL4ry/w9FS2qy8ZwaP0EauYyjFSZojj6mESX6x9fpYkEnQdprgfdvhw5h1hTwCQ==", - "cpu": [ - "arm64" - ], - "dev": true, - 
"optional": true, - "os": [ - "darwin" - ], - "bin": { - "wizer-darwin-arm64": "wizer" - } - }, - "node_modules/@bytecodealliance/wizer-darwin-x64": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer-darwin-x64/-/wizer-darwin-x64-7.0.5.tgz", - "integrity": "sha512-HYmG5Q9SpQJnqR7kimb5J3VAh6E62b30GLG/E+6doS/UwNhSpSmYjaggVfuJvgFDbUxsnD1l36qZny0xMwxikA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "bin": { - "wizer-darwin-x64": "wizer" - } - }, - "node_modules/@bytecodealliance/wizer-linux-arm64": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer-linux-arm64/-/wizer-linux-arm64-7.0.5.tgz", - "integrity": "sha512-01qqaiIWrYXPt2bjrfiluSSOmUL/PMjPtJlYa/XqZgK75g3RVn3sRkSflwoCXtXMRbHdb03qNrJ9w81+F17kvA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "bin": { - "wizer-linux-arm64": "wizer" - } - }, - "node_modules/@bytecodealliance/wizer-linux-s390x": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer-linux-s390x/-/wizer-linux-s390x-7.0.5.tgz", - "integrity": "sha512-smGfD4eJou81g6yDlV7MCRoKgKlqd4SQL00pHxQGrNfUPnfYKhZ4z80N9J9T2B++uo2FM14BFilsRrV5UevKlA==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "bin": { - "wizer-linux-s390x": "wizer" - } - }, - "node_modules/@bytecodealliance/wizer-linux-x64": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@bytecodealliance/wizer-linux-x64/-/wizer-linux-x64-7.0.5.tgz", - "integrity": "sha512-lxMb25jLd6n+hhjPhlqRBnBdGRumKkcEavqJ3p4OAtjr6pEPdbSfSVmYDt9LnvtqmqQSnUCtFRRr5J2BmQ3SkQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "bin": { - "wizer-linux-x64": "wizer" - } - }, - "node_modules/@bytecodealliance/wizer-win32-x64": { - "version": "7.0.5", - "resolved": 
"https://registry.npmjs.org/@bytecodealliance/wizer-win32-x64/-/wizer-win32-x64-7.0.5.tgz", - "integrity": "sha512-eUY9a82HR20qIfyEffWdJZj7k4GH2wGaZpr70dinDy8Q648LeQayL0Z6FW5nApoezjy+CIBj0Mv+rHUASV9Jzw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "bin": { - "wizer-win32-x64": "wizer" - } - }, - "node_modules/@emnapi/core": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.3.1.tgz", - "integrity": "sha512-pVGjBIt1Y6gg3EJN8jTcfpP/+uuRksIo055oE/OBkDNcjZqVbfkWCksG1Jp4yZnj3iKWyWX8fdG/j6UDYPbFog==", - "dev": true, - "optional": true, - "dependencies": { - "@emnapi/wasi-threads": "1.0.1", - "tslib": "^2.4.0" - } - }, - "node_modules/@emnapi/runtime": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz", - "integrity": "sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==", - "dev": true, - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@emnapi/wasi-threads": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.0.1.tgz", - "integrity": "sha512-iIBu7mwkq4UQGeMEM8bLwNK962nXdhodeScX4slfQnRhEMMzvYivHhutCIk8uojvmASXXPC2WNEjwxFWk72Oqw==", - "dev": true, - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", - "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", - "integrity": 
"sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", - "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", - "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", - "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", - "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", - "integrity": 
"sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", - "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", - "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", - "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", - "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", - "integrity": 
"sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", - "cpu": [ - "loong64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", - "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", - "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", - "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", - "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", - "integrity": 
"sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", - "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", - "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", - "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", - "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", - "integrity": 
"sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", - "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", - "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", - "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", - "dev": true, - "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", - "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", - "dev": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", - "dev": true - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "dev": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@napi-rs/lzma": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma/-/lzma-1.4.1.tgz", - "integrity": "sha512-5f8K9NHjwHjZKGm3SS+7CFxXQhz8rbg2umBm/9g0xQRXBdYEI31N5z1ACuk9bmBQOusXAq9CArGfs/ZQso2rUA==", - "dev": true, - "engines": { - "node": ">= 10" - }, - "funding": { - 
"type": "github", - "url": "https://github.com/sponsors/Brooooooklyn" - }, - "optionalDependencies": { - "@napi-rs/lzma-android-arm-eabi": "1.4.1", - "@napi-rs/lzma-android-arm64": "1.4.1", - "@napi-rs/lzma-darwin-arm64": "1.4.1", - "@napi-rs/lzma-darwin-x64": "1.4.1", - "@napi-rs/lzma-freebsd-x64": "1.4.1", - "@napi-rs/lzma-linux-arm-gnueabihf": "1.4.1", - "@napi-rs/lzma-linux-arm64-gnu": "1.4.1", - "@napi-rs/lzma-linux-arm64-musl": "1.4.1", - "@napi-rs/lzma-linux-ppc64-gnu": "1.4.1", - "@napi-rs/lzma-linux-riscv64-gnu": "1.4.1", - "@napi-rs/lzma-linux-s390x-gnu": "1.4.1", - "@napi-rs/lzma-linux-x64-gnu": "1.4.1", - "@napi-rs/lzma-linux-x64-musl": "1.4.1", - "@napi-rs/lzma-wasm32-wasi": "1.4.1", - "@napi-rs/lzma-win32-arm64-msvc": "1.4.1", - "@napi-rs/lzma-win32-ia32-msvc": "1.4.1", - "@napi-rs/lzma-win32-x64-msvc": "1.4.1" - } - }, - "node_modules/@napi-rs/lzma-android-arm-eabi": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-android-arm-eabi/-/lzma-android-arm-eabi-1.4.1.tgz", - "integrity": "sha512-yenreSpZ9IrqppJOiWDqWMmja7XtSgio9LhtxYwgdILmy/OJTe/mlTYv+FhJBf7hIV9Razu5eBuEa3zKri81IA==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-android-arm64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-android-arm64/-/lzma-android-arm64-1.4.1.tgz", - "integrity": "sha512-piutVBz5B1TNxXeEjub0n/IKI6dMaXPPRbVSXuc4gnZgzcihNDUh68vcLZgYd+IMiACZvBxvx2O3t5nthtph3A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-darwin-arm64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-darwin-arm64/-/lzma-darwin-arm64-1.4.1.tgz", - "integrity": "sha512-sDfOhQQFqV8lGbpgJN9DqNLBPR7QOfYjcWUv8FOGPaVP1LPJDnrc5uCpRWWEa2zIKmTiO8P9xzIl0TDzrYmghg==", - "cpu": [ - "arm64" - ], - 
"dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-darwin-x64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-darwin-x64/-/lzma-darwin-x64-1.4.1.tgz", - "integrity": "sha512-S5/RbC6EP4QkYy2xhxbfm48ZD9FkysfpWY4Slve0nj5RGGsHvcJBg2Pi69jrTPB/zLKz2SUa0i+RfUt9zvZNaw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-freebsd-x64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-freebsd-x64/-/lzma-freebsd-x64-1.4.1.tgz", - "integrity": "sha512-4AFnq6aZnclwameSBkDWu5Ftb8y4GwvVXeQXJKbN7hf7O5GG/8QpQB1R1NJw2QORUhpKwjAQUpbkTyhL2GFWWw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-arm-gnueabihf": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-arm-gnueabihf/-/lzma-linux-arm-gnueabihf-1.4.1.tgz", - "integrity": "sha512-j5rL1YRIm6rWmmGAvN6DPX6QuRjvFGB93xJ7DTRB47GXW4zHekXae6ivowjJ95vT4Iz4hSWkZbuwAy95eFrWRA==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-arm64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-arm64-gnu/-/lzma-linux-arm64-gnu-1.4.1.tgz", - "integrity": "sha512-1XdFGKyTS9m+VrRQYs9uz+ToHf4Jwm0ejHU48k9lT9MPl8jSqzKdVtFzZBPzronHteSynBfKmUq0+HeWmjrsOQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-arm64-musl": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-arm64-musl/-/lzma-linux-arm64-musl-1.4.1.tgz", - "integrity": 
"sha512-9d09tYS0/rBwIk1QTcO2hMZEB/ZpsG2+uFW5am1RHElSWMclObirB1An7b6AMDJcRvcomkOg2GZ9COzrvHKwEA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-ppc64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-ppc64-gnu/-/lzma-linux-ppc64-gnu-1.4.1.tgz", - "integrity": "sha512-UzEkmsgoJ3IOGIRb6kBzNiw+ThUpiighop7dVYfSqlF5juGzwf7YewC57RGn4FoJCvadOCrSm5VikAcgrwVgAw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-riscv64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-riscv64-gnu/-/lzma-linux-riscv64-gnu-1.4.1.tgz", - "integrity": "sha512-9dUKlZ1PdwxTaFF+j3oc+xjlk9nqFwo1NWWOH30uwjl4Rm5Gkv+Fx0pHrzu4kR/iVA+oyQqa9/2uDYnGSTijBA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-s390x-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-s390x-gnu/-/lzma-linux-s390x-gnu-1.4.1.tgz", - "integrity": "sha512-MOVXUWJSLLCJDCCAlGa39sh7nv9XjvXzCf7QJus7rD8Ciz0mpXNXF9mg0ji7/MZ7pZlKPlXjXDnpVCfFdSEaFQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-x64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-linux-x64-gnu/-/lzma-linux-x64-gnu-1.4.1.tgz", - "integrity": "sha512-Sxu7aJxU1sDbUTqjqLVDV3DCOAlbsFKvmuCN/S5uXBJd1IF2wJ9jK3NbFzfqTAo5Hudx8Y7kOb6+3K+fYPI1KQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-linux-x64-musl": { - "version": "1.4.1", - "resolved": 
"https://registry.npmjs.org/@napi-rs/lzma-linux-x64-musl/-/lzma-linux-x64-musl-1.4.1.tgz", - "integrity": "sha512-4I3BeKBQJSE5gF2/VTEv7wCLLjhapeutbCGpZPmDiLHZ74rm9edmNXAlKpdjADQ4YDLJ2GIBzttvwLXkJ9U+cw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-wasm32-wasi": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-wasm32-wasi/-/lzma-wasm32-wasi-1.4.1.tgz", - "integrity": "sha512-s32HdKqQWbohf6DGWpG9YMODaBdbKJ++JpNr6Ii7821sKf4h/o+p8IRFTOaWdmdJdllEWlRirnd5crA29VivJQ==", - "cpu": [ - "wasm32" - ], - "dev": true, - "optional": true, - "dependencies": { - "@napi-rs/wasm-runtime": "^0.2.4" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@napi-rs/lzma-win32-arm64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-win32-arm64-msvc/-/lzma-win32-arm64-msvc-1.4.1.tgz", - "integrity": "sha512-ISz+v7ML5mKnjEZ7Kk4Z1BIn411r/fz3tDy9j5yDnwQI0MgTsUQFrIQElGUpULWYs2aYc6EZ9PhECbLBfSjh7A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-win32-ia32-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-win32-ia32-msvc/-/lzma-win32-ia32-msvc-1.4.1.tgz", - "integrity": "sha512-3WKuCpZBrd7Jrw+h1jSu5XAsRWepMJu0sYuRoA4Y4Cwfu9gI7p5Z5Bc510nfjg7M7xvdpkI4UoW2WY7kBFRYrQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/lzma-win32-x64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@napi-rs/lzma-win32-x64-msvc/-/lzma-win32-x64-msvc-1.4.1.tgz", - "integrity": "sha512-0ixRo5z1zFXdh62hlrTV+QCTKHK0te5NHKaExOluhtcc6AdpMmpslvM9JhUxNHI+zM46w/DmmcvcOtqsaTmHgg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - 
"engines": { - "node": ">= 10" - } - }, - "node_modules/@napi-rs/wasm-runtime": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.7.tgz", - "integrity": "sha512-5yximcFK5FNompXfJFoWanu5l8v1hNGqNHh9du1xETp9HWk/B/PzvchX55WYOPaIeNglG8++68AAiauBAtbnzw==", - "dev": true, - "optional": true, - "dependencies": { - "@emnapi/core": "^1.3.1", - "@emnapi/runtime": "^1.3.1", - "@tybys/wasm-util": "^0.9.0" - } - }, - "node_modules/@noble/curves": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz", - "integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==", - "dev": true, - "dependencies": { - "@noble/hashes": "1.3.2" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@noble/hashes": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz", - "integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==", - "dev": true, - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@tybys/wasm-util": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.9.0.tgz", - "integrity": "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==", - "dev": true, - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@types/node": { - "version": "22.7.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.5.tgz", - "integrity": "sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ==", - "dev": true, - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "node_modules/acorn": { - "version": "8.14.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", - "integrity": 
"sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/aes-js": { - "version": "4.0.0-beta.5", - "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", - "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==", - "dev": true - }, - "node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/binaryen": { - "version": "122.0.0", - "resolved": "https://registry.npmjs.org/binaryen/-/binaryen-122.0.0.tgz", - "integrity": "sha512-XOvjvucq04e5UW0cuoiEZX4wxzepuF7q8sz24JtaXltqjgCqQOZHPXAq5ja/eLljF/orWYmaxuWivpp7paVUCQ==", - "dev": true, - "bin": { - "wasm-as": "bin/wasm-as", - "wasm-ctor-eval": "bin/wasm-ctor-eval", - "wasm-dis": "bin/wasm-dis", - "wasm-merge": "bin/wasm-merge", - "wasm-metadce": "bin/wasm-metadce", - "wasm-opt": "bin/wasm-opt", - "wasm-reduce": "bin/wasm-reduce", - "wasm-shell": "bin/wasm-shell", - "wasm2js": "bin/wasm2js" - } - }, - "node_modules/bl": { - "version": "1.2.3", - "resolved": 
"https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", - "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", - "dev": true, - "dependencies": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" - } - }, - "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "node_modules/buffer-alloc": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", - "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", - "dev": true, - "dependencies": { - "buffer-alloc-unsafe": "^1.1.0", - "buffer-fill": "^1.0.0" - } - }, - "node_modules/buffer-alloc-unsafe": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", - "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==", - "dev": true - }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/buffer-fill": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", - "integrity": 
"sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==", - "dev": true - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true - }, - "node_modules/chalk": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", - "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", - "dev": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chalk-template": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-1.1.0.tgz", - "integrity": "sha512-T2VJbcDuZQ0Tb2EWwSotMPJjgpy1/tGee1BTpUNsGZ/qgNjV2t7Mvu+d4600U564nbLesN1x2dPL+xii174Ekg==", - "dev": true, - "dependencies": { - "chalk": "^5.2.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/chalk/chalk-template?sponsor=1" - } - }, - "node_modules/cli-cursor": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", - "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", - "dev": true, - "dependencies": { - "restore-cursor": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "dev": true, - "engines": { - "node": ">=6" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/commander": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", - "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", - "dev": true, - "engines": { - "node": ">=18" - } - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true - }, - "node_modules/decompress": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", - "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", - "dev": true, - "dependencies": { - "decompress-tar": "^4.0.0", - "decompress-tarbz2": "^4.0.0", - "decompress-targz": "^4.0.0", - "decompress-unzip": "^4.0.1", - "graceful-fs": "^4.1.10", - "make-dir": "^1.0.0", - "pify": "^2.3.0", - "strip-dirs": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-tar": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", - "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", - "dev": true, - "dependencies": { - "file-type": "^5.2.0", - "is-stream": "^1.1.0", - "tar-stream": "^1.5.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-tarbz2": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", - "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", - "dev": true, - "dependencies": { - "decompress-tar": "^4.1.0", - "file-type": "^6.1.0", - "is-stream": "^1.1.0", - "seek-bzip": "^1.0.5", - 
"unbzip2-stream": "^1.0.9" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-tarbz2/node_modules/file-type": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", - "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-targz": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", - "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", - "dev": true, - "dependencies": { - "decompress-tar": "^4.1.1", - "file-type": "^5.2.0", - "is-stream": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-unzip": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", - "integrity": "sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw==", - "dev": true, - "dependencies": { - "file-type": "^3.8.0", - "get-stream": "^2.2.0", - "pify": "^2.3.0", - "yauzl": "^2.4.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/decompress-unzip/node_modules/file-type": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", - "integrity": "sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/emoji-regex": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", - "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", - "dev": true - }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - 
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "dependencies": { - "once": "^1.4.0" - } - }, - "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", - "dev": true - }, - "node_modules/esbuild": { - "version": "0.25.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", - "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", - "dev": true, - "hasInstallScript": true, - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.1", - "@esbuild/android-arm": "0.25.1", - "@esbuild/android-arm64": "0.25.1", - "@esbuild/android-x64": "0.25.1", - "@esbuild/darwin-arm64": "0.25.1", - "@esbuild/darwin-x64": "0.25.1", - "@esbuild/freebsd-arm64": "0.25.1", - "@esbuild/freebsd-x64": "0.25.1", - "@esbuild/linux-arm": "0.25.1", - "@esbuild/linux-arm64": "0.25.1", - "@esbuild/linux-ia32": "0.25.1", - "@esbuild/linux-loong64": "0.25.1", - "@esbuild/linux-mips64el": "0.25.1", - "@esbuild/linux-ppc64": "0.25.1", - "@esbuild/linux-riscv64": "0.25.1", - "@esbuild/linux-s390x": "0.25.1", - "@esbuild/linux-x64": "0.25.1", - "@esbuild/netbsd-arm64": "0.25.1", - "@esbuild/netbsd-x64": "0.25.1", - "@esbuild/openbsd-arm64": "0.25.1", - "@esbuild/openbsd-x64": "0.25.1", - "@esbuild/sunos-x64": "0.25.1", - "@esbuild/win32-arm64": "0.25.1", - "@esbuild/win32-ia32": "0.25.1", - "@esbuild/win32-x64": "0.25.1" - } - }, - "node_modules/ethers": { - "version": "6.13.5", - "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.5.tgz", - "integrity": "sha512-+knKNieu5EKRThQJWwqaJ10a6HE9sSehGeqWN65//wE7j47ZpFhKAnHB/JJFibwwg61I/koxaPsXbXpD/skNOQ==", - "dev": 
true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/ethers-io/" - }, - { - "type": "individual", - "url": "https://www.buymeacoffee.com/ricmoo" - } - ], - "dependencies": { - "@adraffy/ens-normalize": "1.10.1", - "@noble/curves": "1.2.0", - "@noble/hashes": "1.3.2", - "@types/node": "22.7.5", - "aes-js": "4.0.0-beta.5", - "tslib": "2.7.0", - "ws": "8.17.1" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/ethers/node_modules/tslib": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", - "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==", - "dev": true - }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "dev": true, - "dependencies": { - "pend": "~1.2.0" - } - }, - "node_modules/file-type": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", - "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/fs-constants": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "dev": true - }, - "node_modules/get-east-asian-width": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", - "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", - "dev": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/get-stream": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", - "integrity": "sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA==", - "dev": true, - "dependencies": { - "object-assign": "^4.0.1", - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/is-interactive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", - "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-natural-number": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", - "integrity": 
"sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ==", - "dev": true - }, - "node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-unicode-supported": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", - "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", - "dev": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true - }, - "node_modules/log-symbols": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-6.0.0.tgz", - "integrity": "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==", - "dev": true, - "dependencies": { - "chalk": "^5.3.0", - "is-unicode-supported": "^1.3.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-symbols/node_modules/is-unicode-supported": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", - "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-dir": { - 
"version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/make-dir/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/mimic-function": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", - "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", - "dev": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "dev": true, - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "dependencies": { - "wrappy": "1" - } - 
}, - "node_modules/onetime": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", - "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", - "dev": true, - "dependencies": { - "mimic-function": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/ora/-/ora-8.2.0.tgz", - "integrity": "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==", - "dev": true, - "dependencies": { - "chalk": "^5.3.0", - "cli-cursor": "^5.0.0", - "cli-spinners": "^2.9.2", - "is-interactive": "^2.0.0", - "is-unicode-supported": "^2.0.0", - "log-symbols": "^6.0.0", - "stdin-discarder": "^0.2.2", - "string-width": "^7.2.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "dev": true - }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pinkie": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pinkie-promise": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", - "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", - "dev": true, - "dependencies": { - "pinkie": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/readable-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/restore-cursor": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", - "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", - "dev": true, - "dependencies": { - "onetime": "^7.0.0", - "signal-exit": "^4.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/seek-bzip": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", - "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", - "dev": true, - "dependencies": { - "commander": "^2.8.1" - }, - "bin": { - "seek-bunzip": "bin/seek-bunzip", - "seek-table": "bin/seek-bzip-table" - } - }, - "node_modules/seek-bzip/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": 
true, - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/stdin-discarder": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz", - "integrity": "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==", - "dev": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": 
"https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-dirs": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", - "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", - "dev": true, - "dependencies": { - "is-natural-number": "^4.0.1" - } - }, - "node_modules/tar-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", - "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", - "dev": true, - "dependencies": { - "bl": "^1.0.0", - "buffer-alloc": "^1.2.0", - "end-of-stream": "^1.0.0", - "fs-constants": "^1.0.0", - "readable-stream": "^2.3.0", - "to-buffer": "^1.1.1", - "xtend": "^4.0.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/terser": { - "version": "5.39.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.39.0.tgz", - "integrity": "sha512-LBAhFyLho16harJoWMg/nZsQYgTrg5jXOn2nCYjRUcZZEdE3qa2zb8QEDRUGVZBW4rlazf2fxkg8tztybTaqWw==", - "dev": true, - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", - "dev": true - }, - "node_modules/to-buffer": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", - "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==", - "dev": true - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, - "optional": true - }, - "node_modules/unbzip2-stream": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", - "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", - "dev": true, - "dependencies": { - "buffer": "^5.2.1", - "through": "^2.3.8" - } - }, - "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "node_modules/ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "dev": true, - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": 
{ - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, - "engines": { - "node": ">=0.4" - } - }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "dev": true, - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - } - } -} diff --git a/components/js-evm-price-oracle/package.json b/components/js-evm-price-oracle/package.json deleted file mode 100644 index e8c05087..00000000 --- a/components/js-evm-price-oracle/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "name": "js-evm-price-oracle", - "version": "1.0.0", - "description": "", - "main": "index.js", - "type": "module", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "author": "Reece Williams ", - "license": "ISC", - "devDependencies": { - "@bytecodealliance/componentize-js": "^0.18.0", - "@bytecodealliance/jco": "^1.10.2", - "esbuild": "^0.25.0", - "ethers": "^6.13.5" - } -} diff --git a/components/js-evm-price-oracle/trigger.ts b/components/js-evm-price-oracle/trigger.ts deleted file mode 100644 index 57235514..00000000 --- a/components/js-evm-price-oracle/trigger.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { TriggerData } from "./out/interfaces/wavs-worker-layer-types"; -import { getBytes, hexlify, Interface } from "ethers"; -import { AbiCoder } from "ethers"; - -enum Destination { - Cli = "Cli", - Ethereum = "Ethereum", - Cosmos = "Cosmos", -} - -// === Contract Types === -type TriggerInfoType = { - triggerId: number; - creator: string; - data: Uint8Array; -}; - -// ITypes.sol Types -const 
DataWithId = "tuple(uint64 triggerId, bytes data)"; -const TriggerInfo = "tuple(uint64 triggerId, address creator, bytes data)"; -const EventName = "NewTrigger"; -const eventInterface = new Interface([ - `event ${EventName}(bytes _triggerInfo)`, -]); - -function encodeOutput(triggerId: number, outputData: Uint8Array): Uint8Array { - try { - const encoded = new AbiCoder().encode( - [DataWithId], - [ - { - triggerId: triggerId, - data: outputData, - }, - ] - ); - - // Convert the hex string back to Uint8Array - return getBytes(encoded); - } catch (error) { - console.error("Error encoding output:", error); - // Return a simple fallback if encoding fails - return new Uint8Array([0]); - } -} - -function decodeTriggerEvent( - triggerAction: TriggerData -): [TriggerInfoType, Destination] { - if (triggerAction.tag === "raw") { - return [ - { - triggerId: 0, - data: triggerAction.val, - creator: "", - }, - Destination.Cli, - ]; - } - - if (triggerAction.tag === "evm-contract-event") { - const ethContractEvent = triggerAction.val; - - try { - const topics = ethContractEvent.log.topics.map((t) => hexlify(t)); - // Decode the NewTrigger event to get the encoded _triggerInfo bytes - const decodedEvent = eventInterface.decodeEventLog( - EventName, - ethContractEvent.log.data, - topics - ); - - // One-step decoding of the TriggerInfo struct - const [triggerInfo] = new AbiCoder().decode( - [TriggerInfo], - decodedEvent._triggerInfo - ); - - return [ - { - triggerId: Number(triggerInfo.triggerId), - creator: triggerInfo.creator, - data: getBytes(triggerInfo.data), - }, - Destination.Ethereum, - ]; - } catch (error) { - throw new Error("Error processing eth contract event: " + error); - } - } - - throw new Error( - "Unknown triggerAction type or not supported: " + triggerAction.tag - ); -} - -export { decodeTriggerEvent, encodeOutput, Destination }; From 830a51584934e439735bf842bedd8fb84c1a25b4 Mon Sep 17 00:00:00 2001 From: Reece Williams Date: Wed, 1 Oct 2025 13:12:24 -0500 
Subject: [PATCH 6/6] working --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0801a0aa..db44b62a 100644 --- a/README.md +++ b/README.md @@ -316,7 +316,7 @@ RPC_URL=${RPC_URL} forge script ./src/script/ShowResult.s.sol ${SERVICE_TRIGGER_ ```bash docci-delay-per-cmd=2 docci-output-contains="BTC" export SERVICE_SUBMIT_ADDR=`jq -r '.evmpriceoracle_submit.deployedTo' .docker/deployment_summary.json` -RPC_URL=${RPC_URL} forge script ./src/script/ShowResult.s.sol ${SERVICE_SUBMIT_ADDR} 4 --sig 'data(string,uint64)' --rpc-url ${RPC_URL} +RPC_URL=${RPC_URL} forge script ./src/script/ShowResult.s.sol ${SERVICE_SUBMIT_ADDR} 1 --sig 'data(string,uint64)' --rpc-url ${RPC_URL} ``` ## AI Coding Agents