From df7abad493096353f1a34490fd3fd9cc053f53cd Mon Sep 17 00:00:00 2001 From: ladamesny Date: Wed, 23 Jul 2025 10:42:10 -0400 Subject: [PATCH 1/3] feat(cli): add automatic-generate-keys command and improve key generation UX - Introduced command to partner-chains-cli: - Generates and saves session keys by connecting to a running node via RPC () - Decodes session keys using the runtime API, supports modern and legacy formats, and saves to keystore and JSON - Provides fallback and error handling for various runtime formats - Integrated the new command into the CLI as an alternative to for all roles (Governance Authority, Registered Validator, Permissioned Validator) - Updated CLI help message and workflow documentation to clearly present both key generation methods and when to use each - Added comprehensive test suite for SCALE decoding and key handling logic - Updated changelog with details of the new feature and related improvements - Updated Cargo.toml and workspace dependencies for , (with serde), and related requirements --- Cargo.lock | 208 +++++++++++- Cargo.toml | 2 + changelog.md | 10 + toolkit/partner-chains-cli/Cargo.toml | 3 + .../src/automatic_generate_keys/mod.rs | 301 ++++++++++++++++++ .../src/automatic_generate_keys/tests.rs | 71 +++++ toolkit/partner-chains-cli/src/lib.rs | 35 +- 7 files changed, 612 insertions(+), 18 deletions(-) create mode 100644 toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs create mode 100644 toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs diff --git a/Cargo.lock b/Cargo.lock index a0ff536aa1..8e827152c9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2625,6 +2625,15 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "enum-as-inner" version = "0.6.1" @@ -2944,6 +2953,21 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "fork-tree" version = "13.0.1" @@ -3963,6 +3987,19 @@ dependencies = [ "tower-service", ] +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.32", + "native-tls", + "tokio", + "tokio-native-tls", +] + [[package]] name = "hyper-util" version = "0.1.15" @@ -4149,7 +4186,7 @@ dependencies = [ "netlink-proto", "netlink-sys", "rtnetlink", - "system-configuration", + "system-configuration 0.6.1", "tokio", "windows 0.53.0", ] @@ -4251,6 +4288,7 @@ checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", "hashbrown 0.15.4", + "serde", ] [[package]] @@ -5670,6 +5708,12 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minicbor" version = "0.25.1" @@ -5933,6 +5977,23 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework 2.11.1", + "security-framework-sys", + "tempfile", +] + [[package]] name = "netlink-packet-core" version = "0.7.0" @@ -6304,12 +6365,50 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -7060,6 +7159,7 @@ dependencies = [ "frame-system", "hex", "hex-literal 1.0.0", + "indexmap 2.10.0", "inquire", "libp2p-identity", "log", @@ -7074,6 +7174,7 @@ dependencies = [ "plutus", "plutus-datum-derive", "pretty_assertions", + "reqwest", "scale-info", "secp256k1 0.30.0", "serde", @@ -8467,6 +8568,46 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration 0.5.1", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + [[package]] name = "resolv-conf" version = "0.7.4" @@ -8693,7 +8834,16 @@ dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.2.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", ] [[package]] @@ -8721,7 +8871,7 @@ dependencies = [ "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki 0.103.3", - "security-framework", + "security-framework 3.2.0", "security-framework-sys", "webpki-root-certs 0.26.11", "windows-sys 0.59.0", @@ -10043,6 +10193,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework" version = "3.2.0" @@ -11956,6 +12119,12 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "synstructure" version = "0.12.6" @@ -11994,6 +12163,17 @@ dependencies = [ "windows 0.52.0", ] +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.9.4", + "system-configuration-sys 0.5.0", +] + [[package]] name = "system-configuration" version = "0.6.1" @@ -12002,7 +12182,17 @@ checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.9.1", "core-foundation 0.9.4", - "system-configuration-sys", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", ] [[package]] @@ -12275,6 +12465,16 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + [[package]] name = "tokio-retry" version = "0.3.0" diff --git a/Cargo.toml b/Cargo.toml index a8f1bf9055..fe0706215a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -185,6 +185,8 @@ sqlx = { version = "0.8.6", default-features = false, features = [ ] } derive-where = { version = "1.2.7", default-features = false } once_cell = { version = "1.21.3", default-features = false } +reqwest = { version = "0.11", features = ["json"] } +indexmap = { version = "2.0", features = ["serde"] } # substrate dependencies frame-benchmarking = { default-features = false, git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2506" } diff --git a/changelog.md b/changelog.md index ac823cebea..79fe781051 100644 --- a/changelog.md +++ b/changelog.md @@ -4,6 +4,16 @@ This changelog is based on [Keep A Changelog](https://keepachangelog.com/en/1.1. # Unreleased +## Added + +* New `automatic-generate-keys` command in `partner-chains-cli`: + * Allows users to generate and save session keys by connecting to a running node via RPC (`author_rotateKeys`). + * Decodes session keys using the runtime API and saves them to the keystore and `partner-chains-public-keys.json`. + * Provides fallback and error handling for various runtime formats. + * Fully integrated into the CLI as an alternative to `generate-keys` for all roles (Governance Authority, Registered Validator, Permissioned Validator). + * Comprehensive test suite for SCALE decoding and key handling logic. + * Help message and workflow documentation updated to show both key generation methods and when to use each. + ## Changed * **BREAKING**: Updated partner-chains-smart-contracts (raw-scripts) dependency to v8.0.0. diff --git a/toolkit/partner-chains-cli/Cargo.toml b/toolkit/partner-chains-cli/Cargo.toml index f7f288c5d5..8d1495f502 100644 --- a/toolkit/partner-chains-cli/Cargo.toml +++ b/toolkit/partner-chains-cli/Cargo.toml @@ -48,6 +48,9 @@ pallet-governed-map = { workspace = true, features = ["std"] } sp-governed-map = { workspace = true, features = ["std"] } sidechain-slots = { workspace = true } authority-selection-inherents = { workspace = true, features = ["std"] } +reqwest = { workspace = true } +indexmap = { workspace = true } +parity-scale-codec = { workspace = true } [dev-dependencies] frame-system = { workspace = true } diff --git a/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs b/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs new file mode 100644 index 0000000000..3aac8e82c5 --- /dev/null +++ b/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs @@ -0,0 +1,301 @@ +use crate::config::KEYS_FILE_PATH; +use crate::generate_keys::GenerateKeysConfig; +use crate::keystore::keystore_path; +use crate::{CmdRun, IOContext}; +use clap::Parser; +use indexmap::IndexMap; +use parity_scale_codec::{Decode, Encode}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +#[cfg(test)] +mod tests; + +#[derive(Serialize)] +struct JsonRpcRequest { + jsonrpc: String, + method: String, + params: serde_json::Value, + id: u64, +} + +#[derive(Deserialize)] +#[allow(dead_code)] +struct JsonRpcResponse { + jsonrpc: String, + result: Option, + error: Option, +} + +#[derive(Deserialize)] +struct JsonRpcError { + code: i32, + message: String, +} + +/// Command to automatically generate and save session keys by connecting to a node. +#[derive(Clone, Debug, Parser)] +pub struct AutomaticGenerateKeysCmd { + /// URL of the Substrate node RPC endpoint (e.g., http://localhost:9933). + #[arg(long = "url", default_value = "http://localhost:9933")] + node_url: String, +} + +impl CmdRun for AutomaticGenerateKeysCmd { + fn run(&self, context: &C) -> anyhow::Result<()> { + context.eprint("This 🧙 wizard will generate session keys by calling author_rotateKeys on the node, decode them, and save them to the keystore and partner-chains-public-keys.json file:"); + context.enewline(); + + let config = GenerateKeysConfig::load(context); + let keystore_path = keystore_path(&config.substrate_node_base_path); + context.eprint(&format!("🔑 Keystore path: {}", keystore_path)); + context.enewline(); + + let rt = tokio::runtime::Runtime::new()?; + rt.block_on(async { + let client = Client::new(); + + // Step 1: Call author_rotateKeys RPC to get session keys + let session_keys_hex = + call_author_rotate_keys(&client, &self.node_url, context).await?; + + // Step 2: Decode session keys using runtime API + let decoded_keys = + decode_session_keys(&client, &self.node_url, &session_keys_hex, context).await?; + + // Step 3: Save keys to keystore and JSON file + save_keys_to_storage(&decoded_keys, &session_keys_hex, &keystore_path, context).await?; + + context.print("🚀 All done!"); + Ok(()) + }) + } +} + +/// Call author_rotateKeys RPC method to generate new session keys +async fn call_author_rotate_keys( + client: &Client, + node_url: &str, + context: &C, +) -> anyhow::Result { + let session_keys_hex: String = + send_rpc_request(client, node_url, "author_rotateKeys", serde_json::json!([])) + .await + .map_err(|e| anyhow::anyhow!("Failed to call author_rotateKeys: {}", e))?; + + context.print(&format!("Raw session keys (hex): {}", session_keys_hex)); + Ok(session_keys_hex) +} + +/// Decode session keys using the runtime API +async fn decode_session_keys( + client: &Client, + node_url: &str, + session_keys_hex: &str, + _context: &C, +) -> anyhow::Result, Vec)>> { + // Decode hex string to bytes (remove "0x" prefix) + let session_keys = hex::decode(&session_keys_hex[2..]) + .map_err(|e| anyhow::anyhow!("Failed to decode session keys: {}", e))?; + + // Get finalized block hash + let block_hash: String = + send_rpc_request(client, node_url, "chain_getFinalizedHead", serde_json::json!([])) + .await + .map_err(|e| anyhow::anyhow!("Failed to get finalized block hash: {}", e))?; + + // Use SCALE-encoded parameter for modern Polkadot SDK method + let session_keys_param = format!("0x{}", hex::encode(session_keys.encode())); + let params = + serde_json::json!(["SessionKeys_decode_session_keys", session_keys_param, block_hash]); + + let decoded_keys: Vec<(Vec, Vec)> = + match send_rpc_request::(client, node_url, "state_call", params).await { + Ok(decoded_hex) => { + let bytes = hex::decode(&decoded_hex[2..]) + .map_err(|e| anyhow::anyhow!("Failed to decode runtime API response: {}", e))?; + + parse_decoded_keys_response(&bytes)? + }, + Err(e) => { + return Err(anyhow::anyhow!( + "Failed to call SessionKeys_decode_session_keys: {}", + e + )); + }, + }; + + Ok(decoded_keys) +} + +/// Parse the SCALE-encoded response from the runtime API +fn parse_decoded_keys_response(bytes: &[u8]) -> anyhow::Result, Vec)>> { + // Try decoding as Option, u32)>> (newer Polkadot SDK) + let mut cursor = bytes; + match , u32)>>>::decode(&mut cursor) { + Ok(Some(vec)) if cursor.is_empty() => { + return Ok(vec + .into_iter() + .map(|(pubkey, key_type)| (key_type.to_le_bytes().to_vec(), pubkey)) + .collect()); + }, + Ok(None) if cursor.is_empty() => { + // Successfully decoded as None (empty result) + return Ok(Vec::new()); + }, + _ => { + // Try Vec<(Vec, Vec)> (legacy format) + let mut cursor_alt = bytes; + match , Vec)>>::decode(&mut cursor_alt) { + Ok(vec) if cursor_alt.is_empty() => return Ok(vec), + _ => { + // Try Option, Vec)>> (alternative legacy) + let mut cursor_opt = bytes; + match , Vec)>>>::decode(&mut cursor_opt) { + Ok(Some(vec)) if cursor_opt.is_empty() => return Ok(vec), + Ok(None) if cursor_opt.is_empty() => return Ok(Vec::new()), + _ => { + return Err(anyhow::anyhow!("Failed to SCALE decode keys")); + }, + } + }, + } + }, + } +} + +/// Save keys to keystore and JSON file +async fn save_keys_to_storage( + decoded_keys: &[(Vec, Vec)], + session_keys_hex: &str, + keystore_path: &str, + context: &C, +) -> anyhow::Result<()> { + // Create keystore directory - in tests this is mocked, in real usage it creates the directory + let _ = context.run_command(&format!("mkdir -p {}", keystore_path)); + + let mut key_map: IndexMap = IndexMap::new(); + + if !decoded_keys.is_empty() { + save_decoded_keys(decoded_keys, keystore_path, &mut key_map, context)?; + } else { + save_raw_keys_as_fallback(session_keys_hex, keystore_path, &mut key_map, context)?; + } + + save_keys_to_json_file(&key_map, context)?; + + // Print decoded keys for reference + context.print(&format!("Decoded session keys: {:?}", key_map)); + + Ok(()) +} + +/// Save successfully decoded keys to keystore +fn save_decoded_keys( + decoded_keys: &[(Vec, Vec)], + keystore_path: &str, + key_map: &mut IndexMap, + context: &C, +) -> anyhow::Result<()> { + for (key_type, public_key) in decoded_keys { + // Convert key type to string for JSON and display + let key_type_str = String::from_utf8(key_type.clone()) + .map_err(|e| anyhow::anyhow!("Invalid key type encoding: {}", e))?; + let public_key_hex = format!("0x{}", hex::encode(public_key)); + + // Save to keystore with key_type_hex + public_key format + let key_type_hex = hex::encode(key_type); + let store_path = format!("{}/{}{}", keystore_path, key_type_hex, hex::encode(public_key)); + context.write_file(&store_path, &hex::encode(public_key)); + context.print(&format!("Saved {} key to {}", key_type_str, store_path)); + + // Store in key map for JSON output + key_map.insert(key_type_str, public_key_hex); + } + Ok(()) +} + +/// Save raw session keys as fallback when decoding fails +fn save_raw_keys_as_fallback( + session_keys_hex: &str, + keystore_path: &str, + key_map: &mut IndexMap, + context: &C, +) -> anyhow::Result<()> { + context.eprint("⚠️ No session keys decoded. Saving raw keys as fallback."); + context.eprint("Please verify the node's runtime configuration by fetching metadata:"); + context.eprint("curl -X POST -H 'Content-Type: application/json' -d '{\"jsonrpc\":\"2.0\",\"method\":\"state_getMetadata\",\"id\":1}' http://localhost:9933 > metadata.json"); + context.eprint("Look for the Session pallet and SessionKeys type to determine key order (e.g., aura, gran, imon)."); + + let session_keys = hex::decode(&session_keys_hex[2..]) + .map_err(|e| anyhow::anyhow!("Failed to decode session keys: {}", e))?; + + let raw_key_hex = format!("0x{}", hex::encode(&session_keys)); + let store_path = format!("{}/raw{}", keystore_path, hex::encode(&session_keys)); + context.write_file(&store_path, &hex::encode(&session_keys)); + context.print(&format!("Saved raw session keys to {}", store_path)); + key_map.insert("raw".to_string(), raw_key_hex); + + Ok(()) +} + +/// Save keys to JSON file +fn save_keys_to_json_file( + key_map: &IndexMap, + context: &C, +) -> anyhow::Result<()> { + if !key_map.is_empty() { + if prompt_can_write("keys file", KEYS_FILE_PATH, context) { + let public_keys_json = serde_json::to_string_pretty(key_map) + .map_err(|e| anyhow::anyhow!("Failed to serialize public keys: {}", e))?; + context.write_file(KEYS_FILE_PATH, &public_keys_json); + context.print(&format!( + "🔑 Public keys saved to {}:\n{}", + KEYS_FILE_PATH, public_keys_json + )); + context.print("You may share these public keys with your chain governance authority."); + } else { + context.print("Refusing to overwrite keys file - skipping JSON save"); + } + } else { + context.print("Warning: No keys decoded, skipping JSON save"); + } + Ok(()) +} + +/// Helper to prompt if the keys file can be written +fn prompt_can_write(file_desc: &str, file_path: &str, context: &C) -> bool { + if context.file_exists(file_path) { + context.prompt_yes_no( + &format!("A {} already exists at {} - overwrite it?", file_desc, file_path), + false, + ) + } else { + true + } +} + +/// Helper to send a JSON-RPC request +async fn send_rpc_request Deserialize<'de>>( + client: &Client, + url: &str, + method: &str, + params: serde_json::Value, +) -> Result> { + let request = + JsonRpcRequest { jsonrpc: "2.0".to_string(), method: method.to_string(), params, id: 1 }; + + let response = client + .post(url) + .json(&request) + .send() + .await? + .json::>() + .await?; + + if let Some(error) = response.error { + return Err(format!("RPC error: {} (code: {})", error.message, error.code).into()); + } + + response.result.ok_or_else(|| "No result in response".into()) +} diff --git a/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs b/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs new file mode 100644 index 0000000000..f1e675154e --- /dev/null +++ b/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs @@ -0,0 +1,71 @@ +use super::*; +use anyhow::Result; +use parity_scale_codec::Encode; + +#[tokio::test] +async fn test_parse_decoded_keys_response_modern_format() -> Result<()> { + // Test parsing modern Polkadot SDK format: Option, u32)>> + let key_data = vec![ + (b"aura_public_key".to_vec(), 0x61757261u32), // 'aura' as u32 + (b"grandpa_public_key".to_vec(), 0x6772616eu32), // 'gran' as u32 + ]; + let encoded = Some(key_data).encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, 0x61757261u32.to_le_bytes().to_vec()); + assert_eq!(result[0].1, b"aura_public_key".to_vec()); + assert_eq!(result[1].0, 0x6772616eu32.to_le_bytes().to_vec()); + assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); + + Ok(()) +} + +#[tokio::test] +async fn test_parse_decoded_keys_response_legacy_format() -> Result<()> { + // Test parsing legacy format: Vec<(Vec, Vec)> + let key_data = vec![ + (b"aura".to_vec(), b"aura_public_key".to_vec()), + (b"gran".to_vec(), b"grandpa_public_key".to_vec()), + ]; + let encoded = key_data.encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, b"aura".to_vec()); + assert_eq!(result[0].1, b"aura_public_key".to_vec()); + assert_eq!(result[1].0, b"gran".to_vec()); + assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); + + Ok(()) +} + +#[tokio::test] +async fn test_parse_decoded_keys_response_empty() -> Result<()> { + // Test parsing empty response: Option, u32)>> = None + let encoded = Option::, u32)>>::None.encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 0); + + Ok(()) +} + +#[tokio::test] +async fn test_parse_decoded_keys_response_invalid() { + // Test parsing invalid data + let invalid_data = b"invalid_data"; + + let result = parse_decoded_keys_response(invalid_data); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("Failed to SCALE decode keys")); +} + +// The core SCALE decoding functionality is thoroughly tested above. +// The MockIO system in this codebase doesn't provide comprehensive file operation mocking, +// so more detailed integration tests would require a separate test framework or actual +// HTTP server setup for testing the full RPC workflow. \ No newline at end of file diff --git a/toolkit/partner-chains-cli/src/lib.rs b/toolkit/partner-chains-cli/src/lib.rs index ac2caa8b78..80386b6f41 100644 --- a/toolkit/partner-chains-cli/src/lib.rs +++ b/toolkit/partner-chains-cli/src/lib.rs @@ -2,6 +2,7 @@ //! Interacts with Smart Contracts using [`partner_chains_cardano_offchain`] crate. #![deny(missing_docs)] +mod automatic_generate_keys; mod cardano_key; mod cmd_traits; mod config; @@ -56,6 +57,8 @@ impl CommonArguments { pub enum Command { /// This wizard generates the keys required for operating a partner-chains node, stores them in the keystore directory, and prints the public keys and keystore location. GenerateKeys(generate_keys::GenerateKeysCmd), + /// Command to automatically generate and save session keys by connecting to a node. + AutomaticGenerateKeys(automatic_generate_keys::AutomaticGenerateKeysCmd), /// Wizard to obtain the configuration needed for the partner-chain governance authority. This configuration should be shared with chain participants and used to create the chain spec json file. PrepareConfiguration(prepare_configuration::PrepareConfigurationCmd), /// Wizard for setting D-parameter and Permissioned Candidates list on the main chain. @@ -81,6 +84,7 @@ impl Command { pub fn run(&self, context: &C) -> anyhow::Result<()> { match self { Command::GenerateKeys(cmd) => cmd.run(context), + Command::AutomaticGenerateKeys(cmd) => cmd.run(context), Command::PrepareConfiguration(cmd) => cmd.run(context), Command::CreateChainSpec(cmd) => cmd.run(context), Command::SetupMainChainState(cmd) => cmd.run(context), @@ -108,29 +112,32 @@ const HELP_EXAMPLES: &str = r#" ║ this order may vary depending on specific deployment scenarios. ║ ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Governance Authority: ║ -║ 1. generate-keys : generate necessary cryptographic keys ║ -║ 2. prepare-configuration : set up the partner chain configuration ║ -║ 3. setup-main-chain-state: configure the main chain parameters ║ -║ 4. create-chain-spec : create the chain specification file ║ -║ 5. start-node : start the validator node ║ +║ 1. generate-keys : generate necessary cryptographic keys ║ +║ 2. automatic-generate-keys : generate keys via running node RPC ║ +║ 3. prepare-configuration : set up the partner chain configuration ║ +║ 4. setup-main-chain-state : configure the main chain parameters ║ +║ 5. create-chain-spec : create the chain specification file ║ +║ 6. start-node : start the validator node ║ ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Registered Validator: ║ -║ 1. generate-keys : generate validator keys ║ -║ 2. register1 : initiate the registration process ║ -║ 3. register2 : complete registration with cold keys ║ -║ 4. register3 : finalize registration ║ -║ 5. start-node : start the validator node ║ -║ 6. deregister : cancel registration ║ +║ 1. generate-keys : generate validator keys ║ +║ 2. automatic-generate-keys : generate keys via running node RPC ║ +║ 3. register1 : initiate the registration process ║ +║ 4. register2 : complete registration with cold keys ║ +║ 5. register3 : finalize registration ║ +║ 6. start-node : start the validator node ║ +║ 7. deregister : cancel registration ║ ║ ║ ║ Note: This sequence assumes that the chain-spec.json and ║ ║ pc-chain-config.json files have been obtained from ║ ║ the Governance Authority and are present in the working directory. ║ ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Permissioned Validator: ║ -║ 1. generate-keys : generate validator keys ║ -║ 2. start-node : start the validator node ║ +║ 1. generate-keys : generate validator keys ║ +║ 2. automatic-generate-keys : generate keys via running node RPC ║ +║ 3. start-node : start the validator node ║ ║ ║ -║ Note: After executing 'generate-keys', the generated keys must be shared ║ +║ Note: After executing key generation, the generated keys must be shared ║ ║ with the Governance Authority. The 'start-node' command can only be ║ ║ executed after the Governance Authority has established the partner ║ ║ chain on the main network. This sequence assumes that the ║ From c8e5672a37f0ff7ed4522d7793e301d331f24553 Mon Sep 17 00:00:00 2001 From: ladamesny Date: Mon, 11 Aug 2025 15:18:45 -0400 Subject: [PATCH 2/3] feat: combine automatic-generate-keys functionality into original generate keys command --- Cargo.lock | 2 - changelog.md | 8 +- toolkit/partner-chains-cli/Cargo.toml | 2 +- .../src/automatic_generate_keys/mod.rs | 301 ------------- .../src/automatic_generate_keys/tests.rs | 71 ---- .../src/generate_keys/mod.rs | 402 ++++++++++++++++-- .../src/generate_keys/tests.rs | 80 +++- toolkit/partner-chains-cli/src/lib.rs | 9 +- 8 files changed, 453 insertions(+), 422 deletions(-) delete mode 100644 toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs delete mode 100644 toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs diff --git a/Cargo.lock b/Cargo.lock index 8e827152c9..18f65bb726 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4288,7 +4288,6 @@ checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", "hashbrown 0.15.4", - "serde", ] [[package]] @@ -7159,7 +7158,6 @@ dependencies = [ "frame-system", "hex", "hex-literal 1.0.0", - "indexmap 2.10.0", "inquire", "libp2p-identity", "log", diff --git a/changelog.md b/changelog.md index 79fe781051..4161c4cd07 100644 --- a/changelog.md +++ b/changelog.md @@ -6,11 +6,11 @@ This changelog is based on [Keep A Changelog](https://keepachangelog.com/en/1.1. ## Added -* New `automatic-generate-keys` command in `partner-chains-cli`: - * Allows users to generate and save session keys by connecting to a running node via RPC (`author_rotateKeys`). - * Decodes session keys using the runtime API and saves them to the keystore and `partner-chains-public-keys.json`. +* Enhanced `generate-keys` command in `partner-chains-cli`: + * Added `--url` parameter to enable automatic key generation via RPC (`author_rotateKeys`) when connecting to a running node. + * When `--url` is provided, the command automatically generates session keys by calling the node's RPC endpoint, decodes them using the runtime API, and saves them to the keystore and `partner-chains-public-keys.json`. * Provides fallback and error handling for various runtime formats. - * Fully integrated into the CLI as an alternative to `generate-keys` for all roles (Governance Authority, Registered Validator, Permissioned Validator). + * Maintains backward compatibility - when called without `--url`, it uses the traditional local key generation method. * Comprehensive test suite for SCALE decoding and key handling logic. * Help message and workflow documentation updated to show both key generation methods and when to use each. diff --git a/toolkit/partner-chains-cli/Cargo.toml b/toolkit/partner-chains-cli/Cargo.toml index 8d1495f502..684cfb0e52 100644 --- a/toolkit/partner-chains-cli/Cargo.toml +++ b/toolkit/partner-chains-cli/Cargo.toml @@ -49,7 +49,7 @@ sp-governed-map = { workspace = true, features = ["std"] } sidechain-slots = { workspace = true } authority-selection-inherents = { workspace = true, features = ["std"] } reqwest = { workspace = true } -indexmap = { workspace = true } + parity-scale-codec = { workspace = true } [dev-dependencies] diff --git a/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs b/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs deleted file mode 100644 index 3aac8e82c5..0000000000 --- a/toolkit/partner-chains-cli/src/automatic_generate_keys/mod.rs +++ /dev/null @@ -1,301 +0,0 @@ -use crate::config::KEYS_FILE_PATH; -use crate::generate_keys::GenerateKeysConfig; -use crate::keystore::keystore_path; -use crate::{CmdRun, IOContext}; -use clap::Parser; -use indexmap::IndexMap; -use parity_scale_codec::{Decode, Encode}; -use reqwest::Client; -use serde::{Deserialize, Serialize}; - -#[cfg(test)] -mod tests; - -#[derive(Serialize)] -struct JsonRpcRequest { - jsonrpc: String, - method: String, - params: serde_json::Value, - id: u64, -} - -#[derive(Deserialize)] -#[allow(dead_code)] -struct JsonRpcResponse { - jsonrpc: String, - result: Option, - error: Option, -} - -#[derive(Deserialize)] -struct JsonRpcError { - code: i32, - message: String, -} - -/// Command to automatically generate and save session keys by connecting to a node. -#[derive(Clone, Debug, Parser)] -pub struct AutomaticGenerateKeysCmd { - /// URL of the Substrate node RPC endpoint (e.g., http://localhost:9933). - #[arg(long = "url", default_value = "http://localhost:9933")] - node_url: String, -} - -impl CmdRun for AutomaticGenerateKeysCmd { - fn run(&self, context: &C) -> anyhow::Result<()> { - context.eprint("This 🧙 wizard will generate session keys by calling author_rotateKeys on the node, decode them, and save them to the keystore and partner-chains-public-keys.json file:"); - context.enewline(); - - let config = GenerateKeysConfig::load(context); - let keystore_path = keystore_path(&config.substrate_node_base_path); - context.eprint(&format!("🔑 Keystore path: {}", keystore_path)); - context.enewline(); - - let rt = tokio::runtime::Runtime::new()?; - rt.block_on(async { - let client = Client::new(); - - // Step 1: Call author_rotateKeys RPC to get session keys - let session_keys_hex = - call_author_rotate_keys(&client, &self.node_url, context).await?; - - // Step 2: Decode session keys using runtime API - let decoded_keys = - decode_session_keys(&client, &self.node_url, &session_keys_hex, context).await?; - - // Step 3: Save keys to keystore and JSON file - save_keys_to_storage(&decoded_keys, &session_keys_hex, &keystore_path, context).await?; - - context.print("🚀 All done!"); - Ok(()) - }) - } -} - -/// Call author_rotateKeys RPC method to generate new session keys -async fn call_author_rotate_keys( - client: &Client, - node_url: &str, - context: &C, -) -> anyhow::Result { - let session_keys_hex: String = - send_rpc_request(client, node_url, "author_rotateKeys", serde_json::json!([])) - .await - .map_err(|e| anyhow::anyhow!("Failed to call author_rotateKeys: {}", e))?; - - context.print(&format!("Raw session keys (hex): {}", session_keys_hex)); - Ok(session_keys_hex) -} - -/// Decode session keys using the runtime API -async fn decode_session_keys( - client: &Client, - node_url: &str, - session_keys_hex: &str, - _context: &C, -) -> anyhow::Result, Vec)>> { - // Decode hex string to bytes (remove "0x" prefix) - let session_keys = hex::decode(&session_keys_hex[2..]) - .map_err(|e| anyhow::anyhow!("Failed to decode session keys: {}", e))?; - - // Get finalized block hash - let block_hash: String = - send_rpc_request(client, node_url, "chain_getFinalizedHead", serde_json::json!([])) - .await - .map_err(|e| anyhow::anyhow!("Failed to get finalized block hash: {}", e))?; - - // Use SCALE-encoded parameter for modern Polkadot SDK method - let session_keys_param = format!("0x{}", hex::encode(session_keys.encode())); - let params = - serde_json::json!(["SessionKeys_decode_session_keys", session_keys_param, block_hash]); - - let decoded_keys: Vec<(Vec, Vec)> = - match send_rpc_request::(client, node_url, "state_call", params).await { - Ok(decoded_hex) => { - let bytes = hex::decode(&decoded_hex[2..]) - .map_err(|e| anyhow::anyhow!("Failed to decode runtime API response: {}", e))?; - - parse_decoded_keys_response(&bytes)? - }, - Err(e) => { - return Err(anyhow::anyhow!( - "Failed to call SessionKeys_decode_session_keys: {}", - e - )); - }, - }; - - Ok(decoded_keys) -} - -/// Parse the SCALE-encoded response from the runtime API -fn parse_decoded_keys_response(bytes: &[u8]) -> anyhow::Result, Vec)>> { - // Try decoding as Option, u32)>> (newer Polkadot SDK) - let mut cursor = bytes; - match , u32)>>>::decode(&mut cursor) { - Ok(Some(vec)) if cursor.is_empty() => { - return Ok(vec - .into_iter() - .map(|(pubkey, key_type)| (key_type.to_le_bytes().to_vec(), pubkey)) - .collect()); - }, - Ok(None) if cursor.is_empty() => { - // Successfully decoded as None (empty result) - return Ok(Vec::new()); - }, - _ => { - // Try Vec<(Vec, Vec)> (legacy format) - let mut cursor_alt = bytes; - match , Vec)>>::decode(&mut cursor_alt) { - Ok(vec) if cursor_alt.is_empty() => return Ok(vec), - _ => { - // Try Option, Vec)>> (alternative legacy) - let mut cursor_opt = bytes; - match , Vec)>>>::decode(&mut cursor_opt) { - Ok(Some(vec)) if cursor_opt.is_empty() => return Ok(vec), - Ok(None) if cursor_opt.is_empty() => return Ok(Vec::new()), - _ => { - return Err(anyhow::anyhow!("Failed to SCALE decode keys")); - }, - } - }, - } - }, - } -} - -/// Save keys to keystore and JSON file -async fn save_keys_to_storage( - decoded_keys: &[(Vec, Vec)], - session_keys_hex: &str, - keystore_path: &str, - context: &C, -) -> anyhow::Result<()> { - // Create keystore directory - in tests this is mocked, in real usage it creates the directory - let _ = context.run_command(&format!("mkdir -p {}", keystore_path)); - - let mut key_map: IndexMap = IndexMap::new(); - - if !decoded_keys.is_empty() { - save_decoded_keys(decoded_keys, keystore_path, &mut key_map, context)?; - } else { - save_raw_keys_as_fallback(session_keys_hex, keystore_path, &mut key_map, context)?; - } - - save_keys_to_json_file(&key_map, context)?; - - // Print decoded keys for reference - context.print(&format!("Decoded session keys: {:?}", key_map)); - - Ok(()) -} - -/// Save successfully decoded keys to keystore -fn save_decoded_keys( - decoded_keys: &[(Vec, Vec)], - keystore_path: &str, - key_map: &mut IndexMap, - context: &C, -) -> anyhow::Result<()> { - for (key_type, public_key) in decoded_keys { - // Convert key type to string for JSON and display - let key_type_str = String::from_utf8(key_type.clone()) - .map_err(|e| anyhow::anyhow!("Invalid key type encoding: {}", e))?; - let public_key_hex = format!("0x{}", hex::encode(public_key)); - - // Save to keystore with key_type_hex + public_key format - let key_type_hex = hex::encode(key_type); - let store_path = format!("{}/{}{}", keystore_path, key_type_hex, hex::encode(public_key)); - context.write_file(&store_path, &hex::encode(public_key)); - context.print(&format!("Saved {} key to {}", key_type_str, store_path)); - - // Store in key map for JSON output - key_map.insert(key_type_str, public_key_hex); - } - Ok(()) -} - -/// Save raw session keys as fallback when decoding fails -fn save_raw_keys_as_fallback( - session_keys_hex: &str, - keystore_path: &str, - key_map: &mut IndexMap, - context: &C, -) -> anyhow::Result<()> { - context.eprint("⚠️ No session keys decoded. Saving raw keys as fallback."); - context.eprint("Please verify the node's runtime configuration by fetching metadata:"); - context.eprint("curl -X POST -H 'Content-Type: application/json' -d '{\"jsonrpc\":\"2.0\",\"method\":\"state_getMetadata\",\"id\":1}' http://localhost:9933 > metadata.json"); - context.eprint("Look for the Session pallet and SessionKeys type to determine key order (e.g., aura, gran, imon)."); - - let session_keys = hex::decode(&session_keys_hex[2..]) - .map_err(|e| anyhow::anyhow!("Failed to decode session keys: {}", e))?; - - let raw_key_hex = format!("0x{}", hex::encode(&session_keys)); - let store_path = format!("{}/raw{}", keystore_path, hex::encode(&session_keys)); - context.write_file(&store_path, &hex::encode(&session_keys)); - context.print(&format!("Saved raw session keys to {}", store_path)); - key_map.insert("raw".to_string(), raw_key_hex); - - Ok(()) -} - -/// Save keys to JSON file -fn save_keys_to_json_file( - key_map: &IndexMap, - context: &C, -) -> anyhow::Result<()> { - if !key_map.is_empty() { - if prompt_can_write("keys file", KEYS_FILE_PATH, context) { - let public_keys_json = serde_json::to_string_pretty(key_map) - .map_err(|e| anyhow::anyhow!("Failed to serialize public keys: {}", e))?; - context.write_file(KEYS_FILE_PATH, &public_keys_json); - context.print(&format!( - "🔑 Public keys saved to {}:\n{}", - KEYS_FILE_PATH, public_keys_json - )); - context.print("You may share these public keys with your chain governance authority."); - } else { - context.print("Refusing to overwrite keys file - skipping JSON save"); - } - } else { - context.print("Warning: No keys decoded, skipping JSON save"); - } - Ok(()) -} - -/// Helper to prompt if the keys file can be written -fn prompt_can_write(file_desc: &str, file_path: &str, context: &C) -> bool { - if context.file_exists(file_path) { - context.prompt_yes_no( - &format!("A {} already exists at {} - overwrite it?", file_desc, file_path), - false, - ) - } else { - true - } -} - -/// Helper to send a JSON-RPC request -async fn send_rpc_request Deserialize<'de>>( - client: &Client, - url: &str, - method: &str, - params: serde_json::Value, -) -> Result> { - let request = - JsonRpcRequest { jsonrpc: "2.0".to_string(), method: method.to_string(), params, id: 1 }; - - let response = client - .post(url) - .json(&request) - .send() - .await? - .json::>() - .await?; - - if let Some(error) = response.error { - return Err(format!("RPC error: {} (code: {})", error.message, error.code).into()); - } - - response.result.ok_or_else(|| "No result in response".into()) -} diff --git a/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs b/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs deleted file mode 100644 index f1e675154e..0000000000 --- a/toolkit/partner-chains-cli/src/automatic_generate_keys/tests.rs +++ /dev/null @@ -1,71 +0,0 @@ -use super::*; -use anyhow::Result; -use parity_scale_codec::Encode; - -#[tokio::test] -async fn test_parse_decoded_keys_response_modern_format() -> Result<()> { - // Test parsing modern Polkadot SDK format: Option, u32)>> - let key_data = vec![ - (b"aura_public_key".to_vec(), 0x61757261u32), // 'aura' as u32 - (b"grandpa_public_key".to_vec(), 0x6772616eu32), // 'gran' as u32 - ]; - let encoded = Some(key_data).encode(); - - let result = parse_decoded_keys_response(&encoded)?; - - assert_eq!(result.len(), 2); - assert_eq!(result[0].0, 0x61757261u32.to_le_bytes().to_vec()); - assert_eq!(result[0].1, b"aura_public_key".to_vec()); - assert_eq!(result[1].0, 0x6772616eu32.to_le_bytes().to_vec()); - assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); - - Ok(()) -} - -#[tokio::test] -async fn test_parse_decoded_keys_response_legacy_format() -> Result<()> { - // Test parsing legacy format: Vec<(Vec, Vec)> - let key_data = vec![ - (b"aura".to_vec(), b"aura_public_key".to_vec()), - (b"gran".to_vec(), b"grandpa_public_key".to_vec()), - ]; - let encoded = key_data.encode(); - - let result = parse_decoded_keys_response(&encoded)?; - - assert_eq!(result.len(), 2); - assert_eq!(result[0].0, b"aura".to_vec()); - assert_eq!(result[0].1, b"aura_public_key".to_vec()); - assert_eq!(result[1].0, b"gran".to_vec()); - assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); - - Ok(()) -} - -#[tokio::test] -async fn test_parse_decoded_keys_response_empty() -> Result<()> { - // Test parsing empty response: Option, u32)>> = None - let encoded = Option::, u32)>>::None.encode(); - - let result = parse_decoded_keys_response(&encoded)?; - - assert_eq!(result.len(), 0); - - Ok(()) -} - -#[tokio::test] -async fn test_parse_decoded_keys_response_invalid() { - // Test parsing invalid data - let invalid_data = b"invalid_data"; - - let result = parse_decoded_keys_response(invalid_data); - - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Failed to SCALE decode keys")); -} - -// The core SCALE decoding functionality is thoroughly tested above. -// The MockIO system in this codebase doesn't provide comprehensive file operation mocking, -// so more detailed integration tests would require a separate test framework or actual -// HTTP server setup for testing the full RPC workflow. \ No newline at end of file diff --git a/toolkit/partner-chains-cli/src/generate_keys/mod.rs b/toolkit/partner-chains-cli/src/generate_keys/mod.rs index b91c2d2912..e3f64b5324 100644 --- a/toolkit/partner-chains-cli/src/generate_keys/mod.rs +++ b/toolkit/partner-chains-cli/src/generate_keys/mod.rs @@ -4,7 +4,9 @@ use crate::keystore::*; use crate::permissioned_candidates::PermissionedCandidateKeys; use crate::{config::config_fields, *}; use anyhow::{Context, anyhow}; -use serde::Deserialize; +use parity_scale_codec::{Decode, Encode}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; use sidechain_domain::byte_string::ByteString; use sp_core::{Pair, ed25519}; use std::collections::BTreeMap; @@ -17,6 +19,10 @@ mod tests; pub struct GenerateKeysCmd { #[clap(skip)] _phantom: PhantomData, + /// URL of the Substrate node RPC endpoint for automatic key generation (e.g., http://localhost:9933). + /// If not provided, keys will be generated locally using the traditional method. + #[arg(long = "url")] + node_url: Option, } #[derive(Debug)] @@ -50,34 +56,68 @@ pub(crate) fn network_key_path(substrate_node_base_path: &str) -> String { impl CmdRun for GenerateKeysCmd { fn run(&self, context: &C) -> anyhow::Result<()> { - context.eprint( - "This 🧙 wizard will generate the following keys and save them to your node's keystore:", - ); - context.eprint(&format!("→ {} {} key", CROSS_CHAIN.scheme, CROSS_CHAIN.name)); - for key_def in T::key_definitions() { - context.eprint(&format!("→ {} {} key", key_def.scheme, key_def.name)); - } - context.eprint("It will also generate a network key for your node if needed."); - context.enewline(); + match &self.node_url { + Some(url) => { + // Automatic key generation via RPC + context.eprint("This 🧙 wizard will generate session keys by calling author_rotateKeys on the node, decode them, and save them to the keystore and partner-chains-public-keys.json file:"); + context.enewline(); - let chain_spec_path = write_temp_chain_spec( - context, - T::create_chain_spec(&CreateChainSpecConfig::::default()), - ); + let config = GenerateKeysConfig::load(context); + let keystore_path = keystore_path(&config.substrate_node_base_path); + context.eprint(&format!("🔑 Keystore path: {}", keystore_path)); + context.enewline(); - let config = GenerateKeysConfig::load(context); - context.enewline(); + let rt = tokio::runtime::Runtime::new()?; + rt.block_on(async { + let client = Client::new(); - generate_spo_keys::(&config, &chain_spec_path, context)?; + // Step 1: Call author_rotateKeys RPC to get session keys + let session_keys_hex = + call_author_rotate_keys(&client, url, context).await?; - context.enewline(); + // Step 2: Decode session keys using runtime API + let decoded_keys = + decode_session_keys(&client, url, &session_keys_hex, context).await?; - generate_network_key(&config, &chain_spec_path, context)?; - context.enewline(); + // Step 3: Save keys to keystore and JSON file + save_keys_to_storage(&decoded_keys, &session_keys_hex, &keystore_path, context).await?; + + context.print("🚀 All done!"); + Ok(()) + }) + }, + None => { + // Traditional key generation + context.eprint( + "This 🧙 wizard will generate the following keys and save them to your node's keystore:", + ); + context.eprint(&format!("→ {} {} key", CROSS_CHAIN.scheme, CROSS_CHAIN.name)); + for key_def in T::key_definitions() { + context.eprint(&format!("→ {} {} key", key_def.scheme, key_def.name)); + } + context.eprint("It will also generate a network key for your node if needed."); + context.enewline(); - context.eprint("🚀 All done!"); - context.delete_file(&chain_spec_path)?; - Ok(()) + let chain_spec_path = write_temp_chain_spec( + context, + T::create_chain_spec(&CreateChainSpecConfig::::default()), + ); + + let config = GenerateKeysConfig::load(context); + context.enewline(); + + generate_spo_keys::(&config, &chain_spec_path, context)?; + + context.enewline(); + + generate_network_key(&config, &chain_spec_path, context)?; + context.enewline(); + + context.eprint("🚀 All done!"); + context.delete_file(&chain_spec_path)?; + Ok(()) + } + } } } @@ -107,18 +147,12 @@ pub(crate) fn generate_spo_keys( keys.insert(key_definition.key_type.to_owned(), generated_key); } - let public_keys_json = - serde_json::to_string_pretty(&PermissionedCandidateKeys { partner_chains_key, keys }) - .expect("PermissionedCandidateKeys have only UTF-8 encodable ids"); - context.write_file(KEYS_FILE_PATH, &public_keys_json); - - context.eprint(&format!( - "🔑 The following public keys were generated and saved to the {} file:", - KEYS_FILE_PATH, - )); - context.print(&(public_keys_json).to_string()); - context.eprint("You may share them with your chain governance authority"); - context.eprint("if you wish to be included as a permissioned candidate."); + let permissioned_keys = PermissionedCandidateKeys { partner_chains_key, keys }; + + // Use the shared function to save to JSON file + save_permissioned_keys_to_json_file(&permissioned_keys, context)?; + + // The save_permissioned_keys_to_json_file function already handles the output } else { context.eprint("Refusing to overwrite keys file - skipping"); } @@ -251,3 +285,301 @@ fn generate_or_load_key( }; ByteString::decode_hex(&key?).map_err(|e| anyhow!(e)) } + +// JSON-RPC structures for automatic key generation +#[derive(Serialize)] +struct JsonRpcRequest { + jsonrpc: String, + method: String, + params: serde_json::Value, + id: u64, +} + +#[derive(Deserialize)] +#[allow(dead_code)] +struct JsonRpcResponse { + jsonrpc: String, + result: Option, + error: Option, +} + +#[derive(Deserialize)] +struct JsonRpcError { + code: i32, + message: String, +} + +/// Call author_rotateKeys RPC method to generate new session keys +async fn call_author_rotate_keys( + client: &Client, + node_url: &str, + context: &C, +) -> anyhow::Result { + let session_keys_hex: String = + send_rpc_request(client, node_url, "author_rotateKeys", serde_json::json!([])) + .await + .map_err(|e| anyhow!("Failed to call author_rotateKeys: {}", e))?; + + context.print(&format!("Raw session keys (hex): {}", session_keys_hex)); + Ok(session_keys_hex) +} + +/// Decode session keys using the runtime API +async fn decode_session_keys( + client: &Client, + node_url: &str, + session_keys_hex: &str, + _context: &C, +) -> anyhow::Result, Vec)>> { + // Decode hex string to bytes (remove "0x" prefix) + let session_keys = hex::decode(&session_keys_hex[2..]) + .map_err(|e| anyhow!("Failed to decode session keys: {}", e))?; + + // Get finalized block hash + let block_hash: String = + send_rpc_request(client, node_url, "chain_getFinalizedHead", serde_json::json!([])) + .await + .map_err(|e| anyhow!("Failed to get finalized block hash: {}", e))?; + + // Use SCALE-encoded parameter for modern Polkadot SDK method + let session_keys_param = format!("0x{}", hex::encode(session_keys.encode())); + let params = + serde_json::json!(["SessionKeys_decode_session_keys", session_keys_param, block_hash]); + + let decoded_keys: Vec<(Vec, Vec)> = + match send_rpc_request::(client, node_url, "state_call", params).await { + Ok(decoded_hex) => { + let bytes = hex::decode(&decoded_hex[2..]) + .map_err(|e| anyhow!("Failed to decode runtime API response: {}", e))?; + + parse_decoded_keys_response(&bytes)? + }, + Err(e) => { + return Err(anyhow!( + "Failed to call SessionKeys_decode_session_keys: {}", + e + )); + }, + }; + + Ok(decoded_keys) +} + +/// Parse the SCALE-encoded response from the runtime API +fn parse_decoded_keys_response(bytes: &[u8]) -> anyhow::Result, Vec)>> { + // Try decoding as Option, u32)>> (newer Polkadot SDK) + let mut cursor = bytes; + match , u32)>>>::decode(&mut cursor) { + Ok(Some(vec)) if cursor.is_empty() => { + return Ok(vec + .into_iter() + .map(|(pubkey, key_type)| (key_type.to_le_bytes().to_vec(), pubkey)) + .collect()); + }, + Ok(None) if cursor.is_empty() => { + // Successfully decoded as None (empty result) + return Ok(Vec::new()); + }, + _ => { + // Try Vec<(Vec, Vec)> (legacy format) + let mut cursor_alt = bytes; + match , Vec)>>::decode(&mut cursor_alt) { + Ok(vec) if cursor_alt.is_empty() => return Ok(vec), + _ => { + // Try Option, Vec)>> (alternative legacy) + let mut cursor_opt = bytes; + match , Vec)>>>::decode(&mut cursor_opt) { + Ok(Some(vec)) if cursor_opt.is_empty() => Ok(vec), + Ok(None) if cursor_opt.is_empty() => Ok(Vec::new()), + _ => { + return Err(anyhow!("Failed to SCALE decode keys")); + }, + } + }, + } + }, + } +} + +/// Save keys to keystore and JSON file +async fn save_keys_to_storage( + decoded_keys: &[(Vec, Vec)], + session_keys_hex: &str, + keystore_path: &str, + context: &C, +) -> anyhow::Result<()> { + // Create keystore directory - in tests this is mocked, in real usage it creates the directory + let _ = context.run_command(&format!("mkdir -p {}", keystore_path)); + + let mut keys: BTreeMap = BTreeMap::new(); + + if !decoded_keys.is_empty() { + save_decoded_keys(decoded_keys, keystore_path, &mut keys, context)?; + } else { + save_raw_keys_as_fallback(session_keys_hex, keystore_path, &mut keys, context)?; + } + + // For automatic key generation, generate the cross-chain key without requiring a chain spec + let partner_chains_key = generate_cross_chain_key_for_automatic_flow(context, keystore_path)?; + + // Create PermissionedCandidateKeys struct to match traditional generate-keys format + let permissioned_keys = PermissionedCandidateKeys { + partner_chains_key, + keys: keys.clone(), + }; + + save_permissioned_keys_to_json_file(&permissioned_keys, context)?; + + // Print decoded keys for reference + context.print(&format!("Decoded session keys: {:?}", keys)); + context.eprint("Note: Cross-chain key has been generated and included in the JSON file."); + + Ok(()) +} + +/// Generate cross-chain key for automatic flow without requiring a chain spec file +fn generate_cross_chain_key_for_automatic_flow( + context: &C, + keystore_path: &str, +) -> anyhow::Result { + // Check if cross-chain key already exists + let existing_keys = context.list_directory(keystore_path)?.unwrap_or_default(); + + if let Some(existing_key) = find_existing_key(&existing_keys, &CROSS_CHAIN) { + if context.prompt_yes_no( + &format!("A {} key already exists in store: {} - overwrite it?", CROSS_CHAIN.name, existing_key), + false, + ) { + // Generate new key + let new_key = generate_keys(context, &CROSS_CHAIN)?; + + // Save to keystore directly (without chain spec) + let store_path = format!("{}/{}{}", keystore_path, CROSS_CHAIN.key_type_hex(), new_key.public_key); + context.write_file(&store_path, &new_key.public_key); + context.eprint(&format!("💾 {} key stored at {}", CROSS_CHAIN.name, store_path)); + + // Remove old key if it exists + let old_key_path = format!("{}/{}{}", keystore_path, CROSS_CHAIN.key_type_hex(), existing_key); + if context.file_exists(&old_key_path) { + context.delete_file(&old_key_path)?; + } + + ByteString::decode_hex(&new_key.public_key) + .map_err(|e| anyhow!("Failed to decode hex: {}", e)) + } else { + // Use existing key + ByteString::decode_hex(&format!("0x{}", existing_key)) + .map_err(|e| anyhow!("Failed to decode hex: {}", e)) + } + } else { + // Generate new key + let new_key = generate_keys(context, &CROSS_CHAIN)?; + + // Save to keystore directly (without chain spec) + let store_path = format!("{}/{}{}", keystore_path, CROSS_CHAIN.key_type_hex(), new_key.public_key); + context.write_file(&store_path, &new_key.public_key); + context.eprint(&format!("💾 {} key stored at {}", CROSS_CHAIN.name, store_path)); + + ByteString::decode_hex(&new_key.public_key) + .map_err(|e| anyhow!("Failed to decode hex: {}", e)) + } +} + +/// Save successfully decoded keys to keystore +fn save_decoded_keys( + decoded_keys: &[(Vec, Vec)], + keystore_path: &str, + keys: &mut BTreeMap, + context: &C, +) -> anyhow::Result<()> { + for (key_type, public_key) in decoded_keys { + // Convert key type to string for JSON and display + let key_type_str = String::from_utf8(key_type.clone()) + .map_err(|e| anyhow!("Invalid key type encoding: {}", e))?; + + // Save to keystore with key_type_hex + public_key format + let key_type_hex = hex::encode(key_type); + let store_path = format!("{}/{}{}", keystore_path, key_type_hex, hex::encode(public_key)); + context.write_file(&store_path, &hex::encode(public_key)); + context.print(&format!("Saved {} key to {}", key_type_str, store_path)); + + // Store in keys map for JSON output as ByteString + keys.insert(key_type_str, ByteString::from(public_key.clone())); + } + Ok(()) +} + +/// Save raw session keys as fallback when decoding fails +fn save_raw_keys_as_fallback( + session_keys_hex: &str, + keystore_path: &str, + keys: &mut BTreeMap, + context: &C, +) -> anyhow::Result<()> { + context.eprint("⚠️ No session keys decoded. Saving raw keys as fallback."); + context.eprint("Please verify the node's runtime configuration by fetching metadata:"); + context.eprint("curl -X POST -H 'Content-Type: application/json' -d '{\"jsonrpc\":\"2.0\",\"method\":\"state_getMetadata\",\"id\":1}' http://localhost:9933 > metadata.json"); + context.eprint("Look for the Session pallet and SessionKeys type to determine key order (e.g., aura, gran, imon)."); + + let session_keys = hex::decode(&session_keys_hex[2..]) + .map_err(|e| anyhow!("Failed to decode session keys: {}", e))?; + + let store_path = format!("{}/raw{}", keystore_path, hex::encode(&session_keys)); + context.write_file(&store_path, &hex::encode(&session_keys)); + context.print(&format!("Saved raw session keys to {}", store_path)); + keys.insert("raw".to_string(), ByteString::from(session_keys)); + + Ok(()) +} + +/// Save keys to JSON file +fn save_permissioned_keys_to_json_file( + permissioned_keys: &PermissionedCandidateKeys, + context: &C, +) -> anyhow::Result<()> { + if !permissioned_keys.keys.is_empty() || !permissioned_keys.partner_chains_key.0.is_empty() { + if prompt_can_write("keys file", KEYS_FILE_PATH, context) { + let public_keys_json = serde_json::to_string_pretty(permissioned_keys) + .map_err(|e| anyhow!("Failed to serialize public keys: {}", e))?; + context.write_file(KEYS_FILE_PATH, &public_keys_json); + + context.eprint(&format!( + "🔑 The following public keys were generated and saved to the {} file:", + KEYS_FILE_PATH, + )); + context.print(&public_keys_json); + context.eprint("You may share them with your chain governance authority"); + context.eprint("if you wish to be included as a permissioned candidate."); + } else { + context.eprint("Refusing to overwrite keys file - skipping"); + } + } else { + context.eprint("Warning: No keys to save, skipping JSON file creation"); + } + Ok(()) +} + +/// Helper to send a JSON-RPC request +async fn send_rpc_request Deserialize<'de>>( + client: &Client, + url: &str, + method: &str, + params: serde_json::Value, +) -> Result> { + let request = + JsonRpcRequest { jsonrpc: "2.0".to_string(), method: method.to_string(), params, id: 1 }; + + let response = client + .post(url) + .json(&request) + .send() + .await? + .json::>() + .await?; + + if let Some(error) = response.error { + return Err(format!("RPC error: {} (code: {})", error.message, error.code).into()); + } + + response.result.ok_or_else(|| "No result in response".into()) +} diff --git a/toolkit/partner-chains-cli/src/generate_keys/tests.rs b/toolkit/partner-chains-cli/src/generate_keys/tests.rs index c56cddb8e0..b280263f37 100644 --- a/toolkit/partner-chains-cli/src/generate_keys/tests.rs +++ b/toolkit/partner-chains-cli/src/generate_keys/tests.rs @@ -181,7 +181,10 @@ fn happy_path() { ]); let result = - GenerateKeysCmd:: { _phantom: std::marker::PhantomData }.run(&mock_context); + GenerateKeysCmd:: { + _phantom: std::marker::PhantomData, + node_url: None, + }.run(&mock_context); result.expect("should succeed"); verify_json!( @@ -387,3 +390,78 @@ mod generate_network_key { fn key_type_hex_works() { assert_eq!(GRANDPA.key_type_hex(), "6772616e") } + +// Tests for automatic key generation functionality +mod automatic_key_generation { + use super::*; + use anyhow::Result; + use parity_scale_codec::Encode; + + #[tokio::test] + async fn test_parse_decoded_keys_response_modern_format() -> Result<()> { + // Test parsing modern Polkadot SDK format: Option, u32)>> + let key_data = vec![ + (b"aura_public_key".to_vec(), 0x61757261u32), // 'aura' as u32 + (b"grandpa_public_key".to_vec(), 0x6772616eu32), // 'gran' as u32 + ]; + let encoded = Some(key_data).encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, 0x61757261u32.to_le_bytes().to_vec()); + assert_eq!(result[0].1, b"aura_public_key".to_vec()); + assert_eq!(result[1].0, 0x6772616eu32.to_le_bytes().to_vec()); + assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); + + Ok(()) + } + + #[tokio::test] + async fn test_parse_decoded_keys_response_legacy_format() -> Result<()> { + // Test parsing legacy format: Vec<(Vec, Vec)> + let key_data = vec![ + (b"aura".to_vec(), b"aura_public_key".to_vec()), + (b"gran".to_vec(), b"grandpa_public_key".to_vec()), + ]; + let encoded = key_data.encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 2); + assert_eq!(result[0].0, b"aura".to_vec()); + assert_eq!(result[0].1, b"aura_public_key".to_vec()); + assert_eq!(result[1].0, b"gran".to_vec()); + assert_eq!(result[1].1, b"grandpa_public_key".to_vec()); + + Ok(()) + } + + #[tokio::test] + async fn test_parse_decoded_keys_response_empty() -> Result<()> { + // Test parsing empty response: Option, u32)>> = None + let encoded = Option::, u32)>>::None.encode(); + + let result = parse_decoded_keys_response(&encoded)?; + + assert_eq!(result.len(), 0); + + Ok(()) + } + + #[tokio::test] + async fn test_parse_decoded_keys_response_invalid() { + // Test parsing invalid data + let invalid_data = b"invalid_data"; + + let result = parse_decoded_keys_response(invalid_data); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("Failed to SCALE decode keys")); + } + + // The core SCALE decoding functionality is thoroughly tested above. + // The MockIO system in this codebase doesn't provide comprehensive file operation mocking, + // so more detailed integration tests would require a separate test framework or actual + // HTTP server setup for testing the full RPC workflow. +} diff --git a/toolkit/partner-chains-cli/src/lib.rs b/toolkit/partner-chains-cli/src/lib.rs index 80386b6f41..ad6215336c 100644 --- a/toolkit/partner-chains-cli/src/lib.rs +++ b/toolkit/partner-chains-cli/src/lib.rs @@ -2,7 +2,7 @@ //! Interacts with Smart Contracts using [`partner_chains_cardano_offchain`] crate. #![deny(missing_docs)] -mod automatic_generate_keys; + mod cardano_key; mod cmd_traits; mod config; @@ -56,9 +56,8 @@ impl CommonArguments { /// Partner Chains text "wizards" for setting up a chain. pub enum Command { /// This wizard generates the keys required for operating a partner-chains node, stores them in the keystore directory, and prints the public keys and keystore location. + /// When called with --url, it can also generate session keys by connecting to a running node via RPC (author_rotateKeys). GenerateKeys(generate_keys::GenerateKeysCmd), - /// Command to automatically generate and save session keys by connecting to a node. - AutomaticGenerateKeys(automatic_generate_keys::AutomaticGenerateKeysCmd), /// Wizard to obtain the configuration needed for the partner-chain governance authority. This configuration should be shared with chain participants and used to create the chain spec json file. PrepareConfiguration(prepare_configuration::PrepareConfigurationCmd), /// Wizard for setting D-parameter and Permissioned Candidates list on the main chain. @@ -84,7 +83,6 @@ impl Command { pub fn run(&self, context: &C) -> anyhow::Result<()> { match self { Command::GenerateKeys(cmd) => cmd.run(context), - Command::AutomaticGenerateKeys(cmd) => cmd.run(context), Command::PrepareConfiguration(cmd) => cmd.run(context), Command::CreateChainSpec(cmd) => cmd.run(context), Command::SetupMainChainState(cmd) => cmd.run(context), @@ -113,7 +111,6 @@ const HELP_EXAMPLES: &str = r#" ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Governance Authority: ║ ║ 1. generate-keys : generate necessary cryptographic keys ║ -║ 2. automatic-generate-keys : generate keys via running node RPC ║ ║ 3. prepare-configuration : set up the partner chain configuration ║ ║ 4. setup-main-chain-state : configure the main chain parameters ║ ║ 5. create-chain-spec : create the chain specification file ║ @@ -121,7 +118,6 @@ const HELP_EXAMPLES: &str = r#" ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Registered Validator: ║ ║ 1. generate-keys : generate validator keys ║ -║ 2. automatic-generate-keys : generate keys via running node RPC ║ ║ 3. register1 : initiate the registration process ║ ║ 4. register2 : complete registration with cold keys ║ ║ 5. register3 : finalize registration ║ @@ -134,7 +130,6 @@ const HELP_EXAMPLES: &str = r#" ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Permissioned Validator: ║ ║ 1. generate-keys : generate validator keys ║ -║ 2. automatic-generate-keys : generate keys via running node RPC ║ ║ 3. start-node : start the validator node ║ ║ ║ ║ Note: After executing key generation, the generated keys must be shared ║ From 1fccb558d7ed0e2fdf83181ce03284c9b2e82743 Mon Sep 17 00:00:00 2001 From: ladamesny Date: Wed, 13 Aug 2025 13:18:03 -0400 Subject: [PATCH 3/3] Fix help numbering and test expectations --- .../src/generate_keys/mod.rs | 37 +++++++++------ .../src/generate_keys/tests.rs | 47 ++++++++++--------- toolkit/partner-chains-cli/src/lib.rs | 23 +++++---- 3 files changed, 61 insertions(+), 46 deletions(-) diff --git a/toolkit/partner-chains-cli/src/generate_keys/mod.rs b/toolkit/partner-chains-cli/src/generate_keys/mod.rs index e3f64b5324..3cbead6537 100644 --- a/toolkit/partner-chains-cli/src/generate_keys/mod.rs +++ b/toolkit/partner-chains-cli/src/generate_keys/mod.rs @@ -98,6 +98,7 @@ impl CmdRun for GenerateKeysCmd { context.eprint("It will also generate a network key for your node if needed."); context.enewline(); + // Create a proper temporary chain spec as it was in master let chain_spec_path = write_temp_chain_spec( context, T::create_chain_spec(&CreateChainSpecConfig::::default()), @@ -121,15 +122,6 @@ impl CmdRun for GenerateKeysCmd { } } -fn write_temp_chain_spec(context: &C, chain_spec: serde_json::Value) -> String { - let dir_path = context.new_tmp_dir(); - let dir_path = dir_path.to_str().expect("temp dir path is correct utf-8"); - let path = format!("{dir_path}/chain-spec.json"); - let content = format!("{chain_spec}"); - context.write_file(&path, &content); - path -} - pub(crate) fn generate_spo_keys( config: &GenerateKeysConfig, chain_spec_path: &str, @@ -147,12 +139,18 @@ pub(crate) fn generate_spo_keys( keys.insert(key_definition.key_type.to_owned(), generated_key); } - let permissioned_keys = PermissionedCandidateKeys { partner_chains_key, keys }; - - // Use the shared function to save to JSON file - save_permissioned_keys_to_json_file(&permissioned_keys, context)?; - - // The save_permissioned_keys_to_json_file function already handles the output + let public_keys_json = + serde_json::to_string_pretty(&PermissionedCandidateKeys { partner_chains_key, keys }) + .expect("PermissionedCandidateKeys have only UTF-8 encodable ids"); + context.write_file(KEYS_FILE_PATH, &public_keys_json); + + context.eprint(&format!( + "🔑 The following public keys were generated and saved to the {} file:", + KEYS_FILE_PATH, + )); + context.print(&(public_keys_json).to_string()); + context.eprint("You may share them with your chain governance authority"); + context.eprint("if you wish to be included as a permissioned candidate."); } else { context.eprint("Refusing to overwrite keys file - skipping"); } @@ -583,3 +581,12 @@ async fn send_rpc_request Deserialize<'de>>( response.result.ok_or_else(|| "No result in response".into()) } + +fn write_temp_chain_spec(context: &C, chain_spec: serde_json::Value) -> String { + let dir_path = context.new_tmp_dir(); + let dir_path = dir_path.to_str().expect("temp dir path is correct utf-8"); + let path = format!("{dir_path}/chain-spec.json"); + let content = format!("{chain_spec}"); + context.write_file(&path, &content); + path +} diff --git a/toolkit/partner-chains-cli/src/generate_keys/tests.rs b/toolkit/partner-chains-cli/src/generate_keys/tests.rs index b280263f37..bebc3a8380 100644 --- a/toolkit/partner-chains-cli/src/generate_keys/tests.rs +++ b/toolkit/partner-chains-cli/src/generate_keys/tests.rs @@ -157,28 +157,33 @@ pub mod scenarios { MockIO::eprint("if you wish to be included as a permissioned candidate."), ]) } - - pub fn create_temp_chain_spec() -> MockIO { - MockIO::Group(vec![MockIO::new_tmp_dir()]) - } } #[test] fn happy_path() { - let mock_context = MockIOContext::new().with_expected_io(vec![ - scenarios::show_intro(), - MockIO::enewline(), - scenarios::create_temp_chain_spec(), - scenarios::prompt_all_config_fields(), - MockIO::enewline(), - scenarios::generate_all_spo_keys(AURA_KEY, GRANDPA_KEY, CROSS_CHAIN_KEY), - scenarios::write_key_file(AURA_KEY, GRANDPA_KEY, CROSS_CHAIN_KEY), - MockIO::enewline(), - scenarios::generate_network_key(), - MockIO::enewline(), - MockIO::eprint("🚀 All done!"), - MockIO::delete_file("/tmp/MockIOContext_tmp_dir/chain-spec.json"), - ]); + let mock_context = MockIOContext::new() + .with_json_file( + RESOURCES_CONFIG_FILE_PATH, + serde_json::json!({ + "substrate_node_base_path": DATA_PATH, + }), + ) + .with_expected_io(vec![ + scenarios::show_intro(), + MockIO::enewline(), + MockIO::new_tmp_dir(), + MockIO::eprint(&format!( + "🛠️ Loaded node base path from config ({RESOURCES_CONFIG_FILE_PATH}): {DATA_PATH}" + )), + MockIO::enewline(), + scenarios::generate_all_spo_keys(AURA_KEY, GRANDPA_KEY, CROSS_CHAIN_KEY), + scenarios::write_key_file(AURA_KEY, GRANDPA_KEY, CROSS_CHAIN_KEY), + MockIO::enewline(), + scenarios::generate_network_key(), + MockIO::enewline(), + MockIO::eprint("🚀 All done!"), + MockIO::delete_file("/tmp/MockIOContext_tmp_dir/chain-spec.json"), + ]); let result = GenerateKeysCmd:: { @@ -373,14 +378,14 @@ mod generate_network_key { MockIO::run_command(&format!("mkdir -p {DATA_PATH}/network"), "irrelevant"), MockIO::run_command( &format!( - " key generate-node-key --chain path/to/chain-spec.json --file {}", - network_key_file() + " key generate-node-key --chain {}/chain_spec.json --file {}", + DATA_PATH, network_key_file() ), "irrelevant", ), ]); - let result = generate_network_key(&default_config(), "path/to/chain-spec.json", &context); + let result = generate_network_key(&default_config(), &format!("{}/chain_spec.json", DATA_PATH), &context); assert!(result.is_ok()); } diff --git a/toolkit/partner-chains-cli/src/lib.rs b/toolkit/partner-chains-cli/src/lib.rs index ad6215336c..c97e2838b1 100644 --- a/toolkit/partner-chains-cli/src/lib.rs +++ b/toolkit/partner-chains-cli/src/lib.rs @@ -111,18 +111,20 @@ const HELP_EXAMPLES: &str = r#" ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Governance Authority: ║ ║ 1. generate-keys : generate necessary cryptographic keys ║ -║ 3. prepare-configuration : set up the partner chain configuration ║ -║ 4. setup-main-chain-state : configure the main chain parameters ║ -║ 5. create-chain-spec : create the chain specification file ║ -║ 6. start-node : start the validator node ║ +║ (use --url for automatic key generation) ║ +║ 2. prepare-configuration : set up the partner chain configuration ║ +║ 3. setup-main-chain-state : configure the main chain parameters ║ +║ 4. create-chain-spec : create the chain specification file ║ +║ 5. start-node : start the validator node ║ ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Registered Validator: ║ ║ 1. generate-keys : generate validator keys ║ -║ 3. register1 : initiate the registration process ║ -║ 4. register2 : complete registration with cold keys ║ -║ 5. register3 : finalize registration ║ -║ 6. start-node : start the validator node ║ -║ 7. deregister : cancel registration ║ +║ (use --url for automatic key generation) ║ +║ 2. register1 : initiate the registration process ║ +║ 3. register2 : complete registration with cold keys ║ +║ 4. register3 : finalize registration ║ +║ 5. start-node : start the validator node ║ +║ 6. deregister : cancel registration ║ ║ ║ ║ Note: This sequence assumes that the chain-spec.json and ║ ║ pc-chain-config.json files have been obtained from ║ @@ -130,7 +132,8 @@ const HELP_EXAMPLES: &str = r#" ╟────────────────────────────────────────────────────────────────────────────────╢ ║ Permissioned Validator: ║ ║ 1. generate-keys : generate validator keys ║ -║ 3. start-node : start the validator node ║ +║ (use --url for automatic key generation) ║ +║ 2. start-node : start the validator node ║ ║ ║ ║ Note: After executing key generation, the generated keys must be shared ║ ║ with the Governance Authority. The 'start-node' command can only be ║