diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad023fc2..b5771f0f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,7 +66,6 @@ jobs: run: RUST_BACKTRACE=1 cargo test --workspace --exclude ark-secp256k1 e2e-tests: - needs: [ clippy ] strategy: fail-fast: false matrix: diff --git a/ark-client/src/batch.rs b/ark-client/src/batch.rs index fccae943..ffef38f4 100644 --- a/ark-client/src/batch.rs +++ b/ark-client/src/batch.rs @@ -17,6 +17,7 @@ use ark_core::proof_of_funds; use ark_core::server::BatchTreeEventType; use ark_core::server::StreamEvent; use ark_core::ArkAddress; +use ark_core::ArkNote; use ark_core::TxGraph; use backon::ExponentialBuilder; use backon::Retryable; @@ -76,6 +77,7 @@ where &mut rng.clone(), boarding_inputs.clone(), vtxo_inputs.clone(), + vec![], BatchOutputType::Board { to_address, to_amount: total_amount, @@ -136,6 +138,7 @@ where &mut rng.clone(), boarding_inputs.clone(), vtxo_inputs.clone(), + vec![], BatchOutputType::OffBoard { to_address: to_address.clone(), to_amount, @@ -248,17 +251,18 @@ where Ok((boarding_inputs, vtxo_inputs, total_amount)) } - async fn join_next_batch( + pub(crate) async fn join_next_batch( &self, rng: &mut R, onchain_inputs: Vec, vtxo_inputs: Vec, + arknotes: Vec, output_type: BatchOutputType, ) -> Result where R: Rng + CryptoRng, { - if onchain_inputs.is_empty() && vtxo_inputs.is_empty() { + if onchain_inputs.is_empty() && vtxo_inputs.is_empty() && arknotes.is_empty() { return Err(Error::ad_hoc("cannot join batch without inputs")); } @@ -304,7 +308,14 @@ where ) }); - boarding_inputs.chain(vtxo_inputs).collect::>() + let arknotes = arknotes + .iter() + .map(|n| n.into()) + .collect::>(); + boarding_inputs + .chain(vtxo_inputs) + .chain(arknotes) + .collect::>() }; let mut outputs = vec![]; @@ -689,9 +700,13 @@ where Some(commitment_psbt) }; - network_client - .submit_signed_forfeit_txs(signed_forfeit_psbts, commitment_psbt) - .await?; + // Only submit forfeit transactions if we have actual inputs that require + // them ArkNotes don't require forfeit transactions + if !signed_forfeit_psbts.is_empty() || commitment_psbt.is_some() { + network_client + .submit_signed_forfeit_txs(signed_forfeit_psbts, commitment_psbt) + .await?; + } step = step.next(); } @@ -753,7 +768,7 @@ where } } -enum BatchOutputType { +pub(crate) enum BatchOutputType { Board { to_address: ArkAddress, to_amount: Amount, diff --git a/ark-client/src/lib.rs b/ark-client/src/lib.rs index 91b5792b..56a17f93 100644 --- a/ark-client/src/lib.rs +++ b/ark-client/src/lib.rs @@ -13,6 +13,7 @@ use ark_core::server::GetVtxosRequest; use ark_core::server::SubscriptionResponse; use ark_core::server::VirtualTxOutPoint; use ark_core::ArkAddress; +use ark_core::ArkNote; use ark_core::UtxoCoinSelection; use ark_core::Vtxo; use ark_grpc::VtxoChainResponse; @@ -27,6 +28,8 @@ use bitcoin::Txid; use futures::Future; use futures::Stream; use jiff::Timestamp; +use rand::CryptoRng; +use rand::Rng; use std::sync::Arc; use std::time::Duration; @@ -39,6 +42,7 @@ mod send_vtxo; mod unilateral_exit; mod utils; +use batch::BatchOutputType; pub use error::Error; /// A client to interact with Ark Server @@ -527,6 +531,83 @@ where Ok(sum) } + /// Redeem multiple ArkNotes by settling them into new VTXOs + /// + /// This method takes ArkNote objects and creates a batch transaction to convert + /// them into spendable VTXOs at the user's offchain address. 
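+ ///
+ /// # Example
+ ///
+ /// A minimal sketch: `client` is assumed to be an already connected [`Client`], and the
+ /// note string below is taken from the ArkNote test vectors.
+ ///
+ /// ```ignore
+ /// let note = ArkNote::from_string("arknote8rFzGqZsG9RCLripA6ez8d2hQEzFKsqCeiSnXhQj56Ysw7ZQT")?;
+ /// let mut rng = rand::thread_rng();
+ /// if let Some(txid) = client.redeem_notes(&mut rng, vec![note]).await? {
+ ///     tracing::info!(%txid, "ArkNotes settled into new VTXOs");
+ /// }
+ /// ```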
+ pub async fn redeem_notes( + &self, + rng: &mut R, + arknotes: Vec, + ) -> Result, Error> + where + R: CryptoRng + Rng + Clone, + { + if arknotes.is_empty() { + return Ok(None); + } + + // Calculate total amount from all notes + let total_amount = arknotes + .iter() + .fold(Amount::ZERO, |acc, note| acc + note.value()); + + // Get user's offchain address to send the redeemed funds + let (to_address, _) = self.get_offchain_address()?; + + tracing::info!( + note_count = arknotes.len(), + total_amount = %total_amount, + to_address = %to_address.encode(), + "Redeeming ArkNotes" + ); + + // Join the next batch with the ArkNotes as inputs + self.join_next_batch( + rng, + vec![], + vec![], + arknotes, + BatchOutputType::Board { + to_address, + to_amount: total_amount, + }, + ) + .await + .map(Some) + } + + /// Redeem a single ArkNote + pub async fn redeem_note(&self, rng: &mut R, arknote: ArkNote) -> Result, Error> + where + R: CryptoRng + Rng + Clone, + { + self.redeem_notes(rng, vec![arknote]).await + } + + pub async fn create_arknote(&self, amount: Amount) -> Result { + let notes = self + .inner + .network_client + .clone() + .create_arknote(amount.to_sat() as u32, 1) + .await + .map_err(Error::ad_hoc)?; + + if notes.is_empty() { + return Err(Error::ad_hoc("No notes returned from server")); + } + + let note_str = notes + .into_iter() + .next() + .ok_or_else(|| Error::ad_hoc("No note in response"))?; + + tracing::info!(note = %note_str, "Created ArkNote"); + let note = ArkNote::from_string(¬e_str).map_err(Error::ad_hoc)?; + Ok(note) + } + pub async fn transaction_history(&self) -> Result, Error> { let mut boarding_transactions = Vec::new(); let mut boarding_commitment_transactions = Vec::new(); diff --git a/ark-core/Cargo.toml b/ark-core/Cargo.toml index 93cf3d7b..6dc5ba52 100644 --- a/ark-core/Cargo.toml +++ b/ark-core/Cargo.toml @@ -8,6 +8,8 @@ description = "Core types and utilities for Ark" [dependencies] bech32 = "0.11" bitcoin = { version = "0.32.4", features = ["base64", "rand", "serde"] } +bs58 = "0.5" +hex = "0.4" musig = { package = "ark-secp256k1", path = "../ark-rust-secp256k1", features = ["serde", "rand"] } rand = "0.8" serde = { version = "1.0", features = ["derive"] } diff --git a/ark-core/src/arknote.rs b/ark-core/src/arknote.rs new file mode 100644 index 00000000..030ce8ee --- /dev/null +++ b/ark-core/src/arknote.rs @@ -0,0 +1,916 @@ +use crate::Error; +use crate::VirtualUtxoScript; +use bitcoin::hashes::sha256; +use bitcoin::hashes::Hash; +use bitcoin::key::Secp256k1; +use bitcoin::Amount; +use bitcoin::OutPoint; +use bitcoin::ScriptBuf; +use bitcoin::TxOut; +use bitcoin::Txid; +use serde::Deserialize; +use serde::Serialize; +use std::fmt; + +/// Default human-readable prefix for ArkNote string encoding +pub const DEFAULT_HRP: &str = "arknote"; + +/// Length of the preimage in bytes +pub const PREIMAGE_LENGTH: usize = 32; + +/// Length of the value field in bytes +pub const VALUE_LENGTH: usize = 4; + +/// Total length of an encoded ArkNote +pub const ARKNOTE_LENGTH: usize = PREIMAGE_LENGTH + VALUE_LENGTH; + +/// Fake outpoint index used for ArkNotes +pub const FAKE_OUTPOINT_INDEX: u32 = 0; + +/// Status of a coin/VTXO +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct Status { + pub confirmed: bool, +} + +impl fmt::Display for ArkNote { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let encoded = self.encode(); + let value = format!("{}{}", self.hrp, bs58::encode(encoded).into_string()); + write!(f, "{value}") + } +} + +/// ArkNote 
is a fake VTXO coin that can be spent by revealing the preimage +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ArkNote { + preimage: [u8; PREIMAGE_LENGTH], + value: Amount, + hrp: String, + // Computed fields + txid: Txid, + vtxo_script: VirtualUtxoScript, + tap_tree_bytes: Vec, // Cache for tap_tree() method + status: Status, + // FIXME: this is necessary? + extra_witness: Vec>, + + note_script: ScriptBuf, +} + +impl ArkNote { + /// Create a note tapscript that checks the preimage hash + fn note_tapscript(preimage_hash: &sha256::Hash) -> ScriptBuf { + ScriptBuf::builder() + .push_opcode(bitcoin::opcodes::all::OP_SHA256) + .push_slice(preimage_hash.as_byte_array()) + .push_opcode(bitcoin::opcodes::all::OP_EQUAL) + .into_script() + } + + /// Create a new ArkNote with the given preimage and value + pub fn new(preimage: [u8; PREIMAGE_LENGTH], value: Amount) -> Self { + Self::new_with_hrp(preimage, value, DEFAULT_HRP.to_string()) + } + + /// Create a new ArkNote with a custom HRP + pub fn new_with_hrp(preimage: [u8; PREIMAGE_LENGTH], value: Amount, hrp: String) -> Self { + let preimage_hash = sha256::Hash::hash(&preimage); + + let note_script = Self::note_tapscript(&preimage_hash); + + // Create the VTXO script structure using VirtualUtxoScript + let secp = Secp256k1::new(); + let vtxo_script = VirtualUtxoScript::new(&secp, vec![note_script.clone()]) + .expect("failed to create VirtualUtxoScript"); + + let txid = Txid::from_slice(preimage_hash.as_byte_array()).expect("valid txid"); + + // Convert the encoded hex strings to bytes for tap_tree_bytes + let encoded_scripts = vtxo_script.encode(); + ArkNote { + preimage, + value, + hrp, + txid, + vtxo_script, + tap_tree_bytes: encoded_scripts, + status: Status { confirmed: true }, + extra_witness: vec![preimage.to_vec()], + note_script, + } + } + + /// Get the note value + pub fn value(&self) -> Amount { + self.value + } + + /// Get the preimage + pub fn preimage(&self) -> &[u8; PREIMAGE_LENGTH] { + &self.preimage + } + + /// Get the HRP + pub fn hrp(&self) -> &str { + &self.hrp + } + + /// Get the txid + pub fn txid(&self) -> Txid { + self.txid + } + + /// Get the vout (always returns FAKE_OUTPOINT_INDEX) + pub fn vout(&self) -> u32 { + FAKE_OUTPOINT_INDEX + } + + /// Get the note script + pub fn note_script(&self) -> &ScriptBuf { + &self.note_script + } + + /// Get the status + pub fn status(&self) -> &Status { + &self.status + } + + /// Get the extra witness + pub fn extra_witness(&self) -> Option<&[Vec]> { + Some(&self.extra_witness) + } + + /// Get the tap tree + pub fn tap_tree(&self) -> Vec { + self.tap_tree_bytes.clone() + } + + /// Get the forfeit tap leaf script + pub fn forfeit_tap_leaf_script(&self) -> &ScriptBuf { + // The note script is the first (and only) script in our VirtualUtxoScript + &self.vtxo_script.scripts()[0] + } + + /// Get the intent tap leaf script + pub fn intent_tap_leaf_script(&self) -> &ScriptBuf { + // For ArkNote, forfeit and intent scripts are the same + &self.vtxo_script.scripts()[0] + } + + /// Get the underlying VirtualUtxoScript + pub fn vtxo_script(&self) -> &VirtualUtxoScript { + &self.vtxo_script + } + + /// Encode the ArkNote to bytes + pub fn encode(&self) -> Vec { + let mut result = Vec::with_capacity(ARKNOTE_LENGTH); + result.extend_from_slice(&self.preimage); + // Use big-endian to match TypeScript's writeUInt32BE + result.extend_from_slice(&(self.value.to_sat() as u32).to_be_bytes()); + result + } + + pub fn to_encoded_string(&self) -> String { + self.to_string() + } + + /// Decode bytes into an 
ArkNote + pub fn decode(data: &[u8]) -> Result { + Self::decode_with_hrp(data, DEFAULT_HRP) + } + + /// Decode bytes into an ArkNote with custom HRP + pub fn decode_with_hrp(data: &[u8], hrp: &str) -> Result { + if data.len() != ARKNOTE_LENGTH { + return Err(Error::ad_hoc(format!( + "invalid data length: expected {} bytes, got {}", + ARKNOTE_LENGTH, + data.len() + ))); + } + + let mut preimage = [0u8; PREIMAGE_LENGTH]; + preimage.copy_from_slice(&data[..PREIMAGE_LENGTH]); + + let value_bytes = &data[PREIMAGE_LENGTH..]; + let value = u32::from_be_bytes([ + value_bytes[0], + value_bytes[1], + value_bytes[2], + value_bytes[3], + ]); + + Ok(Self::new_with_hrp( + preimage, + Amount::from_sat(value as u64), + hrp.to_string(), + )) + } + + /// Parse an ArkNote from a string + pub fn from_string(note_str: &str) -> Result { + Self::from_string_with_hrp(note_str, DEFAULT_HRP) + } + + /// Parse an ArkNote from a string with custom HRP + pub fn from_string_with_hrp(note_str: &str, hrp: &str) -> Result { + let note_str = note_str.trim(); + if !note_str.starts_with(hrp) { + return Err(Error::ad_hoc(format!( + "invalid human-readable part: expected {hrp} prefix (note '{note_str}')" + ))); + } + + let encoded = ¬e_str[hrp.len()..]; + let decoded = bs58::decode(encoded) + .into_vec() + .map_err(|e| Error::ad_hoc(format!("failed to decode base58: {e}")))?; + + if decoded.is_empty() { + return Err(Error::ad_hoc("failed to decode base58 string".to_string())); + } + + Self::decode_with_hrp(&decoded, hrp) + } + + /// Get the outpoint for this ArkNote + pub fn outpoint(&self) -> OutPoint { + OutPoint::new(self.txid, FAKE_OUTPOINT_INDEX) + } + + /// Convert to a TxOut + pub fn to_tx_out(&self) -> TxOut { + let script_pubkey = self.vtxo_script.script_pubkey(); + TxOut { + value: self.value, + script_pubkey, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde::Deserialize; + use serde::Serialize; + + #[derive(Debug, Serialize, Deserialize)] + struct TestVectors { + address: AddressTestVectors, + note: NoteTestVectors, + } + + #[derive(Debug, Serialize, Deserialize)] + struct AddressTestVectors { + valid: Vec, + invalid: Vec, + } + + #[derive(Debug, Serialize, Deserialize)] + struct AddressValidTest { + addr: String, + #[serde(rename = "expectedVersion")] + expected_version: u8, + #[serde(rename = "expectedPrefix")] + expected_prefix: String, + #[serde(rename = "expectedUserKey")] + expected_user_key: String, + #[serde(rename = "expectedServerKey")] + expected_server_key: String, + } + + #[derive(Debug, Serialize, Deserialize)] + struct AddressInvalidTest { + addr: String, + } + + #[derive(Debug, Serialize, Deserialize)] + struct NoteTestVectors { + valid: Vec, + invalid: Vec, + } + + #[derive(Debug, Serialize, Deserialize)] + struct NoteValidTest { + hrp: String, + str: String, + #[serde(rename = "expectedPreimage")] + expected_preimage: String, + #[serde(rename = "expectedValue")] + expected_value: u64, + } + + #[derive(Debug, Serialize, Deserialize)] + struct NoteInvalidTest { + str: String, + } + + // Helper function for converting hex to bytes + fn hex_to_bytes(hex: &str) -> Vec { + (0..hex.len()) + .step_by(2) + .map(|i| u8::from_str_radix(&hex[i..i + 2], 16).unwrap()) + .collect() + } + + // Helper function for converting hex to 32-byte array + fn hex_to_array32(hex: &str) -> [u8; 32] { + let bytes = hex_to_bytes(hex); + let mut array = [0u8; 32]; + array.copy_from_slice(&bytes); + array + } + + #[test] + fn test_arknote_test_vectors() { + // First test with hardcoded test vectors for reliable 
testing + let test_cases = vec![ + // Test case 1: Default HRP + ( + "arknote", + "arknote8rFzGqZsG9RCLripA6ez8d2hQEzFKsqCeiSnXhQj56Ysw7ZQT", + "11d2a03264d0efd311d2a03264d0efd311d2a03264d0efd311d2a03264d0efd3", + 900000_u64, + ), + // Test case 2: Default HRP with different values + ( + "arknote", + "arknoteSkB92YpWm4Q2ijQHH34cqbKkCZWszsiQgHVjtNeFF2Cwp59D", + "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", + 1828932_u64, + ), + // Test case 3: Custom HRP + ( + "noteark", + "noteark8rFzGqZsG9RCLripA6ez8d2hQEzFKsqCeiSnXhQj56Ysw7ZQT", + "11d2a03264d0efd311d2a03264d0efd311d2a03264d0efd311d2a03264d0efd3", + 900000_u64, + ), + // Test case 4: Custom HRP with different values + ( + "noteark", + "notearkSkB92YpWm4Q2ijQHH34cqbKkCZWszsiQgHVjtNeFF2Cwp59D", + "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", + 1828932_u64, + ), + ]; + + for (i, (hrp, note_str, expected_preimage_hex, expected_value)) in + test_cases.iter().enumerate() + { + // Parse the note from string + let parsed_note = ArkNote::from_string_with_hrp(note_str, hrp).unwrap(); + + // Convert expected preimage from hex + let expected_preimage = hex_to_array32(expected_preimage_hex); + + // Validate preimage + assert_eq!( + parsed_note.preimage(), + &expected_preimage, + "Preimage mismatch for test case {}", + i + 1 + ); + + // Validate value + let expected_value = Amount::from_sat(*expected_value); + assert_eq!( + parsed_note.value(), + expected_value, + "Value mismatch for test case {}", + i + 1 + ); + + // Validate HRP + assert_eq!( + parsed_note.hrp(), + *hrp, + "HRP mismatch for test case {}", + i + 1 + ); + + // Test round-trip: create note from expected values and verify string matches + let reconstructed_note = + ArkNote::new_with_hrp(expected_preimage, expected_value, hrp.to_string()); + let reconstructed_string = reconstructed_note.to_string(); + assert_eq!( + reconstructed_string, + *note_str, + "Round-trip string mismatch for test case {}", + i + 1 + ); + } + } + + #[test] + fn test_arknote_boundary_cases() { + // Test zero-value note + let zero_preimage = [0u8; PREIMAGE_LENGTH]; + let zero_value = Amount::from_sat(0); + let zero_note = ArkNote::new(zero_preimage, zero_value); + + assert_eq!( + zero_note.value(), + zero_value, + "Zero value should be preserved" + ); + assert_eq!( + zero_note.preimage(), + &zero_preimage, + "Zero preimage should be preserved" + ); + + // Encode and decode zero-value note + let encoded = zero_note.encode(); + let decoded = ArkNote::decode(&encoded).expect("Should decode zero-value note"); + assert_eq!( + decoded.value(), + zero_value, + "Zero value should survive round-trip" + ); + assert_eq!( + decoded.preimage(), + &zero_preimage, + "Zero preimage should survive round-trip" + ); + + // Test maximum supported value (u32::MAX as satoshis) + let max_preimage = [0xFFu8; PREIMAGE_LENGTH]; + let max_value = Amount::from_sat(u32::MAX as u64); + let max_note = ArkNote::new(max_preimage, max_value); + + assert_eq!(max_note.value(), max_value, "Max value should be preserved"); + assert_eq!( + max_note.preimage(), + &max_preimage, + "Max preimage should be preserved" + ); + + // Encode and decode max-value note + let encoded = max_note.encode(); + let decoded = ArkNote::decode(&encoded).expect("Should decode max-value note"); + assert_eq!( + decoded.value(), + max_value, + "Max value should survive round-trip" + ); + assert_eq!( + decoded.preimage(), + &max_preimage, + "Max preimage should survive round-trip" + ); + + // Test mixed-case HRP (should be 
case-sensitive) + let mixed_hrp = "ArkNote"; + let preimage = [0x42u8; PREIMAGE_LENGTH]; + let value = Amount::from_sat(1000); + let mixed_note = ArkNote::new_with_hrp(preimage, value, mixed_hrp.to_string()); + + assert_eq!( + mixed_note.hrp(), + mixed_hrp, + "Mixed-case HRP should be preserved" + ); + + // Test that mixed-case HRP is handled correctly in string format + let note_string = mixed_note.to_string(); + assert!( + note_string.starts_with(mixed_hrp), + "String should start with mixed-case HRP" + ); + + // Parse with correct case should work + let parsed = ArkNote::from_string_with_hrp(¬e_string, mixed_hrp) + .expect("Should parse with matching HRP"); + assert_eq!(parsed.hrp(), mixed_hrp, "HRP should match after parsing"); + + // Parse with wrong case should fail + let wrong_case_result = ArkNote::from_string_with_hrp(¬e_string, "arknote"); + assert!( + wrong_case_result.is_err(), + "Should fail with mismatched HRP case" + ); + } + + #[test] + fn test_arknote_round_trip_equality() { + // Test multiple round-trips with different values + let test_cases = vec![ + ([0x01u8; PREIMAGE_LENGTH], 1), + ([0x42u8; PREIMAGE_LENGTH], 42), + ([0xAAu8; PREIMAGE_LENGTH], 1000), + ([0xFFu8; PREIMAGE_LENGTH], u32::MAX as u64), + ([0x00u8; PREIMAGE_LENGTH], 0), + ]; + + for (preimage, sats) in test_cases { + let value = Amount::from_sat(sats); + + // Create original note + let original = ArkNote::new(preimage, value); + + // First round-trip: encode -> decode + let encoded1 = original.encode(); + let decoded1 = ArkNote::decode(&encoded1).expect("First decode should succeed"); + + // Second round-trip: encode -> decode again + let encoded2 = decoded1.encode(); + let decoded2 = ArkNote::decode(&encoded2).expect("Second decode should succeed"); + + // Verify all encodings are identical + assert_eq!(encoded1, encoded2, "Encodings should be identical"); + + // Verify all properties are preserved + assert_eq!( + decoded1.preimage(), + original.preimage(), + "Preimage should be preserved after first round-trip" + ); + assert_eq!( + decoded2.preimage(), + original.preimage(), + "Preimage should be preserved after second round-trip" + ); + assert_eq!( + decoded1.value(), + original.value(), + "Value should be preserved after first round-trip" + ); + assert_eq!( + decoded2.value(), + original.value(), + "Value should be preserved after second round-trip" + ); + + // String round-trip: to_string -> from_string + let string1 = original.to_string(); + let parsed1 = ArkNote::from_string(&string1).expect("First parse should succeed"); + let string2 = parsed1.to_string(); + + // Verify string representations are identical + assert_eq!( + string1, string2, + "String representations should be identical" + ); + + // Verify parsed note matches original + assert_eq!( + parsed1.preimage(), + original.preimage(), + "Preimage should be preserved in string round-trip" + ); + assert_eq!( + parsed1.value(), + original.value(), + "Value should be preserved in string round-trip" + ); + } + } + + #[test] + fn test_arknote_invalid_formats() { + // Test invalid data length (too short) + let short_data = vec![0u8; ARKNOTE_LENGTH - 1]; + let result = ArkNote::decode(&short_data); + assert!(result.is_err(), "Should fail with short data"); + assert!( + result + .unwrap_err() + .to_string() + .contains("invalid data length"), + "Should report invalid data length for short data" + ); + + // Test invalid data length (too long) + let long_data = vec![0u8; ARKNOTE_LENGTH + 1]; + let result = ArkNote::decode(&long_data); + 
assert!(result.is_err(), "Should fail with long data"); + assert!( + result + .unwrap_err() + .to_string() + .contains("invalid data length"), + "Should report invalid data length for long data" + ); + + // Test empty data + let empty_data = vec![]; + let result = ArkNote::decode(&empty_data); + assert!(result.is_err(), "Should fail with empty data"); + + // Test invalid base58 string + let invalid_base58 = "arknote!!!INVALID!!!"; + let result = ArkNote::from_string(invalid_base58); + assert!(result.is_err(), "Should fail with invalid base58"); + assert!( + result + .unwrap_err() + .to_string() + .contains("failed to decode base58"), + "Should report base58 decode error" + ); + + // Test string with wrong HRP + let wrong_hrp_string = "wrongprefixABCDEF123456"; + let result = ArkNote::from_string(wrong_hrp_string); + assert!(result.is_err(), "Should fail with wrong HRP"); + assert!( + result + .unwrap_err() + .to_string() + .contains("invalid human-readable part"), + "Should report invalid HRP" + ); + + // Test valid HRP with wrong length payload + // Create a valid note first to get proper base58 + let valid_note = ArkNote::new([0x42u8; PREIMAGE_LENGTH], Amount::from_sat(1000)); + let valid_string = valid_note.to_string(); + + // Manipulate the string to have wrong length + let truncated_string = &valid_string[..valid_string.len() - 5]; + let result = ArkNote::from_string(truncated_string); + assert!(result.is_err(), "Should fail with truncated string"); + + // Test string with extra characters + let extra_chars_string = format!("{}EXTRA", valid_string); + let result = ArkNote::from_string(&extra_chars_string); + assert!(result.is_err(), "Should fail with extra characters"); + + // Test empty string after HRP + let empty_after_hrp = "arknote"; + let result = ArkNote::from_string(empty_after_hrp); + assert!(result.is_err(), "Should fail with empty payload after HRP"); + + // Test whitespace handling + let whitespace_string = format!(" {} ", valid_string); + let result = ArkNote::from_string(&whitespace_string); + assert!(result.is_ok(), "Should handle leading/trailing whitespace"); + assert_eq!( + result.unwrap().value(), + valid_note.value(), + "Should correctly parse after trimming whitespace" + ); + } + + #[test] + fn test_arknote_test_vectors_from_json() { + // Try to load test vectors from JSON file, skip test if file not found + let test_vectors_result = std::fs::read_to_string("test_vectors.json"); + + if test_vectors_result.is_err() { + // Skip test if JSON file not found + return; + } + + let test_vectors_json = test_vectors_result.unwrap(); + let test_vectors: TestVectors = + serde_json::from_str(&test_vectors_json).expect("Failed to parse test_vectors.json"); + + // Verify we have the expected number of test cases + assert!( + !test_vectors.note.valid.is_empty(), + "Should have valid test cases" + ); + assert!( + !test_vectors.note.invalid.is_empty(), + "Should have invalid test cases" + ); + + // Test valid notes + for (i, test_case) in test_vectors.note.valid.iter().enumerate() { + // Parse the note from string + let parsed_note = ArkNote::from_string_with_hrp(&test_case.str, &test_case.hrp) + .unwrap_or_else(|e| panic!("Failed to parse note for test case {}: {}", i + 1, e)); + + // Validate preimage + let expected_preimage = hex_to_array32(&test_case.expected_preimage); + assert_eq!( + parsed_note.preimage(), + &expected_preimage, + "Preimage mismatch for test case {}", + i + 1 + ); + + // Validate value + let expected_value = Amount::from_sat(test_case.expected_value); + 
assert_eq!( + parsed_note.value(), + expected_value, + "Value mismatch for test case {}", + i + 1 + ); + + // Validate HRP + assert_eq!( + parsed_note.hrp(), + test_case.hrp, + "HRP mismatch for test case {}", + i + 1 + ); + + // Validate that the string starts with the HRP (like TypeScript test) + assert!( + test_case.str.starts_with(&test_case.hrp), + "String should start with HRP '{}' for test case {}", + test_case.hrp, + i + 1 + ); + + // Validate that the HRP length matches the prefix length + let hrp_len = test_case.hrp.len(); + assert_eq!( + &test_case.str[..hrp_len], + test_case.hrp, + "String prefix should match HRP for test case {}", + i + 1 + ); + + // Test encoding: create note from expected values and verify string matches (TypeScript + // pattern) + let new_note = + ArkNote::new_with_hrp(expected_preimage, expected_value, test_case.hrp.clone()); + let encoded_string = new_note.to_string(); + assert_eq!( + encoded_string, + test_case.str, + "Encoded string mismatch for test case {}", + i + 1 + ); + + // Test decode-then-encode pattern (matching TypeScript test exactly) + let decoded_note = ArkNote::from_string_with_hrp(&test_case.str, &test_case.hrp) + .unwrap_or_else(|e| panic!("Failed to decode note for test case {}: {}", i + 1, e)); + + let new_note_from_decoded = ArkNote::new_with_hrp( + *decoded_note.preimage(), + decoded_note.value(), + decoded_note.hrp().to_string(), + ); + + let encoded_back = new_note_from_decoded.to_string(); + assert_eq!( + encoded_back, + test_case.str, + "Decode-then-encode pattern failed for test case {}", + i + 1 + ); + + // Test round-trip: create note from expected values and verify string matches + let reconstructed_note = + ArkNote::new_with_hrp(expected_preimage, expected_value, test_case.hrp.clone()); + let reconstructed_string = reconstructed_note.to_string(); + assert_eq!( + reconstructed_string, + test_case.str, + "Round-trip string mismatch for test case {}", + i + 1 + ); + + // Additional comprehensive assertions + assert!( + parsed_note.status().confirmed, + "Status should be confirmed for test case {}", + i + 1 + ); + assert_eq!( + parsed_note.vout(), + 0, + "Vout should be 0 for test case {}", + i + 1 + ); + assert!( + parsed_note.extra_witness().is_some(), + "Extra witness should exist for test case {}", + i + 1 + ); + assert_eq!( + parsed_note.extra_witness().unwrap().len(), + 1, + "Should have exactly one witness for test case {}", + i + 1 + ); + assert_eq!( + parsed_note.extra_witness().unwrap()[0], + expected_preimage.to_vec(), + "Witness should match preimage for test case {}", + i + 1 + ); + + // Verify VirtualUtxoScript properties + let vtxo_script = parsed_note.vtxo_script(); + assert_eq!( + vtxo_script.scripts().len(), + 1, + "Should have exactly one script for test case {}", + i + 1 + ); + assert_eq!( + parsed_note.forfeit_tap_leaf_script(), + parsed_note.intent_tap_leaf_script(), + "Forfeit and intent scripts should be the same for test case {}", + i + 1 + ); + + // Verify tap tree is not empty + let tap_tree = parsed_note.tap_tree(); + assert!( + !tap_tree.is_empty(), + "Tap tree should not be empty for test case {}", + i + 1 + ); + + // Verify txid format (should be valid hex) + let txid = parsed_note.txid().to_string(); + assert_eq!( + txid.len(), + 64, + "TXID should be 64 characters for test case {}", + i + 1 + ); + assert!( + txid.chars().all(|c| c.is_ascii_hexdigit()), + "TXID should be valid hex for test case {}", + i + 1 + ); + + // Verify outpoint creation + let outpoint = parsed_note.outpoint(); + assert_eq!( 
+ outpoint.vout, + 0, + "Outpoint vout should be 0 for test case {}", + i + 1 + ); + + // Verify TxOut creation + let tx_out = parsed_note.to_tx_out(); + assert_eq!( + tx_out.value, + expected_value, + "TxOut value should match for test case {}", + i + 1 + ); + assert_eq!( + tx_out.script_pubkey, + vtxo_script.script_pubkey(), + "TxOut script should match VirtualUtxoScript for test case {}", + i + 1 + ); + + // Verify encoding/decoding consistency + let encoded = parsed_note.encode(); + assert_eq!( + encoded.len(), + ARKNOTE_LENGTH, + "Encoded length should be correct for test case {}", + i + 1 + ); + let decoded = ArkNote::decode(&encoded).unwrap(); + assert_eq!( + decoded.preimage(), + &expected_preimage, + "Decode should preserve preimage for test case {}", + i + 1 + ); + assert_eq!( + decoded.value(), + expected_value, + "Decode should preserve value for test case {}", + i + 1 + ); + } + + // Test invalid notes + for (i, test_case) in test_vectors.note.invalid.iter().enumerate() { + // Try to parse with default HRP - should fail + let result = ArkNote::from_string(&test_case.str); + assert!( + result.is_err(), + "Expected parsing to fail for invalid test case {}: {}", + i + 1, + test_case.str + ); + + // Ensure specific error types for known cases + let error_msg = result.unwrap_err().to_string(); + if test_case.str == "arknoteshort" { + assert!( + error_msg.contains("invalid data length"), + "Short note should fail with data length error for test case {}", + i + 1 + ); + } + + if test_case.str.starts_with("wrongprefix") { + assert!( + error_msg.contains("invalid human-readable part"), + "Wrong prefix should fail with HRP error for test case {}", + i + 1 + ); + } + } + } +} diff --git a/ark-core/src/lib.rs b/ark-core/src/lib.rs index 62a8bcb8..d6860c18 100644 --- a/ark-core/src/lib.rs +++ b/ark-core/src/lib.rs @@ -3,6 +3,7 @@ use bitcoin::OutPoint; use bitcoin::ScriptBuf; use bitcoin::TxOut; +pub mod arknote; pub mod batch; pub mod boarding_output; pub mod coin_select; @@ -21,6 +22,8 @@ mod tree_tx_output_script; mod tx_graph; pub use ark_address::ArkAddress; +pub use arknote::ArkNote; +pub use arknote::Status; pub use boarding_output::BoardingOutput; pub use error::Error; pub use error::ErrorContext; @@ -31,6 +34,7 @@ pub use unilateral_exit::build_anchor_tx; pub use unilateral_exit::build_unilateral_exit_tree_txids; pub use unilateral_exit::SelectedUtxo; pub use unilateral_exit::UtxoCoinSelection; +pub use vtxo::VirtualUtxoScript; pub use vtxo::Vtxo; pub const UNSPENDABLE_KEY: &str = diff --git a/ark-core/src/proof_of_funds.rs b/ark-core/src/proof_of_funds.rs index 99b5e43e..2d9dddf4 100644 --- a/ark-core/src/proof_of_funds.rs +++ b/ark-core/src/proof_of_funds.rs @@ -1,3 +1,6 @@ +use crate::arknote::PREIMAGE_LENGTH; +use crate::proof_of_funds::taproot::LeafVersion; +use crate::ArkNote; use crate::Error; use crate::ErrorContext; use bitcoin::absolute::LockTime; @@ -46,6 +49,8 @@ pub struct Input { pk: XOnlyPublicKey, spend_info: (ScriptBuf, taproot::ControlBlock), is_onchain: bool, + // FIXME: make the input type an enum based on the input type. 
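+ // For an ArkNote input this holds the preimage that satisfies the note script; regular
+ // VTXO and boarding inputs constructed via `Input::new` leave it `None`.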
+ preimage: Option<[u8; PREIMAGE_LENGTH]>, } impl Input { @@ -66,6 +71,39 @@ impl Input { pk, spend_info, is_onchain, + preimage: None, + } + } +} + +impl From<&ArkNote> for Input { + fn from(value: &ArkNote) -> Self { + let spending_info = value.vtxo_script().spend_info(); + + // this is inside the taproot script path + let node_script = value.note_script().clone(); + let Some(control_block) = + spending_info.control_block(&(node_script.clone(), LeafVersion::TapScript)) + else { + // FIXME: probably we need a tryfrom? + panic!("no control block found"); + }; + + Self { + outpoint: value.outpoint(), + sequence: Sequence::MAX, + witness_utxo: TxOut { + value: value.value(), + // This should be unspendable script? + script_pubkey: value.vtxo_script().script_pubkey(), + }, + // This should be empty? + tapscripts: vec![], + pk: value.vtxo_script().x_only_public_key(), + // This contains the extra info to spend the note, right? + spend_info: (node_script, control_block), + is_onchain: false, + preimage: Some(*value.preimage()), } } } @@ -93,8 +131,8 @@ impl Bip322Proof { } pub fn make_bip322_signature( - signing_kps: &[Keypair], - sign_for_onchain_pk_fn: F, + signing_kps: &[Keypair], // This is to sign VTXOs in the `inputs` argument. + sign_for_onchain_pk_fn: F, // This is to sign boarding outputs in the `inputs` argument. inputs: Vec, outputs: Vec, own_cosigner_pks: Vec, @@ -153,7 +191,7 @@ where proof_input.tap_scripts = BTreeMap::from_iter([( leaf_proof.1.clone(), - (leaf_proof.0.clone(), taproot::LeafVersion::TapScript), + (leaf_proof.0.clone(), LeafVersion::TapScript), )]); } @@ -189,7 +227,7 @@ where let pk = input.pk; - let sig = match input.is_onchain { + let sig: Vec = match input.is_onchain { true => { let sig = sign_for_onchain_pk_fn(&pk, &msg)?; @@ -204,40 +242,52 @@ where proof_input.tap_script_sigs = BTreeMap::from_iter([((pk, leaf_hash), sig)]); - sig + sig.signature.serialize().to_vec() } false => { - let signing_kp = signing_kps - .iter() - .find(|kp| { - let (xonly_ok, _) = kp.x_only_public_key(); - xonly_ok == pk - }) - .ok_or_else(|| Error::ad_hoc("Could not find suitable kp for pk"))?; - - let sig = secp.sign_schnorr_no_aux_rand(&msg, signing_kp); - - secp.verify_schnorr(&sig, &msg, &pk) - .map_err(Error::crypto) - .context("failed to verify own proof of funds vtxo signature")?; - - let sig = taproot::Signature { - signature: sig, - sighash_type: TapSighashType::Default, - }; - - proof_input.tap_script_sigs = BTreeMap::from_iter([((pk, leaf_hash), sig)]); - - sig - } + // FIXME: this is an horrible hack to handle arknotes. + if let Some(preimage) = input.preimage { + preimage.to_vec() + } else { + let signing_kp = signing_kps + .iter() + .find(|kp| { + let (xonly_ok, _) = kp.x_only_public_key(); + xonly_ok == pk + }) + .ok_or_else(|| Error::ad_hoc("Could not find suitable kp for pk"))?; + + let sig = secp.sign_schnorr_no_aux_rand(&msg, signing_kp); + + secp.verify_schnorr(&sig, &msg, &pk) + .map_err(Error::crypto) + .context("failed to verify own proof of funds vtxo signature")?; + + let sig = taproot::Signature { + signature: sig, + sighash_type: TapSighashType::Default, + }; + + proof_input.tap_script_sigs = BTreeMap::from_iter([((pk, leaf_hash), sig)]); + + sig.signature.serialize().to_vec() + } + } // We need to branch here once more to handle "signing" (satisfying) the Note script. }; + // This is different for the Note script! 
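+ // When `input.preimage` is `Some`, `sig` above already carries the raw preimage rather than a
+ // Schnorr signature, so the witness built here is [preimage, exit_script, exit_control_block];
+ // the commented-out sketch below shows the intended note-path witness, which reveals the
+ // preimage alongside the note script and its control block.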
let witness = Witness::from_slice(&[ - &sig.signature[..], + sig.as_slice(), exit_script.as_bytes(), &exit_control_block.serialize(), ]); + // let witness = Witness::from_slice(&[ + // preimage, + // note_script.as_bytes(), + // &control_block.serialize(), + // ]); + proof_input.final_script_witness = Some(witness); } diff --git a/ark-core/src/vtxo.rs b/ark-core/src/vtxo.rs index c0f2fedc..dbba39c7 100644 --- a/ark-core/src/vtxo.rs +++ b/ark-core/src/vtxo.rs @@ -320,3 +320,160 @@ where Ok(VirtualTxOutPoints { spendable, expired }) } + +/// A virtual UTXO script that represents a collection of taproot scripts +/// forming a taproot tree. This is equivalent to the TypeScript VtxoScript class. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct VirtualUtxoScript { + /// The original scripts that form the taproot tree + scripts: Vec, + /// The taproot spend info containing the merkle tree and keys + spend_info: TaprootSpendInfo, + /// The tweaked public key + tweaked_public_key: bitcoin::key::TweakedPublicKey, +} + +impl VirtualUtxoScript { + /// Create a new VirtualUtxoScript from a collection of scripts + pub fn new(secp: &Secp256k1, scripts: Vec) -> Result + where + C: Verification, + { + if scripts.is_empty() { + return Err(Error::ad_hoc("scripts cannot be empty")); + } + + let unspendable_key: PublicKey = UNSPENDABLE_KEY.parse().expect("valid key"); + let (unspendable_key, _) = unspendable_key.inner.x_only_public_key(); + + let mut builder = TaprootBuilder::new(); + if scripts.len() == 1 { + // Single script case + builder = builder + .add_leaf(0, scripts[0].clone()) + .map_err(|e| Error::ad_hoc(format!("failed to add leaf: {e:?}")))?; + } else { + // Multiple scripts case - use balanced tree + for script in &scripts { + builder = builder + .add_leaf(1, script.clone()) + .map_err(|e| Error::ad_hoc(format!("failed to add leaf: {e:?}")))?; + } + } + + let spend_info = builder + .finalize(secp, unspendable_key) + .map_err(|e| Error::ad_hoc(format!("failed to finalize taproot tree: {e:?}")))?; + + let tweaked_public_key = spend_info.output_key(); + + Ok(Self { + scripts, + spend_info, + tweaked_public_key, + }) + } + + /// Encode the scripts to a vector of hex strings + pub fn encode(&self) -> Vec { + self.scripts + .iter() + .map(|script| hex::encode(script.as_bytes())) + .collect() + } + + /// Get the scripts + pub fn scripts(&self) -> &[ScriptBuf] { + &self.scripts + } + + /// Get the spend info + pub fn spend_info(&self) -> &TaprootSpendInfo { + &self.spend_info + } + + /// Get the tweaked public key + pub fn tweaked_public_key(&self) -> bitcoin::key::TweakedPublicKey { + self.tweaked_public_key + } + + /// Get the X-only public key from the tweaked public key + pub fn x_only_public_key(&self) -> XOnlyPublicKey { + self.tweaked_public_key.to_x_only_public_key() + } + + /// Get the script pubkey (P2TR output script) + pub fn script_pubkey(&self) -> ScriptBuf { + tr_script_pubkey(&self.spend_info) + } + + /// Create an ArkAddress from this script + pub fn ark_address(&self, network: Network, server_pubkey: XOnlyPublicKey) -> ArkAddress { + ArkAddress::new(network, server_pubkey, self.tweaked_public_key) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use bitcoin::key::Secp256k1; + use bitcoin::opcodes::all::*; + + #[test] + fn test_virtual_utxo_script_creation() { + let secp = Secp256k1::new(); + + // Create some test scripts + let script1 = ScriptBuf::builder().push_opcode(OP_CHECKSIG).into_script(); + + let script2 = ScriptBuf::builder() + .push_opcode(OP_CHECKSIGVERIFY) + 
.into_script(); + + let scripts = vec![script1, script2]; + + // Create VirtualUtxoScript + let vtxo_script = VirtualUtxoScript::new(&secp, scripts.clone()).unwrap(); + + // Test that scripts are preserved + assert_eq!(vtxo_script.scripts().len(), 2); + assert_eq!(vtxo_script.scripts()[0], scripts[0]); + assert_eq!(vtxo_script.scripts()[1], scripts[1]); + + // Test encoding + let encoded = vtxo_script.encode(); + assert_eq!(encoded.len(), 2); + } + #[test] + fn test_virtual_utxo_script_addresses() { + let secp = Secp256k1::new(); + + // Create a test script + let script = ScriptBuf::builder().push_opcode(OP_CHECKSIG).into_script(); + + let scripts = vec![script]; + let vtxo_script = VirtualUtxoScript::new(&secp, scripts).unwrap(); + + // Test that we can create addresses + let server_key = XOnlyPublicKey::from_slice(&[2; 32]).unwrap(); + let _ark_address = vtxo_script.ark_address(Network::Regtest, server_key); + + // Test script pubkey + let script_pubkey = vtxo_script.script_pubkey(); + assert_eq!(script_pubkey.len(), 34); // P2TR script pubkey length + assert_eq!(script_pubkey.as_bytes()[0], 0x51); // OP_1 + } + + #[test] + fn test_virtual_utxo_script_empty_scripts() { + let secp = Secp256k1::new(); + + // Test that empty scripts fail + let result = VirtualUtxoScript::new(&secp, vec![]); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("scripts cannot be empty")); + } +} diff --git a/ark-core/test_vectors.json b/ark-core/test_vectors.json new file mode 100644 index 00000000..4da5e5d6 --- /dev/null +++ b/ark-core/test_vectors.json @@ -0,0 +1,57 @@ +{ + "address": { + "valid": [ + { + "addr": "tark1qqellv77udfmr20tun8dvju5vgudpf9vxe8jwhthrkn26fz96pawqfdy8nk05rsmrf8h94j26905e7n6sng8y059z8ykn2j5xcuw4xt846qj6x", + "expectedVersion": 0, + "expectedPrefix": "tark", + "expectedUserKey": "0225a43cecfa0e1b1a4f72d64ad15f4cfa7a84d0723e8511c969aa543638ea9967", + "expectedServerKey": "0233ffb3dee353b1a9ebe4ced64b946238d0a4ac364f275d771da6ad2445d07ae0" + } + ], + "invalid": [ + { + "addr": "tark1x0lm8hhr2wc6n6lyemtyh9rz8rg2ftpkfun46aca56kjg3ws0tsztfpuanaquxc6faedvjk3tax0575y6perapg3e95654pk8r4fjecs5fyd2" + }, + { + "addr": "wrongprefix1qt9tfh7c09hlsstzq5y9tzuwyaesrwr8gpy8cn29cxv0flp64958s0n0yd0" + } + ] + }, + "note": { + "valid": [ + { + "hrp": "arknote", + "str": "arknote8rFzGqZsG9RCLripA6ez8d2hQEzFKsqCeiSnXhQj56Ysw7ZQT", + "expectedPreimage": "11d2a03264d0efd311d2a03264d0efd311d2a03264d0efd311d2a03264d0efd3", + "expectedValue": 900000 + }, + { + "hrp": "arknote", + "str": "arknoteSkB92YpWm4Q2ijQHH34cqbKkCZWszsiQgHVjtNeFF2Cwp59D", + "expectedPreimage": "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", + "expectedValue": 1828932 + }, + { + "hrp": "noteark", + "str": "noteark8rFzGqZsG9RCLripA6ez8d2hQEzFKsqCeiSnXhQj56Ysw7ZQT", + "expectedPreimage": "11d2a03264d0efd311d2a03264d0efd311d2a03264d0efd311d2a03264d0efd3", + "expectedValue": 900000 + }, + { + "hrp": "noteark", + "str": "notearkSkB92YpWm4Q2ijQHH34cqbKkCZWszsiQgHVjtNeFF2Cwp59D", + "expectedPreimage": "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", + "expectedValue": 1828932 + } + ], + "invalid": [ + { + "str": "wrongprefix1qt9tfh7c09hlsstzq5y9tzuwyaesrwr8gpy8cn29cxv0flp64958s0n0yd0" + }, + { + "str": "arknoteshort" + } + ] + } +} diff --git a/ark-grpc/build.rs b/ark-grpc/build.rs index 57854676..b103e8eb 100644 --- a/ark-grpc/build.rs +++ b/ark-grpc/build.rs @@ -19,6 +19,7 @@ fn generate_protos() -> std::io::Result<()> { "proto/ark/v1/wallet.proto", 
"proto/ark/v1/indexer.proto", "proto/ark/v1/types.proto", + "proto/ark/v1/admin.proto", ], &["proto"], )?; diff --git a/ark-grpc/proto/ark/v1/admin.proto b/ark-grpc/proto/ark/v1/admin.proto new file mode 100644 index 00000000..6217a1e9 --- /dev/null +++ b/ark-grpc/proto/ark/v1/admin.proto @@ -0,0 +1,155 @@ +syntax = "proto3"; + +package ark.v1; + +import "google/api/annotations.proto"; +import "ark/v1/types.proto"; + +service AdminService { + rpc GetScheduledSweep(GetScheduledSweepRequest) returns (GetScheduledSweepResponse) { + option (google.api.http) = { + get: "/v1/admin/sweeps" + }; + } + rpc GetRoundDetails(GetRoundDetailsRequest) returns (GetRoundDetailsResponse) { + option (google.api.http) = { + get: "/v1/admin/round/{round_id}" + }; + } + rpc GetRounds(GetRoundsRequest) returns (GetRoundsResponse) { + option (google.api.http) = { + get: "/v1/admin/rounds" + }; + } + rpc CreateNote(CreateNoteRequest) returns (CreateNoteResponse) { + option (google.api.http) = { + post: "/v1/admin/note" + body: "*" + }; + } + rpc GetMarketHourConfig(GetMarketHourConfigRequest) returns (GetMarketHourConfigResponse) { + option (google.api.http) = { + get: "/v1/admin/marketHour" + }; + } + rpc UpdateMarketHourConfig(UpdateMarketHourConfigRequest) returns (UpdateMarketHourConfigResponse) { + option (google.api.http) = { + post: "/v1/admin/marketHour" + body: "*" + }; + } + rpc ListIntents(ListIntentsRequest) returns (ListIntentsResponse) { + option (google.api.http) = { + get: "/v1/admin/intents" + }; + } + rpc DeleteIntents(DeleteIntentsRequest) returns (DeleteIntentsResponse) { + option (google.api.http) = { + post: "/v1/admin/intents/delete" + body: "*" + }; + } +} + +message GetScheduledSweepRequest {} +message GetScheduledSweepResponse { + repeated ScheduledSweep sweeps = 1; +} + +message GetRoundDetailsRequest { + string round_id = 1; +} +message GetRoundDetailsResponse { + string round_id = 1; + int64 started_at = 2; + int64 ended_at = 3; + string commitment_txid = 4; + string forfeited_amount = 5; + string total_vtxos_amount = 6; + string total_exit_amount = 7; + string total_fee_amount = 8; + repeated string inputs_vtxos = 9; + repeated string outputs_vtxos = 10; + repeated string exit_addresses = 11; +} + +message GetRoundsRequest { + int64 after = 1; + int64 before = 2; +} +message GetRoundsResponse { + repeated string rounds = 1; +} + +message CreateNoteRequest { + uint32 amount = 1; + uint32 quantity = 2; +} +message CreateNoteResponse { + repeated string notes = 1; +} + +message GetMarketHourConfigRequest {} +message GetMarketHourConfigResponse { + MarketHourConfig config = 1; +} + +message UpdateMarketHourConfigRequest { + MarketHourConfig config = 1; +} +message UpdateMarketHourConfigResponse {} + +message ListIntentsRequest { + repeated string intent_ids = 1; +} +message ListIntentsResponse { + repeated IntentInfo intents = 1; +} + +message DeleteIntentsRequest { + repeated string intent_ids = 1; +} +message DeleteIntentsResponse {} + +message SweepableOutput { + string txid = 1; + uint32 vout = 2; + string amount = 3; + int64 scheduled_at = 4; +} + +message ScheduledSweep { + string round_id = 1; + repeated SweepableOutput outputs = 2; +} + +message MarketHourConfig { + int64 start_time = 1; + int64 end_time = 2; + int64 period = 3; + int64 round_interval = 4; +} + +message IntentInput { + string txid = 1; + uint32 vout = 2; + uint64 amount = 3; +} + +message IntentInfo { + string id = 1; + int64 created_at = 2; + repeated Output receivers = 3; + repeated IntentInput inputs = 4; + 
repeated IntentInput boarding_inputs = 5; + repeated string cosigners_public_keys = 6; + Bip322Signature proof = 7; +} + +message Output { + oneof destination { + string vtxo_script = 1; + string onchain_address = 2; + }; // onchain or off-chain + uint64 amount = 3; // Amount to send in satoshis. +} \ No newline at end of file diff --git a/ark-grpc/src/client.rs b/ark-grpc/src/client.rs index 0c2ec9b7..1bb3ba0d 100644 --- a/ark-grpc/src/client.rs +++ b/ark-grpc/src/client.rs @@ -1,9 +1,11 @@ use crate::generated; +use crate::generated::ark::v1::admin_service_client::AdminServiceClient; use crate::generated::ark::v1::ark_service_client::ArkServiceClient; use crate::generated::ark::v1::indexer_service_client::IndexerServiceClient; use crate::generated::ark::v1::indexer_tx_history_record::Key; use crate::generated::ark::v1::Bip322Signature; use crate::generated::ark::v1::ConfirmRegistrationRequest; +use crate::generated::ark::v1::CreateNoteRequest; use crate::generated::ark::v1::GetEventStreamRequest; use crate::generated::ark::v1::GetInfoRequest; use crate::generated::ark::v1::GetSubscriptionRequest; @@ -69,6 +71,7 @@ pub struct Client { url: String, ark_client: Option>, indexer_client: Option>, + admin_client: Option>, } impl Client { @@ -77,6 +80,7 @@ impl Client { url, ark_client: None, indexer_client: None, + admin_client: None, } } @@ -87,9 +91,13 @@ impl Client { let indexer_client = IndexerServiceClient::connect(self.url.clone()) .await .map_err(Error::connect)?; + let admin_client = AdminServiceClient::connect(self.url.clone()) + .await + .map_err(Error::connect)?; self.ark_client = Some(ark_service_client); self.indexer_client = Some(indexer_client); + self.admin_client = Some(admin_client); Ok(()) } @@ -559,6 +567,16 @@ impl Client { Ok(stream.boxed()) } + pub async fn create_arknote(&self, amount: u32, quantity: u32) -> Result, Error> { + let mut client = self.admin_client()?; + + let request = CreateNoteRequest { amount, quantity }; + + let response = client.create_note(request).await.map_err(Error::request)?; + + Ok(response.into_inner().notes) + } + fn ark_client(&self) -> Result, Error> { // Cloning an `ArkServiceClient` is cheap. 
self.ark_client.clone().ok_or(Error::not_connected()) @@ -566,6 +584,9 @@ impl Client { fn indexer_client(&self) -> Result, Error> { self.indexer_client.clone().ok_or(Error::not_connected()) } + fn admin_client(&self) -> Result, Error> { + self.admin_client.clone().ok_or(Error::not_connected()) + } } impl TryFrom for BatchStartedEvent { diff --git a/ark-grpc/src/generated/ark.v1.rs b/ark-grpc/src/generated/ark.v1.rs index 829fdbdd..cc919e4a 100644 --- a/ark-grpc/src/generated/ark.v1.rs +++ b/ark-grpc/src/generated/ark.v1.rs @@ -1753,3 +1753,388 @@ pub mod indexer_service_client { } } } +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetScheduledSweepRequest {} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetScheduledSweepResponse { + #[prost(message, repeated, tag = "1")] + pub sweeps: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetRoundDetailsRequest { + #[prost(string, tag = "1")] + pub round_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetRoundDetailsResponse { + #[prost(string, tag = "1")] + pub round_id: ::prost::alloc::string::String, + #[prost(int64, tag = "2")] + pub started_at: i64, + #[prost(int64, tag = "3")] + pub ended_at: i64, + #[prost(string, tag = "4")] + pub commitment_txid: ::prost::alloc::string::String, + #[prost(string, tag = "5")] + pub forfeited_amount: ::prost::alloc::string::String, + #[prost(string, tag = "6")] + pub total_vtxos_amount: ::prost::alloc::string::String, + #[prost(string, tag = "7")] + pub total_exit_amount: ::prost::alloc::string::String, + #[prost(string, tag = "8")] + pub total_fee_amount: ::prost::alloc::string::String, + #[prost(string, repeated, tag = "9")] + pub inputs_vtxos: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag = "10")] + pub outputs_vtxos: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag = "11")] + pub exit_addresses: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetRoundsRequest { + #[prost(int64, tag = "1")] + pub after: i64, + #[prost(int64, tag = "2")] + pub before: i64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetRoundsResponse { + #[prost(string, repeated, tag = "1")] + pub rounds: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateNoteRequest { + #[prost(uint32, tag = "1")] + pub amount: u32, + #[prost(uint32, tag = "2")] + pub quantity: u32, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateNoteResponse { + #[prost(string, repeated, tag = "1")] + pub notes: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetMarketHourConfigRequest {} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetMarketHourConfigResponse { + #[prost(message, optional, tag = "1")] + pub config: ::core::option::Option, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateMarketHourConfigRequest { + #[prost(message, optional, tag = "1")] + pub config: ::core::option::Option, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateMarketHourConfigResponse {} +#[derive(Clone, PartialEq, Eq, Hash, 
::prost::Message)] +pub struct ListIntentsRequest { + #[prost(string, repeated, tag = "1")] + pub intent_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListIntentsResponse { + #[prost(message, repeated, tag = "1")] + pub intents: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteIntentsRequest { + #[prost(string, repeated, tag = "1")] + pub intent_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteIntentsResponse {} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SweepableOutput { + #[prost(string, tag = "1")] + pub txid: ::prost::alloc::string::String, + #[prost(uint32, tag = "2")] + pub vout: u32, + #[prost(string, tag = "3")] + pub amount: ::prost::alloc::string::String, + #[prost(int64, tag = "4")] + pub scheduled_at: i64, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ScheduledSweep { + #[prost(string, tag = "1")] + pub round_id: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub outputs: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct MarketHourConfig { + #[prost(int64, tag = "1")] + pub start_time: i64, + #[prost(int64, tag = "2")] + pub end_time: i64, + #[prost(int64, tag = "3")] + pub period: i64, + #[prost(int64, tag = "4")] + pub round_interval: i64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct IntentInput { + #[prost(string, tag = "1")] + pub txid: ::prost::alloc::string::String, + #[prost(uint32, tag = "2")] + pub vout: u32, + #[prost(uint64, tag = "3")] + pub amount: u64, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IntentInfo { + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + #[prost(int64, tag = "2")] + pub created_at: i64, + #[prost(message, repeated, tag = "3")] + pub receivers: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub inputs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub boarding_inputs: ::prost::alloc::vec::Vec, + #[prost(string, repeated, tag = "6")] + pub cosigners_public_keys: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(message, optional, tag = "7")] + pub proof: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Output { + /// Amount to send in satoshis. + #[prost(uint64, tag = "3")] + pub amount: u64, + #[prost(oneof = "output::Destination", tags = "1, 2")] + pub destination: ::core::option::Option, +} +/// Nested message and enum types in `Output`. +pub mod output { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Destination { + #[prost(string, tag = "1")] + VtxoScript(::prost::alloc::string::String), + #[prost(string, tag = "2")] + OnchainAddress(::prost::alloc::string::String), + } +} +/// Generated client implementations. +pub mod admin_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value + )] + use tonic::codegen::http::Uri; + use tonic::codegen::*; + #[derive(Debug, Clone)] + pub struct AdminServiceClient { + inner: tonic::client::Grpc, + } + impl AdminServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl AdminServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> AdminServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + >>::Error: + Into + std::marker::Send + std::marker::Sync, + { + AdminServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn get_scheduled_sweep( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = + http::uri::PathAndQuery::from_static("/ark.v1.AdminService/GetScheduledSweep"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("ark.v1.AdminService", "GetScheduledSweep")); + self.inner.unary(req, path, codec).await + } + pub async fn get_round_details( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/ark.v1.AdminService/GetRoundDetails"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("ark.v1.AdminService", "GetRoundDetails")); + self.inner.unary(req, path, codec).await + } + pub async fn get_rounds( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/ark.v1.AdminService/GetRounds"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("ark.v1.AdminService", "GetRounds")); + self.inner.unary(req, path, codec).await + } + pub async fn create_note( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/ark.v1.AdminService/CreateNote"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("ark.v1.AdminService", "CreateNote")); + self.inner.unary(req, path, codec).await + } + pub async fn get_market_hour_config( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = + http::uri::PathAndQuery::from_static("/ark.v1.AdminService/GetMarketHourConfig"); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new( + "ark.v1.AdminService", + "GetMarketHourConfig", + )); + self.inner.unary(req, path, codec).await + } + pub async fn update_market_hour_config( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = + http::uri::PathAndQuery::from_static("/ark.v1.AdminService/UpdateMarketHourConfig"); + let mut req = request.into_request(); + 
+            req.extensions_mut().insert(GrpcMethod::new(
+                "ark.v1.AdminService",
+                "UpdateMarketHourConfig",
+            ));
+            self.inner.unary(req, path, codec).await
+        }
+        pub async fn list_intents(
+            &mut self,
+            request: impl tonic::IntoRequest<super::ListIntentsRequest>,
+        ) -> std::result::Result<tonic::Response<super::ListIntentsResponse>, tonic::Status>
+        {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
+            })?;
+            let codec = tonic_prost::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static("/ark.v1.AdminService/ListIntents");
+            let mut req = request.into_request();
+            req.extensions_mut()
+                .insert(GrpcMethod::new("ark.v1.AdminService", "ListIntents"));
+            self.inner.unary(req, path, codec).await
+        }
+        pub async fn delete_intents(
+            &mut self,
+            request: impl tonic::IntoRequest<super::DeleteIntentsRequest>,
+        ) -> std::result::Result<tonic::Response<super::DeleteIntentsResponse>, tonic::Status>
+        {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
+            })?;
+            let codec = tonic_prost::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static("/ark.v1.AdminService/DeleteIntents");
+            let mut req = request.into_request();
+            req.extensions_mut()
+                .insert(GrpcMethod::new("ark.v1.AdminService", "DeleteIntents"));
+            self.inner.unary(req, path, codec).await
+        }
+    }
+}
diff --git a/e2e-tests/tests/e2e_arknote.rs b/e2e-tests/tests/e2e_arknote.rs
new file mode 100644
index 00000000..3abb31ef
--- /dev/null
+++ b/e2e-tests/tests/e2e_arknote.rs
@@ -0,0 +1,91 @@
+#![allow(clippy::unwrap_used)]
+
+use ark_core::ArkNote;
+use bitcoin::key::Secp256k1;
+use bitcoin::Amount;
+use common::init_tracing;
+use common::set_up_client;
+use common::Nigiri;
+use rand::thread_rng;
+use std::sync::Arc;
+
+mod common;
+
+#[tokio::test]
+#[ignore]
+pub async fn e2e_arknote_redemption() {
+    init_tracing();
+    let nigiri = Arc::new(Nigiri::new());
+
+    let secp = Secp256k1::new();
+
+    let (alice, _) = set_up_client("alice".to_string(), nigiri.clone(), secp.clone()).await;
+    let alice_offchain_address = alice.get_offchain_address().unwrap().0;
+
+    tracing::info!(
+        ?alice_offchain_address,
+        "Created Alice's wallet for ArkNote redemption test"
+    );
+
+    let fund_amount = Amount::from_sat(1000);
+
+    // Create ArkNote using client API
+    let note = alice.create_arknote(fund_amount).await.unwrap();
+
+    tracing::info!(
+        arknote_string = %note.to_encoded_string(),
+        value = %fund_amount,
+        "Created ArkNote using client API for redemption test"
+    );
+
+    let parsed_note = ArkNote::from_string(&note.to_encoded_string()).unwrap();
+    assert_eq!(parsed_note.value(), fund_amount);
+    assert!(note.status().confirmed);
+    assert!(note.extra_witness().is_some());
+
+    // Verify tap tree is not empty
+    let tap_tree = note.tap_tree();
+    assert!(!tap_tree.is_empty());
+
+    // Verify outpoint creation
+    let outpoint = note.outpoint();
+    assert_eq!(outpoint.vout, 0);
+
+    // Verify TxOut creation
+    let tx_out = note.to_tx_out();
+    assert_eq!(tx_out.value, fund_amount);
+
+    // Redeem the ArkNote using the new redeem_note method
+    let mut rng = thread_rng();
+    let txid_opt = alice.redeem_note(&mut rng, note).await.unwrap();
+
+    assert!(
+        txid_opt.is_some(),
+        "Expected a transaction ID from ArkNote redemption"
+    );
+    let txid = txid_opt.unwrap();
+
+    tracing::info!(
+        %txid,
+        "Successfully redeemed ArkNote"
+    );
+
+    // Verify the balance has been updated
+    let balance = alice.offchain_balance().await.unwrap();
+    tracing::info!(
+        confirmed_balance = %balance.confirmed(),
+        pending_balance = %balance.pending(),
+        "Balance after ArkNote redemption"
+    );
+
+    // Assert that the balance has increased by the redeemed amount
+    assert!(
+        balance.confirmed() >= fund_amount || balance.pending() >= fund_amount,
+        "Expected balance to increase by at least {} after redemption, but got confirmed: {} and pending: {}",
+        fund_amount,
+        balance.confirmed(),
+        balance.pending()
+    );
+
+    tracing::info!("ArkNote redemption test completed successfully");
+}
diff --git a/justfile b/justfile
index 54a50ae6..a7877437 100644
--- a/justfile
+++ b/justfile
@@ -107,10 +107,6 @@ arkd-setup:
 
     set -euxo pipefail
 
-    echo "Starting redis"
-
-    just arkd-redis-run
-
     echo "Running arkd from $ARKD_DIR"
 
     just arkd-wallet-run
@@ -329,3 +325,20 @@ test:
 e2e-tests:
     @echo running e2e tests
     cargo test -p e2e-tests -- --ignored --nocapture
+
+integration-tests:
+    @echo running integration tests
+    nigiri stop --delete && just arkd-kill arkd-wipe arkd-wallet-kill arkd-wallet-wipe
+    nigiri start
+    sleep 1
+    if [ -z "$ARKD_DIR" ] || [ "$ARKD_DIR" = "/" ] || [ "$ARKD_DIR" = "$HOME" ]; then \
+        echo "Error: ARKD_DIR is not set or is set to a dangerous value ('$ARKD_DIR'). Aborting rm -rf." >&2; \
+        exit 1; \
+    fi
+    rm -rf "$ARKD_DIR"
+    just arkd-checkout master
+    just arkd-build
+    just arkd-setup
+    just arkd-run
+    just arkd-fund 20
+    just e2e-tests
\ No newline at end of file
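For reviewers, a minimal sketch of how the new `CreateNote` admin RPC could be driven directly through the generated `AdminServiceClient` against a locally running arkd. The generated module path, the request field names (`amount`, `quantity`), and the admin endpoint address are assumptions for illustration only; they are not fixed by this diff.

use tonic::Request;

// Assumed module path and field names; adjust to the actual generated layout
// and the ark.v1 proto definitions.
use ark_grpc::ark::v1::admin_service_client::AdminServiceClient;
use ark_grpc::ark::v1::CreateNoteRequest;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to a locally running arkd admin endpoint (address is an assumption).
    let mut client = AdminServiceClient::connect("http://127.0.0.1:7070").await?;

    // Mint a single 1000-sat note; the client-side `create_arknote` helper used in
    // the e2e test above wraps a call like this one.
    let response = client
        .create_note(Request::new(CreateNoteRequest {
            amount: 1_000,
            quantity: 1,
        }))
        .await?
        .into_inner();

    println!("created notes: {response:?}");
    Ok(())
}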