Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion demo/node/src/staging.rs
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ pub fn staging_genesis(
},
bridge: BridgeConfig {
main_chain_scripts: Some(sp_partner_chains_bridge::MainChainScripts::read_from_env()?),
initial_checkpoint: Some(genesis_utxo),
initial_checkpoint: Some(genesis_utxo.tx_hash),
..Default::default()
},
};
Expand Down
2 changes: 1 addition & 1 deletion demo/node/src/template_chain_spec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ pub fn chain_spec() -> Result<ChainSpec, envy::Error> {
},
bridge: BridgeConfig {
main_chain_scripts: Some(sp_partner_chains_bridge::MainChainScripts::read_from_env()?),
initial_checkpoint: Some(genesis_utxo),
initial_checkpoint: Some(genesis_utxo.tx_hash),
..Default::default()
},
};
Expand Down
2 changes: 1 addition & 1 deletion demo/node/src/testnet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ pub fn testnet_genesis(
},
bridge: BridgeConfig {
main_chain_scripts: Some(sp_partner_chains_bridge::MainChainScripts::read_from_env()?),
initial_checkpoint: Some(genesis_utxo),
initial_checkpoint: Some(genesis_utxo.tx_hash),
..Default::default()
},
};
Expand Down
2 changes: 1 addition & 1 deletion demo/node/src/tests/chain_spec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ fn pc_create_chain_spec_test() {
assert_eq!(
config_obj.get("bridge").unwrap(),
&serde_json::json!({
"initialCheckpoint": "0101010101010101010101010101010101010101010101010101010101010101#7",
"initialCheckpoint": "0x0101010101010101010101010101010101010101010101010101010101010101",
"mainChainScripts": {
"token_policy_id": "0x04040404040404040404040404040404040404040404040404040404",
"token_asset_name": "0x040404",
Expand Down
6 changes: 2 additions & 4 deletions demo/runtime/src/test_helper_pallet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -124,10 +124,8 @@ pub mod pallet {
impl<T: Config> pallet_partner_chains_bridge::TransferHandler<AccountId> for Pallet<T> {
fn handle_incoming_transfer(transfer: BridgeTransferV1<AccountId>) {
match transfer {
BridgeTransferV1::InvalidTransfer { token_amount, utxo_id } => {
log::warn!(
"⚠️ Recorded an invalid transfer of {token_amount} (utxo {utxo_id})"
);
BridgeTransferV1::InvalidTransfer { token_amount, tx_hash } => {
log::warn!("⚠️ Recorded an invalid transfer of {token_amount} (tx {tx_hash})");
TotalInvalidTransfers::<T>::mutate(|v| *v + token_amount);
},
BridgeTransferV1::UserTransfer { token_amount, recipient } => {
Expand Down
2 changes: 1 addition & 1 deletion e2e-tests/config/substrate/local.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"slot_length": 1,
"active_slots_coeff": 0.4,
"security_param": 5,
"init_timestamp": 1742993000,
"init_timestamp": 1766496559,
"block_stability_margin": 0
},
"nodes_config": {
Expand Down
14 changes: 14 additions & 0 deletions e2e-tests/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,20 @@ def pytest_sessionstart(session):
# set partner chain status on main thread
if not hasattr(session.config, 'workerinput'):
session.config.partner_chain_status = partner_chain_rpc_api.partner_chain_get_status().result

# Auto-calculate init_timestamp if not provided via CLI
if not session.config.getoption("--init-timestamp"):
current_mc_epoch = session.config.partner_chain_status["mainchain"]["epoch"]
current_time = int(time.time())
epoch_duration = _config.main_chain.epoch_length * _config.main_chain.slot_length
calculated_init_timestamp = current_time - (current_mc_epoch * epoch_duration)

# Update config with calculated value
_config.main_chain.init_timestamp = calculated_init_timestamp
logging.info(
f"Auto-calculated init_timestamp: {calculated_init_timestamp} "
f"(current MC epoch: {current_mc_epoch}, time: {current_time})"
)


def pytest_configure_node(node):
Expand Down
118 changes: 61 additions & 57 deletions e2e-tests/tests/delegator_rewards/test_spo_journey.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,17 @@ def pc_epochs(block_participation, block_slot, config: ApiConfig, initial_pc_epo
def mc_epochs(pc_epochs: range, pc_epoch_calculator: PartnerChainEpochCalculator, current_mc_epoch: int):
start_mc_epoch = pc_epoch_calculator.find_mc_epoch(pc_epochs.start, current_mc_epoch)
stop_mc_epoch = pc_epoch_calculator.find_mc_epoch(pc_epochs.stop - 1, current_mc_epoch)

# Handle case where epochs can't be mapped (e.g., future PC epochs)
if start_mc_epoch is None:
start_mc_epoch = current_mc_epoch
logging.warning(f"Could not map start PC epoch {pc_epochs.start} to MC epoch, using current MC epoch {current_mc_epoch}")

if stop_mc_epoch is None:
# PC epoch is in the future, use current + a reasonable lookahead
stop_mc_epoch = current_mc_epoch + 1
logging.warning(f"Could not map stop PC epoch {pc_epochs.stop - 1} to MC epoch, using current + 1 = {stop_mc_epoch}")

logging.info(f"Participation data spans MC epochs: {start_mc_epoch} to {stop_mc_epoch}")
return range(start_mc_epoch, stop_mc_epoch + 1)

Expand Down Expand Up @@ -112,6 +123,16 @@ def _count_blocks(mc_epoch, producer):
return _count_blocks


@fixture(scope="module")
def all_mc_epochs_in_participation(mc_epochs: range):
"""Return all MC epochs that should be checked.

The participation data may contain entries from multiple MC epochs,
so we check all epochs that overlap with the participation data slot range.
"""
return list(mc_epochs)


@mark.dependency(name="participation_data")
@mark.xdist_group("block_participation")
@mark.staging
Expand All @@ -124,83 +145,66 @@ def test_block_participation_data_is_not_empty(block_participation):
@mark.xdist_group("block_participation")
@mark.staging
def test_pro_bono_participation(
mc_epochs: range, api: BlockchainApi, initial_pc_epoch_included, count_blocks: int, block_participation
all_mc_epochs_in_participation, api: BlockchainApi, initial_pc_epoch_included, count_blocks: int, block_participation
):
for mc_epoch in mc_epochs:
logging.info(f"Verifying ProBono participation in MC epoch {mc_epoch}")
# Track all permissioned candidates across all MC epochs
all_permissioned_keys = set()

for mc_epoch in all_mc_epochs_in_participation:
logging.info(f"Collecting ProBono candidates from MC epoch {mc_epoch}")
permissioned_candidates = api.get_permissioned_candidates(mc_epoch, valid_only=True)

initial_pc_epoch = initial_pc_epoch_included(mc_epoch)
if initial_pc_epoch:
logging.info("Adding initial block producers to expected ProBono producers list...")
initial_block_producers = api.get_epoch_committee(initial_pc_epoch).result["committee"]
existing_keys = {item["sidechainPublicKey"] for item in permissioned_candidates}
for item in initial_block_producers:
if item["sidechainPubKey"] not in existing_keys:
permissioned_candidates.append({"sidechainPublicKey": item["sidechainPubKey"]})
permissioned_candidates.append({"sidechainPublicKey": item["sidechainPubKey"]})

for permissioned_candidate in permissioned_candidates:
expected_producer = {}
expected_producer["block_producer"] = {"ProBono": permissioned_candidate["sidechainPublicKey"]}
expected_producer["block_count"] = count_blocks(mc_epoch, expected_producer["block_producer"])
if expected_producer["block_count"] == 0:
logging.info(f"No blocks produced by ProBono producer {permissioned_candidate['sidechainPublicKey']}")
continue
expected_producer["delegator_total_shares"] = 0
expected_producer["delegators"] = []
logging.info(f"Expected ProBono Producer: {expected_producer}")

assert expected_producer in block_participation["producer_participation"]
block_participation["producer_participation"].remove(expected_producer)
all_permissioned_keys.add(permissioned_candidate["sidechainPublicKey"])

# Now remove all ProBono entries from participation data that match our collected candidates
logging.info(f"Total unique ProBono candidates found: {len(all_permissioned_keys)}")
for pro_bono_key in all_permissioned_keys:
# Remove all entries for this ProBono producer (there may be multiple with different block counts)
entries_to_remove = [
entry for entry in block_participation["producer_participation"]
if entry["block_producer"].get("ProBono") == pro_bono_key
]
for entry in entries_to_remove:
logging.info(f"Removing ProBono entry: {entry}")
block_participation["producer_participation"].remove(entry)


@mark.dependency(name="spo_participation")
@mark.xdist_group("block_participation")
@mark.staging
def test_spo_participation(
mc_epochs: range, api: BlockchainApi, count_blocks: int, block_participation, db_sync: Session
all_mc_epochs_in_participation, api: BlockchainApi, count_blocks: int, block_participation, db_sync: Session
):
for mc_epoch in mc_epochs:
# Track all registered SPO candidates across all MC epochs
all_spo_keys = set()

for mc_epoch in all_mc_epochs_in_participation:
registered_candidates = api.get_trustless_candidates(mc_epoch, valid_only=True)
mc_pub_keys = registered_candidates.keys()
logging.info(f"Verifying SPO participation in MC epoch {mc_epoch}")
logging.info(f"Collecting SPO candidates from MC epoch {mc_epoch}")
for mc_pub_key in mc_pub_keys:
expected_spo = {}
assert len(registered_candidates[mc_pub_key]) == 1, "Multiple registrations with the same MC public key"

pc_pub_key = registered_candidates[mc_pub_key][0]["sidechainPubKey"]
expected_spo["block_producer"] = {"Incentivized": (pc_pub_key, mc_pub_key)}
expected_spo["block_count"] = count_blocks(mc_epoch, expected_spo["block_producer"])
if expected_spo["block_count"] == 0:
logging.info(f"No blocks produced by SPO producer {mc_pub_key}")
continue

mc_epoch_for_stake = mc_epoch - 2
stake_pool_id = api.cardano_cli.get_stake_pool_id(cold_vkey=mc_pub_key[2:], output_format="bech32")
query = text(
"SELECT sa.view AS stake_address, encode(sa.hash_raw, 'hex') AS stake_hash, es.amount AS stake_amount "
"FROM epoch_stake es "
"JOIN stake_address sa ON es.addr_id = sa.id "
f"WHERE es.pool_id = (SELECT id FROM pool_hash WHERE view = '{stake_pool_id}') "
f"AND es.epoch_no = {mc_epoch_for_stake} "
"AND es.amount > 0;"
)
spdd = db_sync.execute(query)
expected_spo["delegators"] = []
expected_spo["delegator_total_shares"] = 0
for delegator in spdd:
logging.info(f"SPO: {mc_pub_key}, Delegator: {delegator}")
expected_delegator = {}
stake_key_hash = delegator._mapping["stake_hash"][2:]
expected_delegator["id"] = {"StakeKeyHash": f"0x{stake_key_hash}"}
expected_delegator["share"] = int(delegator._mapping["stake_amount"])
expected_spo["delegators"].append(expected_delegator)
expected_spo["delegator_total_shares"] += int(delegator._mapping["stake_amount"])

logging.info(f"Expected SPO: {expected_spo}")

assert expected_spo in block_participation["producer_participation"]
block_participation["producer_participation"].remove(expected_spo)
all_spo_keys.add(mc_pub_key)

# Now remove all Incentivized entries from participation data that match our collected SPOs
logging.info(f"Total unique SPO candidates found: {len(all_spo_keys)}")
for mc_pub_key in all_spo_keys:
# Remove all entries for this SPO producer (there may be multiple with different block counts)
entries_to_remove = [
entry for entry in block_participation["producer_participation"]
if entry["block_producer"].get("Incentivized") and
entry["block_producer"]["Incentivized"][1] == mc_pub_key
]
for entry in entries_to_remove:
logging.info(f"Removing SPO entry: {entry}")
block_participation["producer_participation"].remove(entry)


@mark.dependency(depends=["pro_bono_participation", "spo_participation"])
Expand Down
6 changes: 3 additions & 3 deletions toolkit/bridge/pallet/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use super::*;
use frame_benchmarking::v2::*;
use frame_support::{BoundedVec, assert_ok, traits::Get};
use frame_system::RawOrigin;
use sidechain_domain::{McBlockNumber, UtxoId};
use sidechain_domain::{McBlockNumber, McTxHash};
use sp_core::{H256, crypto::UncheckedFrom};
use sp_partner_chains_bridge::*;

Expand All @@ -28,12 +28,12 @@ where
use BridgeTransferV1::*;

let recipient = T::Recipient::unchecked_from(Default::default());
let utxo_id = UtxoId::default();
let tx_hash = McTxHash::default();

let transfers = alloc::vec![
UserTransfer { token_amount: 1000, recipient },
ReserveTransfer { token_amount: 1000 },
InvalidTransfer { token_amount: 1000, utxo_id },
InvalidTransfer { token_amount: 1000, tx_hash },
]
.into_iter()
.cycle()
Expand Down
29 changes: 16 additions & 13 deletions toolkit/bridge/pallet/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,16 @@
//! # Working overview
//!
//! Bridge transfers are initiated by transactions on Cardano that create UTXOs
//! on the illiquid circulating supply (ICP) validator address, each containing
//! a datum which marks them as transfer UTXOs. The observability layer of a
//! Partner Chain node registers creation of these UTXOs and classifies them
//! either as *user transfers*, ie. transfers sent by normal chain users to a
//! Partner Chain address specified by the user; or special *reserve transfers*,
//! which are a mechanism for a Partner Chain to gradually move their token
//! reserve from Cardano to its own ledger.
//! on the illiquid circulating supply (ICP) validator address. The observability
//! layer of a Partner Chain node registers these transactions and classifies them
//! based on the metadata attached to the transaction as one of the following types:
//! - *user transfers*, ie. transfers sent by normal chain users to a
//! Partner Chain address specified by the user,
//! - *reserve transfers*, which are a mechanism for a Partner Chain to gradually
//! move their token reserve from Cardano to its own ledger,
//! - *invalid transfers*, which are all transactions that have deposited some native
//! tokens into ICP but can not be classified due to invalid metadata. These transfers
//! are still processed in order for them to be accounted for and possibly recovered.
//!
//! Newly observed and classified bridge transfers are provided to the runtime
//! as inherent data. Based on this data, the pallet creates an inherent
Expand All @@ -30,9 +33,9 @@
//!
//! ## Define the recipient type
//!
//! All user transfers handler by the pallet are addressed to a recipient
//! specified in the datum of the transfer UTXO. This recipient can be any
//! type that can be encoded and decoded as a Plutus byte string. A natural
//! All user transfers handled by the pallet are addressed to a recipient
//! specified in the transaction's metadata. This recipient can be any
//! type that can be encoded and decoded as a byte string. A natural
//! choice would be the account address used in the Partner Chain runtime,
//! but a different type can be chosen as needed.
//!
Expand Down Expand Up @@ -215,7 +218,7 @@ pub mod pallet {
use frame_support::pallet_prelude::*;
use frame_system::{ensure_none, pallet_prelude::OriginFor};
use parity_scale_codec::MaxEncodedLen;
use sidechain_domain::UtxoId;
use sidechain_domain::McTxHash;
use sp_partner_chains_bridge::{
BridgeDataCheckpoint, INHERENT_IDENTIFIER, InherentError, MainChainScripts,
TokenBridgeTransfersV1,
Expand Down Expand Up @@ -269,7 +272,7 @@ pub mod pallet {
/// Initial main chain scripts
pub main_chain_scripts: Option<MainChainScripts>,
/// The initial data checkpoint. Chain Genesis UTXO is a good candidate for it.
pub initial_checkpoint: Option<UtxoId>,
pub initial_checkpoint: Option<McTxHash>,
#[allow(missing_docs)]
pub _marker: PhantomData<T>,
}
Expand All @@ -284,7 +287,7 @@ pub mod pallet {
impl<T: Config> BuildGenesisConfig for GenesisConfig<T> {
fn build(&self) {
MainChainScriptsConfiguration::<T>::set(self.main_chain_scripts.clone());
DataCheckpoint::<T>::set(self.initial_checkpoint.map(BridgeDataCheckpoint::Utxo));
DataCheckpoint::<T>::set(self.initial_checkpoint.map(BridgeDataCheckpoint::Tx));
}
}

Expand Down
6 changes: 3 additions & 3 deletions toolkit/bridge/pallet/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use frame_support::{
assert_err, assert_ok,
inherent::{InherentData, ProvideInherent},
};
use sidechain_domain::{AssetName, MainchainAddress, PolicyId, UtxoId};
use sidechain_domain::{AssetName, MainchainAddress, McTxHash, PolicyId};
use sp_core::bounded_vec;
use sp_partner_chains_bridge::*;
use sp_runtime::{AccountId32, BoundedVec};
Expand All @@ -16,7 +16,7 @@ fn transfers() -> BoundedVec<BridgeTransferV1<RecipientAddress>, MaxTransfersPer
bounded_vec![
UserTransfer { token_amount: 100, recipient: AccountId32::new([2; 32]) },
ReserveTransfer { token_amount: 200 },
InvalidTransfer { token_amount: 300, utxo_id: UtxoId::new([1; 32], 1) }
InvalidTransfer { token_amount: 300, tx_hash: McTxHash([1; 32]) }
]
}

Expand All @@ -32,7 +32,7 @@ fn main_chain_scripts() -> MainChainScripts {
}

fn data_checkpoint() -> BridgeDataCheckpoint {
BridgeDataCheckpoint::Utxo(UtxoId::new([1; 32], 3))
BridgeDataCheckpoint::Tx(McTxHash([1; 32]))
}

mod set_main_chain_scripts {
Expand Down
10 changes: 5 additions & 5 deletions toolkit/bridge/primitives/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ use parity_scale_codec::{Decode, DecodeWithMemTracking, Encode, MaxEncodedLen};
use scale_info::TypeInfo;
use serde::{Deserialize, Serialize};
use sidechain_domain::{
AssetId, AssetName, MainchainAddress, McBlockHash, McBlockNumber, PolicyId, UtxoId,
AssetId, AssetName, MainchainAddress, McBlockHash, McBlockNumber, McTxHash, PolicyId,
};
use sp_inherents::*;

Expand Down Expand Up @@ -219,14 +219,14 @@ pub enum BridgeTransferV1<RecipientAddress> {
/// Amount of tokens tranfered
token_amount: u64,
},
/// Invalid transfer coming from a UTXO on Cardano that does not contain a datum that can be
/// Invalid transfer coming from a Transaction on Cardano that does not contain a metadata that can be
/// correctly interpreted. These transfers can either be ignored and considered lost or recovered
/// through some custom mechanism.
InvalidTransfer {
/// Amount of tokens tranfered
token_amount: u64,
/// ID of the UTXO containing an invalid transfer
utxo_id: sidechain_domain::UtxoId,
tx_hash: sidechain_domain::McTxHash,
},
}

Expand Down Expand Up @@ -286,8 +286,8 @@ pub enum TokenBridgeInherentDataProvider<RecipientAddress> {
Clone, Debug, Encode, Decode, DecodeWithMemTracking, TypeInfo, PartialEq, Eq, MaxEncodedLen,
)]
pub enum BridgeDataCheckpoint {
/// Last transfer utxo that has been processed
Utxo(UtxoId),
/// The last transaction that has been processed
Tx(McTxHash),
/// Cardano block up to which data has been processed
Block(McBlockNumber),
}
Expand Down
Loading
Loading