From 86f899e814ae6e21e1f11348d0be9dfd86b3046a Mon Sep 17 00:00:00 2001 From: Immortal Izzy Date: Thu, 10 Apr 2025 08:16:01 +0200 Subject: [PATCH 01/28] Support async key implementations --- async_substrate_interface/async_substrate.py | 9 ++-- async_substrate_interface/const.py | 4 ++ async_substrate_interface/protocols.py | 43 ++++++++++++++++++++ async_substrate_interface/sync_substrate.py | 4 +- async_substrate_interface/types.py | 2 +- pyproject.toml | 1 - 6 files changed, 54 insertions(+), 9 deletions(-) create mode 100644 async_substrate_interface/const.py create mode 100644 async_substrate_interface/protocols.py diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 502b743..a171635 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -22,8 +22,6 @@ ) import asyncstdlib as a -from bittensor_wallet.keypair import Keypair -from bittensor_wallet.utils import SS58_FORMAT from bt_decode import MetadataV15, PortableRegistry, decode as decode_by_type_string from scalecodec.base import ScaleBytes, ScaleType, RuntimeConfigurationObject from scalecodec.types import ( @@ -35,11 +33,13 @@ from websockets.asyncio.client import connect from websockets.exceptions import ConnectionClosed +from async_substrate_interface.const import SS58_FORMAT from async_substrate_interface.errors import ( SubstrateRequestException, ExtrinsicNotFound, BlockNotFound, ) +from async_substrate_interface.protocols import Keypair from async_substrate_interface.types import ( ScaleObj, RequestManager, @@ -2406,6 +2406,8 @@ async def create_signed_extrinsic( # Sign payload signature = keypair.sign(signature_payload) + if inspect.isawaitable(signature): + signature = await signature # Create extrinsic extrinsic = self.runtime_config.create_scale_object( @@ -2692,9 +2694,6 @@ async def get_payment_info( if not isinstance(call, GenericCall): raise TypeError("'call' must be of type Call") 
- if not isinstance(keypair, Keypair): - raise TypeError("'keypair' must be of type Keypair") - # No valid signature is required for fee estimation signature = "0x" + "00" * 64 diff --git a/async_substrate_interface/const.py b/async_substrate_interface/const.py new file mode 100644 index 0000000..4e9a2eb --- /dev/null +++ b/async_substrate_interface/const.py @@ -0,0 +1,4 @@ + + +# Re-define SS58 format here to remove unnecessary dependencies. +SS58_FORMAT = 42 \ No newline at end of file diff --git a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py new file mode 100644 index 0000000..fc6889a --- /dev/null +++ b/async_substrate_interface/protocols.py @@ -0,0 +1,43 @@ +from typing import Protocol + + +__all__: list[str] = [ + 'Keypair' +] + + +# For reference only +# class KeypairType: +# """ +# Type of cryptography, used in `Keypair` instance to encrypt and sign data +# +# * ED25519 = 0 +# * SR25519 = 1 +# * ECDSA = 2 +# +# """ +# ED25519 = 0 +# SR25519 = 1 +# ECDSA = 2 + + +class Keypair(Protocol): + + @property + def crypto_type(self) -> int: + ... + + @property + def public_key(self) -> bytes | None: + ... + + @property + def ss58_address(self) -> str: + ... + + @property + def ss58_format(self) -> int: + ... + + def sign(self, data: bytes | str) -> bytes: + ... 
\ No newline at end of file diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index c2c9b3c..da667ce 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -4,8 +4,6 @@ from hashlib import blake2b from typing import Optional, Union, Callable, Any -from bittensor_wallet.keypair import Keypair -from bittensor_wallet.utils import SS58_FORMAT from bt_decode import MetadataV15, PortableRegistry, decode as decode_by_type_string from scalecodec import ( GenericCall, @@ -17,11 +15,13 @@ from websockets.sync.client import connect from websockets.exceptions import ConnectionClosed +from async_substrate_interface.const import SS58_FORMAT from async_substrate_interface.errors import ( ExtrinsicNotFound, SubstrateRequestException, BlockNotFound, ) +from async_substrate_interface.protocols import Keypair from async_substrate_interface.types import ( SubstrateMixin, RuntimeCache, diff --git a/async_substrate_interface/types.py b/async_substrate_interface/types.py index 754b860..54786c3 100644 --- a/async_substrate_interface/types.py +++ b/async_substrate_interface/types.py @@ -7,12 +7,12 @@ from typing import Optional, Union, Any from bt_decode import PortableRegistry, encode as encode_by_type_string -from bittensor_wallet.utils import SS58_FORMAT from scalecodec import ss58_encode, ss58_decode, is_valid_ss58_address from scalecodec.base import RuntimeConfigurationObject, ScaleBytes from scalecodec.type_registry import load_type_registry_preset from scalecodec.types import GenericCall, ScaleType +from .const import SS58_FORMAT from .utils import json diff --git a/pyproject.toml b/pyproject.toml index f80eb46..8051467 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ keywords = ["substrate", "development", "bittensor"] dependencies = [ "wheel", "asyncstdlib~=3.13.0", - "bittensor-wallet>=2.1.3", "bt-decode==v0.5.0", "scalecodec~=1.2.11", "websockets>=14.1", 
From d6aa47d0aa86945b87673b1b1bacf700b51c0370 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 25 Apr 2025 15:54:36 +0200 Subject: [PATCH 02/28] [WIP] moving retry to async-substrate-interface --- async_substrate_interface/types.py | 202 +++++++++++++++++++++++++++++ 1 file changed, 202 insertions(+) diff --git a/async_substrate_interface/types.py b/async_substrate_interface/types.py index 754b860..c95db76 100644 --- a/async_substrate_interface/types.py +++ b/async_substrate_interface/types.py @@ -4,6 +4,8 @@ from collections.abc import Iterable from dataclasses import dataclass from datetime import datetime +from functools import partial +from itertools import cycle from typing import Optional, Union, Any from bt_decode import PortableRegistry, encode as encode_by_type_string @@ -13,6 +15,7 @@ from scalecodec.type_registry import load_type_registry_preset from scalecodec.types import GenericCall, ScaleType +from .errors import MaxRetriesExceeded from .utils import json @@ -358,6 +361,205 @@ def serialize(self): def decode(self): return self.value +SubstrateClass = Type[Union[SubstrateInterface, AsyncSubstrateInterface]] + + +class RetrySubstrate: + def __init__( + self, + substrate: SubstrateClass, + main_url: str, + ss58_format: int, + type_registry: dict, + use_remote_preset: bool, + chain_name: str, + _mock: bool, + fallback_chains: Optional[list[str]] = None, + retry_forever: bool = False, + ): + fallback_chains = fallback_chains or [] + self._substrate_class: SubstrateClass = substrate + self.ss58_format: int = ss58_format + self.type_registry: dict = type_registry + self.use_remote_preset: bool = use_remote_preset + self.chain_name: str = chain_name + self._mock = _mock + self.fallback_chains = ( + iter(fallback_chains) + if not retry_forever + else cycle(fallback_chains + [main_url]) + ) + initialized = False + for chain_url in [main_url] + fallback_chains: + try: + self._substrate = self._substrate_class( + url=chain_url, + ss58_format=ss58_format, 
+ type_registry=type_registry, + use_remote_preset=use_remote_preset, + chain_name=chain_name, + _mock=_mock, + ) + initialized = True + break + except ConnectionError: + logging.warning(f"Unable to connect to {chain_url}") + if not initialized: + raise ConnectionError( + f"Unable to connect at any chains specified: {[main_url]+fallback_chains}" + ) + + # retries + + # TODO: properties that need retry logic + # properties + # version + # token_decimals + # token_symbol + # name + + retry = ( + self._async_retry + if self._substrate_class == AsyncSubstrateInterface + else self._retry + ) + + self._get_block_handler = partial(retry, "_get_block_handler") + self.apply_type_registry_presets = partial(retry, "apply_type_registry_presets") + self.close = partial(retry, "close") + self.compose_call = partial(retry, "compose_call") + self.connect = partial(retry, "connect") + self.create_scale_object = partial(retry, "create_scale_object") + self.create_signed_extrinsic = partial(retry, "create_signed_extrinsic") + self.create_storage_key = partial(retry, "create_storage_key") + self.decode_scale = partial(retry, "decode_scale") + self.encode_scale = partial(retry, "encode_scale") + self.extension_call = partial(retry, "extension_call") + self.filter_events = partial(retry, "filter_events") + self.filter_extrinsics = partial(retry, "filter_extrinsics") + self.generate_signature_payload = partial(retry, "generate_signature_payload") + self.get_account_next_index = partial(retry, "get_account_next_index") + self.get_account_nonce = partial(retry, "get_account_nonce") + self.get_block = partial(retry, "get_block") + self.get_block_hash = partial(retry, "get_block_hash") + self.get_block_header = partial(retry, "get_block_header") + self.get_block_metadata = partial(retry, "get_block_metadata") + self.get_block_number = partial(retry, "get_block_number") + self.get_block_runtime_info = partial(retry, "get_block_runtime_info") + self.get_block_runtime_version_for = partial( + 
retry, "get_block_runtime_version_for" + ) + self.get_block_timestamp = partial(retry, "get_block_timestamp") + self.get_chain_finalised_head = partial(retry, "get_chain_finalised_head") + self.get_chain_head = partial(retry, "get_chain_head") + self.get_constant = partial(retry, "get_constant") + self.get_events = partial(retry, "get_events") + self.get_extrinsics = partial(retry, "get_extrinsics") + self.get_metadata_call_function = partial(retry, "get_metadata_call_function") + self.get_metadata_constant = partial(retry, "get_metadata_constant") + self.get_metadata_error = partial(retry, "get_metadata_error") + self.get_metadata_errors = partial(retry, "get_metadata_errors") + self.get_metadata_module = partial(retry, "get_metadata_module") + self.get_metadata_modules = partial(retry, "get_metadata_modules") + self.get_metadata_runtime_call_function = partial( + retry, "get_metadata_runtime_call_function" + ) + self.get_metadata_runtime_call_functions = partial( + retry, "get_metadata_runtime_call_functions" + ) + self.get_metadata_storage_function = partial( + retry, "get_metadata_storage_function" + ) + self.get_metadata_storage_functions = partial( + retry, "get_metadata_storage_functions" + ) + self.get_parent_block_hash = partial(retry, "get_parent_block_hash") + self.get_payment_info = partial(retry, "get_payment_info") + self.get_storage_item = partial(retry, "get_storage_item") + self.get_type_definition = partial(retry, "get_type_definition") + self.get_type_registry = partial(retry, "get_type_registry") + self.init_runtime = partial(retry, "init_runtime") + self.initialize = partial(retry, "initialize") + self.is_valid_ss58_address = partial(retry, "is_valid_ss58_address") + self.load_runtime = partial(retry, "load_runtime") + self.make_payload = partial(retry, "make_payload") + self.query = partial(retry, "query") + self.query_map = partial(retry, "query_map") + self.query_multi = partial(retry, "query_multi") + self.query_multiple = partial(retry, 
"query_multiple") + self.reload_type_registry = partial(retry, "reload_type_registry") + self.retrieve_extrinsic_by_hash = partial(retry, "retrieve_extrinsic_by_hash") + self.retrieve_extrinsic_by_identifier = partial( + retry, "retrieve_extrinsic_by_identifier" + ) + self.rpc_request = partial(retry, "rpc_request") + self.runtime_call = partial(retry, "runtime_call") + self.search_block_number = partial(retry, "search_block_number") + self.serialize_constant = partial(retry, "serialize_constant") + self.serialize_module_call = partial(retry, "serialize_module_call") + self.serialize_module_error = partial(retry, "serialize_module_error") + self.serialize_module_event = partial(retry, "serialize_module_event") + self.serialize_storage_item = partial(retry, "serialize_storage_item") + self.ss58_decode = partial(retry, "ss58_decode") + self.ss58_encode = partial(retry, "ss58_encode") + self.submit_extrinsic = partial(retry, "submit_extrinsic") + self.subscribe_block_headers = partial(retry, "subscribe_block_headers") + self.supports_rpc_method = partial(retry, "supports_rpc_method") + self.ws = self._substrate.ws + + def _retry(self, method, *args, **kwargs): + try: + method_ = getattr(self._substrate, method) + return method_(*args, **kwargs) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate() + method_ = getattr(self._substrate, method) + return self._retry(method_(*args, **kwargs)) + except StopIteration: + logging.error( + f"Max retries exceeded with {self._substrate.url}. No more fallback chains." 
+ ) + raise MaxRetriesExceeded + + async def _async_retry(self, method, *args, **kwargs): + try: + method_ = getattr(self._substrate, method) + if asyncio.iscoroutinefunction(method_): + return await method_(*args, **kwargs) + else: + return method_(*args, **kwargs) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate(e) + method_ = getattr(self._substrate, method) + if asyncio.iscoroutinefunction(method_): + return await method_(*args, **kwargs) + else: + return method_(*args, **kwargs) + except StopIteration: + logging.error( + f"Max retries exceeded with {self._substrate.url}. No more fallback chains." + ) + raise MaxRetriesExceeded + + def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: + next_network = next(self.fallback_chains) + if e.__class__ == MaxRetriesExceeded: + logging.error( + f"Max retries exceeded with {self._substrate.url}. Retrying with {next_network}." + ) + else: + print(f"Connection error. 
Trying again with {next_network}") + self._substrate = self._substrate_class( + url=next_network, + ss58_format=self.ss58_format, + type_registry=self.type_registry, + use_remote_preset=self.use_remote_preset, + chain_name=self.chain_name, + _mock=self._mock, + ) + class SubstrateMixin(ABC): type_registry_preset = None From 743f708575fde3f6742fa49a63c18878449bc67e Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 25 Apr 2025 17:36:37 +0200 Subject: [PATCH 03/28] moved around some stuff --- async_substrate_interface/substrate_addons.py | 214 ++++++++++++++++++ async_substrate_interface/types.py | 199 ---------------- 2 files changed, 214 insertions(+), 199 deletions(-) create mode 100644 async_substrate_interface/substrate_addons.py diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py new file mode 100644 index 0000000..dedb968 --- /dev/null +++ b/async_substrate_interface/substrate_addons.py @@ -0,0 +1,214 @@ +import asyncio +import logging +from functools import partial +from itertools import cycle +from typing import Optional, Type, Union + +from async_substrate_interface.async_substrate import AsyncSubstrateInterface +from async_substrate_interface.errors import MaxRetriesExceeded +from async_substrate_interface.sync_substrate import SubstrateInterface + +SubstrateClass = Type[Union[SubstrateInterface, AsyncSubstrateInterface]] + + +class RetrySubstrate: + def __init__( + self, + substrate: SubstrateClass, + main_url: str, + use_remote_preset: bool = False, + fallback_chains: Optional[list[str]] = None, + retry_forever: bool = False, + ss58_format: Optional[int] = None, + type_registry: Optional[dict] = None, + type_registry_preset: Optional[str] = None, + chain_name: str = "", + max_retries: int = 5, + retry_timeout: float = 60.0, + _mock: bool = False, + ): + fallback_chains = fallback_chains or [] + self._substrate_class: SubstrateClass = substrate + self.ss58_format: int = ss58_format + 
self.type_registry: dict = type_registry + self.use_remote_preset: bool = use_remote_preset + self.chain_name: Optional[str] = chain_name + self.max_retries: int = max_retries + self.retry_timeout: float = retry_timeout + self._mock = _mock + self.type_registry_preset: Optional[str] = type_registry_preset + self.fallback_chains = ( + iter(fallback_chains) + if not retry_forever + else cycle(fallback_chains + [main_url]) + ) + initialized = False + for chain_url in [main_url] + fallback_chains: + try: + self._substrate = self._substrate_class( + url=chain_url, + ss58_format=ss58_format, + type_registry=type_registry, + use_remote_preset=use_remote_preset, + chain_name=chain_name, + _mock=_mock, + ) + initialized = True + break + except ConnectionError: + logging.warning(f"Unable to connect to {chain_url}") + if not initialized: + raise ConnectionError( + f"Unable to connect at any chains specified: {[main_url]+fallback_chains}" + ) + + # retries + + # TODO: properties that need retry logic + # properties + # version + # token_decimals + # token_symbol + # name + + retry = ( + self._async_retry + if self._substrate_class == AsyncSubstrateInterface + else self._retry + ) + + self._get_block_handler = partial(retry, "_get_block_handler") + self.apply_type_registry_presets = partial(retry, "apply_type_registry_presets") + self.close = partial(retry, "close") + self.compose_call = partial(retry, "compose_call") + self.connect = partial(retry, "connect") + self.create_scale_object = partial(retry, "create_scale_object") + self.create_signed_extrinsic = partial(retry, "create_signed_extrinsic") + self.create_storage_key = partial(retry, "create_storage_key") + self.decode_scale = partial(retry, "decode_scale") + self.encode_scale = partial(retry, "encode_scale") + self.extension_call = partial(retry, "extension_call") + self.filter_events = partial(retry, "filter_events") + self.filter_extrinsics = partial(retry, "filter_extrinsics") + self.generate_signature_payload = 
partial(retry, "generate_signature_payload") + self.get_account_next_index = partial(retry, "get_account_next_index") + self.get_account_nonce = partial(retry, "get_account_nonce") + self.get_block = partial(retry, "get_block") + self.get_block_hash = partial(retry, "get_block_hash") + self.get_block_header = partial(retry, "get_block_header") + self.get_block_metadata = partial(retry, "get_block_metadata") + self.get_block_number = partial(retry, "get_block_number") + self.get_block_runtime_info = partial(retry, "get_block_runtime_info") + self.get_block_runtime_version_for = partial( + retry, "get_block_runtime_version_for" + ) + self.get_block_timestamp = partial(retry, "get_block_timestamp") + self.get_chain_finalised_head = partial(retry, "get_chain_finalised_head") + self.get_chain_head = partial(retry, "get_chain_head") + self.get_constant = partial(retry, "get_constant") + self.get_events = partial(retry, "get_events") + self.get_extrinsics = partial(retry, "get_extrinsics") + self.get_metadata_call_function = partial(retry, "get_metadata_call_function") + self.get_metadata_constant = partial(retry, "get_metadata_constant") + self.get_metadata_error = partial(retry, "get_metadata_error") + self.get_metadata_errors = partial(retry, "get_metadata_errors") + self.get_metadata_module = partial(retry, "get_metadata_module") + self.get_metadata_modules = partial(retry, "get_metadata_modules") + self.get_metadata_runtime_call_function = partial( + retry, "get_metadata_runtime_call_function" + ) + self.get_metadata_runtime_call_functions = partial( + retry, "get_metadata_runtime_call_functions" + ) + self.get_metadata_storage_function = partial( + retry, "get_metadata_storage_function" + ) + self.get_metadata_storage_functions = partial( + retry, "get_metadata_storage_functions" + ) + self.get_parent_block_hash = partial(retry, "get_parent_block_hash") + self.get_payment_info = partial(retry, "get_payment_info") + self.get_storage_item = partial(retry, 
"get_storage_item") + self.get_type_definition = partial(retry, "get_type_definition") + self.get_type_registry = partial(retry, "get_type_registry") + self.init_runtime = partial(retry, "init_runtime") + self.initialize = partial(retry, "initialize") + self.is_valid_ss58_address = partial(retry, "is_valid_ss58_address") + self.load_runtime = partial(retry, "load_runtime") + self.make_payload = partial(retry, "make_payload") + self.query = partial(retry, "query") + self.query_map = partial(retry, "query_map") + self.query_multi = partial(retry, "query_multi") + self.query_multiple = partial(retry, "query_multiple") + self.reload_type_registry = partial(retry, "reload_type_registry") + self.retrieve_extrinsic_by_hash = partial(retry, "retrieve_extrinsic_by_hash") + self.retrieve_extrinsic_by_identifier = partial( + retry, "retrieve_extrinsic_by_identifier" + ) + self.rpc_request = partial(retry, "rpc_request") + self.runtime_call = partial(retry, "runtime_call") + self.search_block_number = partial(retry, "search_block_number") + self.serialize_constant = partial(retry, "serialize_constant") + self.serialize_module_call = partial(retry, "serialize_module_call") + self.serialize_module_error = partial(retry, "serialize_module_error") + self.serialize_module_event = partial(retry, "serialize_module_event") + self.serialize_storage_item = partial(retry, "serialize_storage_item") + self.ss58_decode = partial(retry, "ss58_decode") + self.ss58_encode = partial(retry, "ss58_encode") + self.submit_extrinsic = partial(retry, "submit_extrinsic") + self.subscribe_block_headers = partial(retry, "subscribe_block_headers") + self.supports_rpc_method = partial(retry, "supports_rpc_method") + self.ws = self._substrate.ws + + def _retry(self, method, *args, **kwargs): + try: + method_ = getattr(self._substrate, method) + return method_(*args, **kwargs) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate(e) + method_ = 
getattr(self._substrate, method) + return self._retry(method_(*args, **kwargs)) + except StopIteration: + logging.error( + f"Max retries exceeded with {self._substrate.url}. No more fallback chains." + ) + raise MaxRetriesExceeded + + async def _async_retry(self, method, *args, **kwargs): + try: + method_ = getattr(self._substrate, method) + if asyncio.iscoroutinefunction(method_): + return await method_(*args, **kwargs) + else: + return method_(*args, **kwargs) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate(e) + method_ = getattr(self._substrate, method) + if asyncio.iscoroutinefunction(method_): + return await method_(*args, **kwargs) + else: + return method_(*args, **kwargs) + except StopIteration: + logging.error( + f"Max retries exceeded with {self._substrate.url}. No more fallback chains." + ) + raise MaxRetriesExceeded + + def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: + next_network = next(self.fallback_chains) + if e.__class__ == MaxRetriesExceeded: + logging.error( + f"Max retries exceeded with {self._substrate.url}. Retrying with {next_network}." + ) + else: + print(f"Connection error. 
Trying again with {next_network}") + self._substrate = self._substrate_class( + url=next_network, + ss58_format=self.ss58_format, + type_registry=self.type_registry, + use_remote_preset=self.use_remote_preset, + chain_name=self.chain_name, + _mock=self._mock, + ) diff --git a/async_substrate_interface/types.py b/async_substrate_interface/types.py index c95db76..e1a3266 100644 --- a/async_substrate_interface/types.py +++ b/async_substrate_interface/types.py @@ -361,205 +361,6 @@ def serialize(self): def decode(self): return self.value -SubstrateClass = Type[Union[SubstrateInterface, AsyncSubstrateInterface]] - - -class RetrySubstrate: - def __init__( - self, - substrate: SubstrateClass, - main_url: str, - ss58_format: int, - type_registry: dict, - use_remote_preset: bool, - chain_name: str, - _mock: bool, - fallback_chains: Optional[list[str]] = None, - retry_forever: bool = False, - ): - fallback_chains = fallback_chains or [] - self._substrate_class: SubstrateClass = substrate - self.ss58_format: int = ss58_format - self.type_registry: dict = type_registry - self.use_remote_preset: bool = use_remote_preset - self.chain_name: str = chain_name - self._mock = _mock - self.fallback_chains = ( - iter(fallback_chains) - if not retry_forever - else cycle(fallback_chains + [main_url]) - ) - initialized = False - for chain_url in [main_url] + fallback_chains: - try: - self._substrate = self._substrate_class( - url=chain_url, - ss58_format=ss58_format, - type_registry=type_registry, - use_remote_preset=use_remote_preset, - chain_name=chain_name, - _mock=_mock, - ) - initialized = True - break - except ConnectionError: - logging.warning(f"Unable to connect to {chain_url}") - if not initialized: - raise ConnectionError( - f"Unable to connect at any chains specified: {[main_url]+fallback_chains}" - ) - - # retries - - # TODO: properties that need retry logic - # properties - # version - # token_decimals - # token_symbol - # name - - retry = ( - self._async_retry - if 
self._substrate_class == AsyncSubstrateInterface - else self._retry - ) - - self._get_block_handler = partial(retry, "_get_block_handler") - self.apply_type_registry_presets = partial(retry, "apply_type_registry_presets") - self.close = partial(retry, "close") - self.compose_call = partial(retry, "compose_call") - self.connect = partial(retry, "connect") - self.create_scale_object = partial(retry, "create_scale_object") - self.create_signed_extrinsic = partial(retry, "create_signed_extrinsic") - self.create_storage_key = partial(retry, "create_storage_key") - self.decode_scale = partial(retry, "decode_scale") - self.encode_scale = partial(retry, "encode_scale") - self.extension_call = partial(retry, "extension_call") - self.filter_events = partial(retry, "filter_events") - self.filter_extrinsics = partial(retry, "filter_extrinsics") - self.generate_signature_payload = partial(retry, "generate_signature_payload") - self.get_account_next_index = partial(retry, "get_account_next_index") - self.get_account_nonce = partial(retry, "get_account_nonce") - self.get_block = partial(retry, "get_block") - self.get_block_hash = partial(retry, "get_block_hash") - self.get_block_header = partial(retry, "get_block_header") - self.get_block_metadata = partial(retry, "get_block_metadata") - self.get_block_number = partial(retry, "get_block_number") - self.get_block_runtime_info = partial(retry, "get_block_runtime_info") - self.get_block_runtime_version_for = partial( - retry, "get_block_runtime_version_for" - ) - self.get_block_timestamp = partial(retry, "get_block_timestamp") - self.get_chain_finalised_head = partial(retry, "get_chain_finalised_head") - self.get_chain_head = partial(retry, "get_chain_head") - self.get_constant = partial(retry, "get_constant") - self.get_events = partial(retry, "get_events") - self.get_extrinsics = partial(retry, "get_extrinsics") - self.get_metadata_call_function = partial(retry, "get_metadata_call_function") - self.get_metadata_constant = 
partial(retry, "get_metadata_constant") - self.get_metadata_error = partial(retry, "get_metadata_error") - self.get_metadata_errors = partial(retry, "get_metadata_errors") - self.get_metadata_module = partial(retry, "get_metadata_module") - self.get_metadata_modules = partial(retry, "get_metadata_modules") - self.get_metadata_runtime_call_function = partial( - retry, "get_metadata_runtime_call_function" - ) - self.get_metadata_runtime_call_functions = partial( - retry, "get_metadata_runtime_call_functions" - ) - self.get_metadata_storage_function = partial( - retry, "get_metadata_storage_function" - ) - self.get_metadata_storage_functions = partial( - retry, "get_metadata_storage_functions" - ) - self.get_parent_block_hash = partial(retry, "get_parent_block_hash") - self.get_payment_info = partial(retry, "get_payment_info") - self.get_storage_item = partial(retry, "get_storage_item") - self.get_type_definition = partial(retry, "get_type_definition") - self.get_type_registry = partial(retry, "get_type_registry") - self.init_runtime = partial(retry, "init_runtime") - self.initialize = partial(retry, "initialize") - self.is_valid_ss58_address = partial(retry, "is_valid_ss58_address") - self.load_runtime = partial(retry, "load_runtime") - self.make_payload = partial(retry, "make_payload") - self.query = partial(retry, "query") - self.query_map = partial(retry, "query_map") - self.query_multi = partial(retry, "query_multi") - self.query_multiple = partial(retry, "query_multiple") - self.reload_type_registry = partial(retry, "reload_type_registry") - self.retrieve_extrinsic_by_hash = partial(retry, "retrieve_extrinsic_by_hash") - self.retrieve_extrinsic_by_identifier = partial( - retry, "retrieve_extrinsic_by_identifier" - ) - self.rpc_request = partial(retry, "rpc_request") - self.runtime_call = partial(retry, "runtime_call") - self.search_block_number = partial(retry, "search_block_number") - self.serialize_constant = partial(retry, "serialize_constant") - 
self.serialize_module_call = partial(retry, "serialize_module_call") - self.serialize_module_error = partial(retry, "serialize_module_error") - self.serialize_module_event = partial(retry, "serialize_module_event") - self.serialize_storage_item = partial(retry, "serialize_storage_item") - self.ss58_decode = partial(retry, "ss58_decode") - self.ss58_encode = partial(retry, "ss58_encode") - self.submit_extrinsic = partial(retry, "submit_extrinsic") - self.subscribe_block_headers = partial(retry, "subscribe_block_headers") - self.supports_rpc_method = partial(retry, "supports_rpc_method") - self.ws = self._substrate.ws - - def _retry(self, method, *args, **kwargs): - try: - method_ = getattr(self._substrate, method) - return method_(*args, **kwargs) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: - try: - self._reinstantiate_substrate() - method_ = getattr(self._substrate, method) - return self._retry(method_(*args, **kwargs)) - except StopIteration: - logging.error( - f"Max retries exceeded with {self._substrate.url}. No more fallback chains." - ) - raise MaxRetriesExceeded - - async def _async_retry(self, method, *args, **kwargs): - try: - method_ = getattr(self._substrate, method) - if asyncio.iscoroutinefunction(method_): - return await method_(*args, **kwargs) - else: - return method_(*args, **kwargs) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: - try: - self._reinstantiate_substrate(e) - method_ = getattr(self._substrate, method) - if asyncio.iscoroutinefunction(method_): - return await method_(*args, **kwargs) - else: - return method_(*args, **kwargs) - except StopIteration: - logging.error( - f"Max retries exceeded with {self._substrate.url}. No more fallback chains." 
- ) - raise MaxRetriesExceeded - - def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: - next_network = next(self.fallback_chains) - if e.__class__ == MaxRetriesExceeded: - logging.error( - f"Max retries exceeded with {self._substrate.url}. Retrying with {next_network}." - ) - else: - print(f"Connection error. Trying again with {next_network}") - self._substrate = self._substrate_class( - url=next_network, - ss58_format=self.ss58_format, - type_registry=self.type_registry, - use_remote_preset=self.use_remote_preset, - chain_name=self.chain_name, - _mock=self._mock, - ) - class SubstrateMixin(ABC): type_registry_preset = None From 5e3d6f576188ea174bb8a1326f8c5db7b1cf5ca0 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 25 Apr 2025 17:36:49 +0200 Subject: [PATCH 04/28] moved around some stuff --- async_substrate_interface/types.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/async_substrate_interface/types.py b/async_substrate_interface/types.py index e1a3266..754b860 100644 --- a/async_substrate_interface/types.py +++ b/async_substrate_interface/types.py @@ -4,8 +4,6 @@ from collections.abc import Iterable from dataclasses import dataclass from datetime import datetime -from functools import partial -from itertools import cycle from typing import Optional, Union, Any from bt_decode import PortableRegistry, encode as encode_by_type_string @@ -15,7 +13,6 @@ from scalecodec.type_registry import load_type_registry_preset from scalecodec.types import GenericCall, ScaleType -from .errors import MaxRetriesExceeded from .utils import json From 36e6e68cc5a1ff5d759749093c77a49c28f76deb Mon Sep 17 00:00:00 2001 From: Immortal Izzy Date: Sun, 27 Apr 2025 01:54:49 +0200 Subject: [PATCH 05/28] Update async_substrate_interface/protocols.py Co-authored-by: BD Himes <37844818+thewhaleking@users.noreply.github.com> --- async_substrate_interface/protocols.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py index fc6889a..914d7a7 100644 --- a/async_substrate_interface/protocols.py +++ b/async_substrate_interface/protocols.py @@ -28,7 +28,7 @@ def crypto_type(self) -> int: ... @property - def public_key(self) -> bytes | None: + def public_key(self) -> Optional[bytes]: ... @property From d18120710d67c485b64ddc49fc8ae64da8655301 Mon Sep 17 00:00:00 2001 From: Immortal Izzy Date: Sun, 27 Apr 2025 01:54:58 +0200 Subject: [PATCH 06/28] Update async_substrate_interface/protocols.py Co-authored-by: BD Himes <37844818+thewhaleking@users.noreply.github.com> --- async_substrate_interface/protocols.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py index 914d7a7..015f51a 100644 --- a/async_substrate_interface/protocols.py +++ b/async_substrate_interface/protocols.py @@ -39,5 +39,5 @@ def ss58_address(self) -> str: def ss58_format(self) -> int: ... - def sign(self, data: bytes | str) -> bytes: + def sign(self, data: Union[bytes, str]) -> bytes: ... 
\ No newline at end of file From 8fdbc194af3b588e6c15b653ba18f1bce9172868 Mon Sep 17 00:00:00 2001 From: Immortal Izzy Date: Sun, 27 Apr 2025 01:55:03 +0200 Subject: [PATCH 07/28] Update async_substrate_interface/protocols.py Co-authored-by: BD Himes <37844818+thewhaleking@users.noreply.github.com> --- async_substrate_interface/protocols.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py index 015f51a..4b266e3 100644 --- a/async_substrate_interface/protocols.py +++ b/async_substrate_interface/protocols.py @@ -1,4 +1,4 @@ -from typing import Protocol +from typing import Protocol, Union, Optional __all__: list[str] = [ From f3b721c438620d3ab6979b1b95b9663c5bbb0430 Mon Sep 17 00:00:00 2001 From: Immortal Izzy Date: Sun, 27 Apr 2025 01:59:04 +0200 Subject: [PATCH 08/28] Annotate return value to allow async function --- async_substrate_interface/protocols.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py index 4b266e3..bbaf372 100644 --- a/async_substrate_interface/protocols.py +++ b/async_substrate_interface/protocols.py @@ -1,4 +1,4 @@ -from typing import Protocol, Union, Optional +from typing import Awaitable, Protocol, Union, Optional __all__: list[str] = [ @@ -39,5 +39,5 @@ def ss58_address(self) -> str: def ss58_format(self) -> int: ... - def sign(self, data: Union[bytes, str]) -> bytes: + def sign(self, data: Union[bytes, str]) -> Union[bytes, Awaitable[bytes]]: ... 
\ No newline at end of file From be2e1ed86cd1b4245aecc37e68fc5e3f7e33af6b Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 29 Apr 2025 20:47:36 +0200 Subject: [PATCH 09/28] Ruff --- async_substrate_interface/substrate_addons.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index dedb968..362c8b7 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -59,7 +59,7 @@ def __init__( logging.warning(f"Unable to connect to {chain_url}") if not initialized: raise ConnectionError( - f"Unable to connect at any chains specified: {[main_url]+fallback_chains}" + f"Unable to connect at any chains specified: {[main_url] + fallback_chains}" ) # retries From 045b22b69064abf7a1890c75072c60f721d34c4d Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 29 Apr 2025 20:48:27 +0200 Subject: [PATCH 10/28] subscript_storage added to asyncsubstrate --- async_substrate_interface/async_substrate.py | 99 ++++++++++++++++++++ 1 file changed, 99 insertions(+) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 94abf59..0f12e83 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -1039,6 +1039,105 @@ async def create_storage_key( metadata=self.runtime.metadata, ) + async def subscribe_storage( + self, + storage_keys: list[StorageKey], + subscription_handler: Callable[ + [StorageKey, Any, str], + Awaitable[Any] + ], + ): + """ + + Subscribe to provided storage_keys and keep tracking until `subscription_handler` returns a value + + Example of a StorageKey: + ``` + StorageKey.create_from_storage_function( + "System", "Account", ["5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"] + ) + ``` + + Example of a subscription handler: + ``` + async def subscription_handler(storage_key, obj, subscription_id): 
+ if obj is not None: + # the subscription will run until your subscription_handler returns something other than `None` + return obj + ``` + + Args: + storage_keys: StorageKey list of storage keys to subscribe to + subscription_handler: coroutine function to handle value changes of subscription + + """ + await self.init_runtime() + + storage_key_map = {s.to_hex(): s for s in storage_keys} + + async def result_handler(message: dict, subscription_id: str) -> tuple[bool, Optional[ScaleType]]: + result_found = False + subscription_result = None + if "params" in message: + # Process changes + for change_storage_key, change_data in message["params"]["result"][ + "changes" + ]: + # Check for target storage key + storage_key = storage_key_map[change_storage_key] + + if change_data is not None: + change_scale_type = storage_key.value_scale_type + result_found = True + elif ( + storage_key.metadata_storage_function.value["modifier"] + == "Default" + ): + # Fallback to default value of storage function if no result + change_scale_type = storage_key.value_scale_type + change_data = ( + storage_key.metadata_storage_function.value_object[ + "default" + ].value_object + ) + else: + # No result is interpreted as an Option<...> result + change_scale_type = f"Option<{storage_key.value_scale_type}>" + change_data = ( + storage_key.metadata_storage_function.value_object[ + "default" + ].value_object + ) + + # Decode SCALE result data + updated_obj = await self.decode_scale( + type_string=change_scale_type, + scale_bytes=hex_to_bytes(change_data) + ) + + subscription_result = await subscription_handler( + storage_key, updated_obj, subscription_id + ) + + if subscription_result is not None: + # Handler returned end result: unsubscribe from further updates + self._forgettable_task = asyncio.create_task( + self.rpc_request( + "state_unsubscribeStorage", [subscription_id] + ) + ) + + return result_found, subscription_result + + if not callable(subscription_handler): + raise 
ValueError("Provided `subscription_handler` is not callable") + + return await self.rpc_request( + "state_subscribeStorage", + [[s.to_hex() for s in storage_keys]], + result_handler=result_handler, + ) + async def get_metadata_storage_functions(self, block_hash=None) -> list: """ Retrieves a list of all storage functions in metadata active at given block_hash (or chaintip if block_hash is From 58e850fff93094391e6c2a6c2f62022ef96f1b03 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 29 Apr 2025 20:52:37 +0200 Subject: [PATCH 11/28] subscript_storage added to sync --- async_substrate_interface/async_substrate.py | 11 ++- async_substrate_interface/sync_substrate.py | 94 ++++++++++++++++++++ 2 files changed, 99 insertions(+), 6 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 0f12e83..13538af 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -1042,10 +1042,7 @@ async def create_storage_key( async def subscribe_storage( self, storage_keys: list[StorageKey], - subscription_handler: Callable[ - [StorageKey, Any, str], - Awaitable[Any] - ], + subscription_handler: Callable[[StorageKey, Any, str], Awaitable[Any]], ): """ @@ -1075,7 +1072,9 @@ async def subscription_handler(storage_key, obj, subscription_id): storage_key_map = {s.to_hex(): s for s in storage_keys} - async def result_handler(message: dict, subscription_id: str) -> tuple[bool, Optional[ScaleType]]: + async def result_handler( + message: dict, subscription_id: str + ) -> tuple[bool, Optional[Any]]: result_found = False subscription_result = None if "params" in message: @@ -1112,7 +1111,7 @@ async def result_handler(message: dict, subscription_id: str) -> tuple[bool, Opt # Decode SCALE result data updated_obj = await self.decode_scale( type_string=change_scale_type, - scale_bytes=hex_to_bytes(change_data) + scale_bytes=hex_to_bytes(change_data), ) subscription_result = 
await subscription_handler( diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index a3a9f4d..62ab7d2 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -796,6 +796,100 @@ def create_storage_key( metadata=self.runtime.metadata, ) + def subscribe_storage( + self, + storage_keys: list[StorageKey], + subscription_handler: Callable[[StorageKey, Any, str], Any], + ): + """ + + Subscribe to provided storage_keys and keep tracking until `subscription_handler` returns a value + + Example of a StorageKey: + ``` + StorageKey.create_from_storage_function( + "System", "Account", ["5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"] + ) + ``` + + Example of a subscription handler: + ``` + def subscription_handler(storage_key, obj, subscription_id): + if obj is not None: + # the subscription will run until your subscription_handler returns something other than `None` + return obj + ``` + + Args: + storage_keys: StorageKey list of storage keys to subscribe to + subscription_handler: coroutine function to handle value changes of subscription + + """ + self.init_runtime() + + storage_key_map = {s.to_hex(): s for s in storage_keys} + + def result_handler( + message: dict, subscription_id: str + ) -> tuple[bool, Optional[Any]]: + result_found = False + subscription_result = None + if "params" in message: + # Process changes + for change_storage_key, change_data in message["params"]["result"][ + "changes" + ]: + # Check for target storage key + storage_key = storage_key_map[change_storage_key] + + if change_data is not None: + change_scale_type = storage_key.value_scale_type + result_found = True + elif ( + storage_key.metadata_storage_function.value["modifier"] + == "Default" + ): + # Fallback to default value of storage function if no result + change_scale_type = storage_key.value_scale_type + change_data = ( + storage_key.metadata_storage_function.value_object[ + "default" 
+ ].value_object + ) + else: + # No result is interpreted as an Option<...> result + change_scale_type = f"Option<{storage_key.value_scale_type}>" + change_data = ( + storage_key.metadata_storage_function.value_object[ + "default" + ].value_object + ) + + # Decode SCALE result data + updated_obj = self.decode_scale( + type_string=change_scale_type, + scale_bytes=hex_to_bytes(change_data), + ) + + subscription_result = subscription_handler( + storage_key, updated_obj, subscription_id + ) + + if subscription_result is not None: + # Handler returned end result: unsubscribe from further updates + self.rpc_request("state_unsubscribeStorage", [subscription_id]) + + return result_found, subscription_result + + if not callable(subscription_handler): + raise ValueError("Provided `subscription_handler` is not callable") + + return self.rpc_request( + "state_subscribeStorage", + [[s.to_hex() for s in storage_keys]], + result_handler=result_handler, + ) + def get_metadata_storage_functions(self, block_hash=None) -> list: """ Retrieves a list of all storage functions in metadata active at given block_hash (or chaintip if block_hash is From 19223acffb46cecb83d3d9958e28473f0823ac7c Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 29 Apr 2025 21:08:45 +0200 Subject: [PATCH 12/28] Metadata methods --- async_substrate_interface/async_substrate.py | 122 ++++++++++++++++++ async_substrate_interface/sync_substrate.py | 124 ++++++++++++++++++- 2 files changed, 245 insertions(+), 1 deletion(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 13538af..7c727c3 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -1291,6 +1291,41 @@ async def get_metadata_runtime_call_function( return runtime_call_def_obj + async def get_metadata_runtime_call_function( + self, api: str, method: str + ) -> GenericRuntimeCallDefinition: + """ + Get details of a runtime API call + + 
Args: + api: Name of the runtime API e.g. 'TransactionPaymentApi' + method: Name of the method e.g. 'query_fee_details' + + Returns: + GenericRuntimeCallDefinition + """ + await self.init_runtime(block_hash=block_hash) + + try: + runtime_call_def = self.runtime_config.type_registry["runtime_api"][api][ + "methods" + ][method] + runtime_call_def["api"] = api + runtime_call_def["method"] = method + runtime_api_types = self.runtime_config.type_registry["runtime_api"][ + api + ].get("types", {}) + except KeyError: + raise ValueError(f"Runtime API Call '{api}.{method}' not found in registry") + + # Add runtime API types to registry + self.runtime_config.update_type_registry_types(runtime_api_types) + + runtime_call_def_obj = await self.create_scale_object("RuntimeCallDefinition") + runtime_call_def_obj.encode(runtime_call_def) + + return runtime_call_def_obj + async def _get_block_handler( self, block_hash: str, @@ -1767,6 +1802,21 @@ def convert_event_data(data): events.append(convert_event_data(item)) return events + async def get_metadata(self, block_hash=None): + """ + Returns `MetadataVersioned` object for given block_hash or chaintip if block_hash is omitted + + + Args: + block_hash + + Returns: + MetadataVersioned + """ + runtime = await self.init_runtime(block_hash=block_hash) + + return runtime.metadata + @a.lru_cache(maxsize=512) async def get_parent_block_hash(self, block_hash): return await self._get_parent_block_hash(block_hash) @@ -2725,6 +2775,29 @@ async def get_account_next_index(self, account_address: str) -> int: self._nonces[account_address] += 1 return self._nonces[account_address] + async def get_metadata_constants(self, block_hash=None) -> list[dict]: + """ + Retrieves a list of all constants in metadata active at given block_hash (or chaintip if block_hash is omitted) + + Args: + block_hash: hash of the block + + Returns: + list of constants + """ + + runtime = await self.init_runtime(block_hash=block_hash) + + constant_list = [] + + for 
module_idx, module in enumerate(self.metadata.pallets): + for constant in module.constants or []: + constant_list.append( + self.serialize_constant(constant, module, runtime.runtime_version) + ) + + return constant_list + async def get_metadata_constant(self, module_name, constant_name, block_hash=None): """ Retrieves the details of a constant for given module name, call function name and block_hash @@ -3234,6 +3307,55 @@ async def get_metadata_call_function( return call return None + async def get_metadata_events(self, block_hash=None) -> list[dict]: + """ + Retrieves a list of all events in metadata active for given block_hash (or chaintip if block_hash is omitted) + + Args: + block_hash + + Returns: + list of module events + """ + + runtime = await self.init_runtime(block_hash=block_hash) + + event_list = [] + + for event_index, (module, event) in self.metadata.event_index.items(): + event_list.append( + self.serialize_module_event( + module, event, runtime.runtime_version, event_index + ) + ) + + return event_list + + async def get_metadata_event( + self, module_name, event_name, block_hash=None + ) -> Optional[Any]: + """ + Retrieves the details of an event for given module name, call function name and block_hash + (or chaintip if block_hash is omitted) + + Args: + module_name: name of the module to call + event_name: name of the event + block_hash: hash of the block + + Returns: + Metadata event + + """ + + runtime = await self.init_runtime(block_hash=block_hash) + + for pallet in runtime.metadata.pallets: + if pallet.name == module_name and pallet.events: + for event in pallet.events: + if event.name == event_name: + return event + async def get_block_number(self, block_hash: Optional[str] = None) -> int: """Async version of `substrateinterface.base.get_block_number` method.""" response = await self.rpc_request("chain_getHeader", [block_hash]) diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index 
62ab7d2..b1b6269 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -822,7 +822,7 @@ def subscription_handler(storage_key, obj, subscription_id): Args: storage_keys: StorageKey list of storage keys to subscribe to - subscription_handler: coroutine function to handle value changes of subscription + subscription_handler: function to handle value changes of subscription """ self.init_runtime() @@ -1040,6 +1040,41 @@ def get_metadata_runtime_call_function( return runtime_call_def_obj + def get_metadata_runtime_call_function( + self, api: str, method: str + ) -> GenericRuntimeCallDefinition: + """ + Get details of a runtime API call + + Args: + api: Name of the runtime API e.g. 'TransactionPaymentApi' + method: Name of the method e.g. 'query_fee_details' + + Returns: + GenericRuntimeCallDefinition + """ + self.init_runtime() + + try: + runtime_call_def = self.runtime_config.type_registry["runtime_api"][api][ + "methods" + ][method] + runtime_call_def["api"] = api + runtime_call_def["method"] = method + runtime_api_types = self.runtime_config.type_registry["runtime_api"][ + api + ].get("types", {}) + except KeyError: + raise ValueError(f"Runtime API Call '{api}.{method}' not found in registry") + + # Add runtime API types to registry + self.runtime_config.update_type_registry_types(runtime_api_types) + + runtime_call_def_obj = self.create_scale_object("RuntimeCallDefinition") + runtime_call_def_obj.encode(runtime_call_def) + + return runtime_call_def_obj + def _get_block_handler( self, block_hash: str, @@ -1510,6 +1545,21 @@ def convert_event_data(data): events.append(convert_event_data(item)) return events + def get_metadata(self, block_hash=None): + """ + Returns `MetadataVersioned` object for given block_hash or chaintip if block_hash is omitted + + + Args: + block_hash + + Returns: + MetadataVersioned + """ + runtime = self.init_runtime(block_hash=block_hash) + + return runtime.metadata + 
@functools.lru_cache(maxsize=512) def get_parent_block_hash(self, block_hash): block_header = self.rpc_request("chain_getHeader", [block_hash]) @@ -2428,6 +2478,29 @@ def get_account_next_index(self, account_address: str) -> int: nonce_obj = self.rpc_request("account_nextIndex", [account_address]) return nonce_obj["result"] + def get_metadata_constants(self, block_hash=None) -> list[dict]: + """ + Retrieves a list of all constants in metadata active at given block_hash (or chaintip if block_hash is omitted) + + Args: + block_hash: hash of the block + + Returns: + list of constants + """ + + runtime = self.init_runtime(block_hash=block_hash) + + constant_list = [] + + for module_idx, module in enumerate(self.metadata.pallets): + for constant in module.constants or []: + constant_list.append( + self.serialize_constant(constant, module, runtime.runtime_version) + ) + + return constant_list + def get_metadata_constant(self, module_name, constant_name, block_hash=None): """ Retrieves the details of a constant for given module name, call function name and block_hash @@ -2926,6 +2999,55 @@ def get_metadata_call_function( return call return None + def get_metadata_events(self, block_hash=None) -> list[dict]: + """ + Retrieves a list of all events in metadata active for given block_hash (or chaintip if block_hash is omitted) + + Args: + block_hash + + Returns: + list of module events + """ + + runtime = self.init_runtime(block_hash=block_hash) + + event_list = [] + + for event_index, (module, event) in self.metadata.event_index.items(): + event_list.append( + self.serialize_module_event( + module, event, runtime.runtime_version, event_index + ) + ) + + return event_list + + def get_metadata_event( + self, module_name, event_name, block_hash=None + ) -> Optional[Any]: + """ + Retrieves the details of an event for given module name, call function name and block_hash + (or chaintip if block_hash is omitted) + + Args: + module_name: name of the module to call + event_name: name 
of the event + block_hash: hash of the block + + Returns: + Metadata event + + """ + + runtime = self.init_runtime(block_hash=block_hash) + + for pallet in runtime.metadata.pallets: + if pallet.name == module_name and pallet.events: + for event in pallet.events: + if event.name == event_name: + return event + def get_block_number(self, block_hash: Optional[str] = None) -> int: """Async version of `substrateinterface.base.get_block_number` method.""" response = self.rpc_request("chain_getHeader", [block_hash]) From d2c1ed95032a6b90404b3072720fb7fd5948fb54 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 29 Apr 2025 21:55:08 +0200 Subject: [PATCH 13/28] Other methods added. --- async_substrate_interface/async_substrate.py | 186 ++++++++++++++++++- async_substrate_interface/sync_substrate.py | 182 +++++++++++++++++- async_substrate_interface/types.py | 26 ++- 3 files changed, 389 insertions(+), 5 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 7c727c3..d38cc53 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -30,6 +30,7 @@ GenericExtrinsic, GenericRuntimeCallDefinition, ss58_encode, + MultiAccountId, ) from websockets.asyncio.client import connect from websockets.exceptions import ConnectionClosed @@ -1137,6 +1138,32 @@ async def result_handler( result_handler=result_handler, ) + async def retrieve_pending_extrinsics(self) -> list: + """ + Retrieves and decodes pending extrinsics from the node's transaction pool + + Returns: + list of extrinsics + """ + + runtime = await self.init_runtime() + + result_data = await self.rpc_request("author_pendingExtrinsics", []) + + extrinsics = [] + + for extrinsic_data in result_data["result"]: + extrinsic = runtime.runtime_config.create_scale_object( + "Extrinsic", metadata=runtime.metadata + ) + extrinsic.decode( + ScaleBytes(extrinsic_data), + 
check_remaining=self.config.get("strict_scale_decode"), + ) + extrinsics.append(extrinsic) + + return extrinsics + async def get_metadata_storage_functions(self, block_hash=None) -> list: """ Retrieves a list of all storage functions in metadata active at given block_hash (or chaintip if block_hash is @@ -1802,7 +1829,7 @@ def convert_event_data(data): events.append(convert_event_data(item)) return events - async def get_metadata(self, block_hash=None): + async def get_metadata(self, block_hash=None) -> MetadataV15: """ Returns `MetadataVersioned` object for given block_hash or chaintip if block_hash is omitted @@ -1815,7 +1842,7 @@ async def get_metadata(self, block_hash=None): """ runtime = await self.init_runtime(block_hash=block_hash) - return runtime.metadata + return runtime.metadata_v15 @a.lru_cache(maxsize=512) async def get_parent_block_hash(self, block_hash): @@ -1833,10 +1860,43 @@ async def _get_parent_block_hash(self, block_hash): return block_hash return parent_block_hash + async def get_storage_by_key(self, block_hash: str, storage_key: str) -> Any: + """ + A pass-though to existing JSONRPC method `state_getStorage`/`state_getStorageAt` + + Args: + block_hash: hash of the block + storage_key: storage key to query + + Returns: + result of the query + + """ + + if await self.supports_rpc_method("state_getStorageAt"): + response = await self.rpc_request( + "state_getStorageAt", [storage_key, block_hash] + ) + else: + response = await self.rpc_request( + "state_getStorage", [storage_key, block_hash] + ) + + if "result" in response: + return response.get("result") + elif "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + else: + raise SubstrateRequestException( + "Unknown error occurred during retrieval of events" + ) + @a.lru_cache(maxsize=16) async def get_block_runtime_info(self, block_hash: str) -> dict: return await self._get_block_runtime_info(block_hash) + get_block_runtime_version = get_block_runtime_info + 
async def _get_block_runtime_info(self, block_hash: str) -> dict: """ Retrieve the runtime info of given block_hash @@ -2591,6 +2651,34 @@ async def create_signed_extrinsic( return extrinsic + async def create_unsigned_extrinsic(self, call: GenericCall) -> GenericExtrinsic: + """ + Create unsigned extrinsic for given `Call` + + Args: + call: GenericCall the call the extrinsic should contain + + Returns: + GenericExtrinsic + """ + + runtime = await self.init_runtime() + + # Create extrinsic + extrinsic = self.runtime_config.create_scale_object( + type_string="Extrinsic", metadata=runtime.metadata + ) + + extrinsic.encode( + { + "call_function": call.value["call_function"], + "call_module": call.value["call_module"], + "call_args": call.value["call_args"], + } + ) + + return extrinsic + async def get_chain_finalised_head(self): """ A pass-though to existing JSONRPC method `chain_getFinalizedHead` @@ -3165,6 +3253,100 @@ async def query_map( ignore_decoding_errors=ignore_decoding_errors, ) + async def create_multisig_extrinsic( + self, + call: GenericCall, + keypair: Keypair, + multisig_account: MultiAccountId, + max_weight: Optional[Union[dict, int]] = None, + era: dict = None, + nonce: int = None, + tip: int = 0, + tip_asset_id: int = None, + signature: Union[bytes, str] = None, + ) -> GenericExtrinsic: + """ + Create a Multisig extrinsic that will be signed by one of the signatories. Checks on-chain if the threshold + of the multisig account is reached and try to execute the call accordingly. 
+ + Args: + call: GenericCall to create extrinsic for + keypair: Keypair of the signatory to approve given call + multisig_account: MultiAccountId to use of origin of the extrinsic (see `generate_multisig_account()`) + max_weight: Maximum allowed weight to execute the call ( Uses `get_payment_info()` by default) + era: Specify mortality in blocks in follow format: {'period': [amount_blocks]} If omitted the extrinsic is + immortal + nonce: nonce to include in extrinsics, if omitted the current nonce is retrieved on-chain + tip: The tip for the block author to gain priority during network congestion + tip_asset_id: Optional asset ID with which to pay the tip + signature: Optionally provide signature if externally signed + + Returns: + GenericExtrinsic + """ + if max_weight is None: + payment_info = await self.get_payment_info(call, keypair) + max_weight = payment_info["weight"] + + # Check if call has existing approvals + multisig_details_ = await self.query( + "Multisig", "Multisigs", [multisig_account.value, call.call_hash] + ) + multisig_details = getattr(multisig_details_, "value", multisig_details_) + if multisig_details: + maybe_timepoint = multisig_details["when"] + else: + maybe_timepoint = None + + # Compose 'as_multi' when final, 'approve_as_multi' otherwise + if ( + multisig_details.value + and len(multisig_details.value["approvals"]) + 1 + == multisig_account.threshold + ): + multi_sig_call = await self.compose_call( + "Multisig", + "as_multi", + { + "other_signatories": [ + s + for s in multisig_account.signatories + if s != f"0x{keypair.public_key.hex()}" + ], + "threshold": multisig_account.threshold, + "maybe_timepoint": maybe_timepoint, + "call": call, + "store_call": False, + "max_weight": max_weight, + }, + ) + else: + multi_sig_call = await self.compose_call( + "Multisig", + "approve_as_multi", + { + "other_signatories": [ + s + for s in multisig_account.signatories + if s != f"0x{keypair.public_key.hex()}" + ], + "threshold": 
multisig_account.threshold, + "maybe_timepoint": maybe_timepoint, + "call_hash": call.call_hash, + "max_weight": max_weight, + }, + ) + + return await self.create_signed_extrinsic( + multi_sig_call, + keypair, + era=era, + nonce=nonce, + tip=tip, + tip_asset_id=tip_asset_id, + signature=signature, + ) + async def submit_extrinsic( self, extrinsic: GenericExtrinsic, diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index b1b6269..45eaddb 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -11,6 +11,7 @@ GenericExtrinsic, GenericRuntimeCallDefinition, ss58_encode, + MultiAccountId, ) from scalecodec.base import RuntimeConfigurationObject, ScaleBytes, ScaleType from websockets.sync.client import connect @@ -890,6 +891,32 @@ def result_handler( result_handler=result_handler, ) + def retrieve_pending_extrinsics(self) -> list: + """ + Retrieves and decodes pending extrinsics from the node's transaction pool + + Returns: + list of extrinsics + """ + + runtime = self.init_runtime() + + result_data = self.rpc_request("author_pendingExtrinsics", []) + + extrinsics = [] + + for extrinsic_data in result_data["result"]: + extrinsic = runtime.runtime_config.create_scale_object( + "Extrinsic", metadata=runtime.metadata + ) + extrinsic.decode( + ScaleBytes(extrinsic_data), + check_remaining=self.config.get("strict_scale_decode"), + ) + extrinsics.append(extrinsic) + + return extrinsics + def get_metadata_storage_functions(self, block_hash=None) -> list: """ Retrieves a list of all storage functions in metadata active at given block_hash (or chaintip if block_hash is @@ -1545,7 +1572,7 @@ def convert_event_data(data): events.append(convert_event_data(item)) return events - def get_metadata(self, block_hash=None): + def get_metadata(self, block_hash=None) -> MetadataV15: """ Returns `MetadataVersioned` object for given block_hash or chaintip if block_hash is omitted @@ 
-1558,7 +1585,7 @@ def get_metadata(self, block_hash=None): """ runtime = self.init_runtime(block_hash=block_hash) - return runtime.metadata + return runtime.metadata_v15 @functools.lru_cache(maxsize=512) def get_parent_block_hash(self, block_hash): @@ -1573,6 +1600,33 @@ def get_parent_block_hash(self, block_hash): return block_hash return parent_block_hash + def get_storage_by_key(self, block_hash: str, storage_key: str) -> Any: + """ + A pass-though to existing JSONRPC method `state_getStorage`/`state_getStorageAt` + + Args: + block_hash: hash of the block + storage_key: storage key to query + + Returns: + result of the query + + """ + + if self.supports_rpc_method("state_getStorageAt"): + response = self.rpc_request("state_getStorageAt", [storage_key, block_hash]) + else: + response = self.rpc_request("state_getStorage", [storage_key, block_hash]) + + if "result" in response: + return response.get("result") + elif "error" in response: + raise SubstrateRequestException(response["error"]["message"]) + else: + raise SubstrateRequestException( + "Unknown error occurred during retrieval of events" + ) + @functools.lru_cache(maxsize=16) def get_block_runtime_info(self, block_hash: str) -> dict: """ @@ -1581,6 +1635,8 @@ def get_block_runtime_info(self, block_hash: str) -> dict: response = self.rpc_request("state_getRuntimeVersion", [block_hash]) return response.get("result") + get_block_runtime_version = get_block_runtime_info + @functools.lru_cache(maxsize=512) def get_block_runtime_version_for(self, block_hash: str): """ @@ -2305,6 +2361,34 @@ def create_signed_extrinsic( return extrinsic + def create_unsigned_extrinsic(self, call: GenericCall) -> GenericExtrinsic: + """ + Create unsigned extrinsic for given `Call` + + Args: + call: GenericCall the call the extrinsic should contain + + Returns: + GenericExtrinsic + """ + + runtime = self.init_runtime() + + # Create extrinsic + extrinsic = self.runtime_config.create_scale_object( + type_string="Extrinsic", 
metadata=runtime.metadata + ) + + extrinsic.encode( + { + "call_function": call.value["call_function"], + "call_module": call.value["call_module"], + "call_args": call.value["call_args"], + } + ) + + return extrinsic + def get_chain_finalised_head(self): """ A pass-though to existing JSONRPC method `chain_getFinalizedHead` @@ -2865,6 +2949,100 @@ def query_map( ignore_decoding_errors=ignore_decoding_errors, ) + def create_multisig_extrinsic( + self, + call: GenericCall, + keypair: Keypair, + multisig_account: MultiAccountId, + max_weight: Optional[Union[dict, int]] = None, + era: dict = None, + nonce: int = None, + tip: int = 0, + tip_asset_id: int = None, + signature: Union[bytes, str] = None, + ) -> GenericExtrinsic: + """ + Create a Multisig extrinsic that will be signed by one of the signatories. Checks on-chain if the threshold + of the multisig account is reached and try to execute the call accordingly. + + Args: + call: GenericCall to create extrinsic for + keypair: Keypair of the signatory to approve given call + multisig_account: MultiAccountId to use of origin of the extrinsic (see `generate_multisig_account()`) + max_weight: Maximum allowed weight to execute the call ( Uses `get_payment_info()` by default) + era: Specify mortality in blocks in follow format: {'period': [amount_blocks]} If omitted the extrinsic is + immortal + nonce: nonce to include in extrinsics, if omitted the current nonce is retrieved on-chain + tip: The tip for the block author to gain priority during network congestion + tip_asset_id: Optional asset ID with which to pay the tip + signature: Optionally provide signature if externally signed + + Returns: + GenericExtrinsic + """ + if max_weight is None: + payment_info = self.get_payment_info(call, keypair) + max_weight = payment_info["weight"] + + # Check if call has existing approvals + multisig_details = self.query( + "Multisig", "Multisigs", [multisig_account.value, call.call_hash] + ) + + if multisig_details.value: + 
maybe_timepoint = multisig_details.value["when"] + else: + maybe_timepoint = None + + # Compose 'as_multi' when final, 'approve_as_multi' otherwise + if ( + multisig_details.value + and len(multisig_details.value["approvals"]) + 1 + == multisig_account.threshold + ): + multi_sig_call = self.compose_call( + "Multisig", + "as_multi", + { + "other_signatories": [ + s + for s in multisig_account.signatories + if s != f"0x{keypair.public_key.hex()}" + ], + "threshold": multisig_account.threshold, + "maybe_timepoint": maybe_timepoint, + "call": call, + "store_call": False, + "max_weight": max_weight, + }, + ) + else: + multi_sig_call = self.compose_call( + "Multisig", + "approve_as_multi", + { + "other_signatories": [ + s + for s in multisig_account.signatories + if s != f"0x{keypair.public_key.hex()}" + ], + "threshold": multisig_account.threshold, + "maybe_timepoint": maybe_timepoint, + "call_hash": call.call_hash, + "max_weight": max_weight, + }, + ) + + return self.create_signed_extrinsic( + multi_sig_call, + keypair, + era=era, + nonce=nonce, + tip=tip, + tip_asset_id=tip_asset_id, + signature=signature, + ) + def submit_extrinsic( self, extrinsic: GenericExtrinsic, diff --git a/async_substrate_interface/types.py b/async_substrate_interface/types.py index 754b860..261f73a 100644 --- a/async_substrate_interface/types.py +++ b/async_substrate_interface/types.py @@ -11,7 +11,7 @@ from scalecodec import ss58_encode, ss58_decode, is_valid_ss58_address from scalecodec.base import RuntimeConfigurationObject, ScaleBytes from scalecodec.type_registry import load_type_registry_preset -from scalecodec.types import GenericCall, ScaleType +from scalecodec.types import GenericCall, ScaleType, MultiAccountId from .utils import json @@ -919,3 +919,27 @@ def _encode_account_id(self, account) -> bytes: if isinstance(account, bytes): return account # Already encoded return bytes.fromhex(ss58_decode(account, SS58_FORMAT)) # SS58 string + + def generate_multisig_account( + self, 
signatories: list, threshold: int + ) -> MultiAccountId: + """ + Generate deterministic Multisig account with supplied signatories and threshold + + Args: + signatories: List of signatories + threshold: Amount of approvals needed to execute + + Returns: + MultiAccountId + """ + + multi_sig_account = MultiAccountId.create_from_account_list( + signatories, threshold + ) + + multi_sig_account.ss58_address = ss58_encode( + multi_sig_account.value.replace("0x", ""), self.ss58_format + ) + + return multi_sig_account From c3f60065d65851127d1c6123556b447c38f3ccd8 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 2 May 2025 17:50:48 +0200 Subject: [PATCH 14/28] Max connections semaphore object added. --- async_substrate_interface/async_substrate.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 94abf59..cc78a0e 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -516,7 +516,7 @@ def __init__( # TODO reconnection logic self.ws_url = ws_url self.ws: Optional["ClientConnection"] = None - self.max_subscriptions = max_subscriptions + self.max_subscriptions = asyncio.Semaphore(max_subscriptions) self.max_connections = max_connections self.shutdown_timer = shutdown_timer self._received = {} @@ -631,6 +631,7 @@ async def send(self, payload: dict) -> int: # async with self._lock: original_id = get_next_id() # self._open_subscriptions += 1 + await self.max_subscriptions.acquire() try: await self.ws.send(json.dumps({**payload, **{"id": original_id}})) return original_id @@ -649,7 +650,9 @@ async def retrieve(self, item_id: int) -> Optional[dict]: retrieved item """ try: - return self._received.pop(item_id) + item = self._received.pop(item_id) + self.max_subscriptions.release() + return item except KeyError: await asyncio.sleep(0.001) return None @@ -876,7 +879,7 @@ async def decode_scale( 
scale_bytes: bytes, _attempt=1, _retries=3, - return_scale_obj=False, + return_scale_obj: bool = False, ) -> Union[ScaleObj, Any]: """ Helper function to decode arbitrary SCALE-bytes (e.g. 0x02000000) according to given RUST type_string @@ -2528,13 +2531,13 @@ async def runtime_call( Returns: ScaleType from the runtime call """ - await self.init_runtime(block_hash=block_hash) + runtime = await self.init_runtime(block_hash=block_hash) if params is None: params = {} try: - metadata_v15_value = self.runtime.metadata_v15.value() + metadata_v15_value = runtime.metadata_v15.value() apis = {entry["name"]: entry for entry in metadata_v15_value["apis"]} api_entry = apis[api] From 8a04ec0293618f89b583b979a4fcd1481a4f8e21 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Fri, 2 May 2025 17:57:40 +0200 Subject: [PATCH 15/28] Exposes `_get_block_handler` publicly in both async and sync substrate interfaces. --- async_substrate_interface/async_substrate.py | 2 ++ async_substrate_interface/sync_substrate.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 94abf59..7795630 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -1394,6 +1394,8 @@ async def result_handler( response["result"]["block"], block_data_hash=block_hash ) + get_block_handler = _get_block_handler + async def get_block( self, block_hash: Optional[str] = None, diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index a3a9f4d..f2265fe 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -1141,6 +1141,8 @@ def result_handler(message: dict, subscription_id: str) -> tuple[Any, bool]: response["result"]["block"], block_data_hash=block_hash ) + get_block_handler = _get_block_handler + def get_block( self, block_hash: Optional[str] = None, From 
d80c1e62f552d1a35d9d56ac2b897fc63b38a7b9 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Mon, 5 May 2025 23:14:07 +0200 Subject: [PATCH 16/28] New direction --- async_substrate_interface/substrate_addons.py | 315 ++++++++++-------- 1 file changed, 175 insertions(+), 140 deletions(-) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index 362c8b7..b6d4daa 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -2,20 +2,90 @@ import logging from functools import partial from itertools import cycle -from typing import Optional, Type, Union +from typing import Optional from async_substrate_interface.async_substrate import AsyncSubstrateInterface from async_substrate_interface.errors import MaxRetriesExceeded from async_substrate_interface.sync_substrate import SubstrateInterface -SubstrateClass = Type[Union[SubstrateInterface, AsyncSubstrateInterface]] +RETRY_METHODS = [ + "_get_block_handler", + "apply_type_registry_presets", + "close", + "compose_call", + "connect", + "create_scale_object", + "create_signed_extrinsic", + "create_storage_key", + "decode_scale", + "encode_scale", + "extension_call", + "filter_events", + "filter_extrinsics", + "generate_signature_payload", + "get_account_next_index", + "get_account_nonce", + "get_block", + "get_block_hash", + "get_block_header", + "get_block_metadata", + "get_block_number", + "get_block_runtime_info", + "get_block_runtime_version_for", + "get_block_timestamp", + "get_chain_finalised_head", + "get_chain_head", + "get_constant", + "get_events", + "get_extrinsics", + "get_metadata_call_function", + "get_metadata_constant", + "get_metadata_error", + "get_metadata_errors", + "get_metadata_module", + "get_metadata_modules", + "get_metadata_runtime_call_function", + "get_metadata_runtime_call_functions", + "get_metadata_storage_function", + "get_metadata_storage_functions", + "get_parent_block_hash", + 
"get_payment_info", + "get_storage_item", + "get_type_definition", + "get_type_registry", + "init_runtime", + "initialize", + "is_valid_ss58_address", + "load_runtime", + "make_payload", + "query", + "query_map", + "query_multi", + "query_multiple", + "reload_type_registry", + "retrieve_extrinsic_by_hash", + "retrieve_extrinsic_by_identifier", + "rpc_request", + "runtime_call", + "search_block_number", + "serialize_constant", + "serialize_module_call", + "serialize_module_error", + "serialize_module_event", + "serialize_storage_item", + "ss58_decode", + "ss58_encode", + "submit_extrinsic", + "subscribe_block_headers", + "supports_rpc_method", +] -class RetrySubstrate: + +class RetrySyncSubstrate(SubstrateInterface): def __init__( self, - substrate: SubstrateClass, - main_url: str, + url: str, use_remote_preset: bool = False, fallback_chains: Optional[list[str]] = None, retry_forever: bool = False, @@ -28,30 +98,29 @@ def __init__( _mock: bool = False, ): fallback_chains = fallback_chains or [] - self._substrate_class: SubstrateClass = substrate - self.ss58_format: int = ss58_format - self.type_registry: dict = type_registry - self.use_remote_preset: bool = use_remote_preset - self.chain_name: Optional[str] = chain_name - self.max_retries: int = max_retries - self.retry_timeout: float = retry_timeout - self._mock = _mock - self.type_registry_preset: Optional[str] = type_registry_preset self.fallback_chains = ( iter(fallback_chains) if not retry_forever - else cycle(fallback_chains + [main_url]) + else cycle(fallback_chains + [url]) ) + self.use_remote_preset = use_remote_preset + self.chain_name = chain_name + self._mock = _mock + self.retry_timeout = retry_timeout + self.max_retries = max_retries initialized = False - for chain_url in [main_url] + fallback_chains: + for chain_url in [url] + fallback_chains: try: - self._substrate = self._substrate_class( + super().__init__( url=chain_url, ss58_format=ss58_format, type_registry=type_registry, 
use_remote_preset=use_remote_preset, + type_registry_preset=type_registry_preset, chain_name=chain_name, _mock=_mock, + retry_timeout=retry_timeout, + max_retries=max_retries, ) initialized = True break @@ -59,125 +128,108 @@ def __init__( logging.warning(f"Unable to connect to {chain_url}") if not initialized: raise ConnectionError( - f"Unable to connect at any chains specified: {[main_url] + fallback_chains}" + f"Unable to connect at any chains specified: {[url] + fallback_chains}" ) - - # retries - - # TODO: properties that need retry logic - # properties - # version - # token_decimals - # token_symbol - # name - - retry = ( - self._async_retry - if self._substrate_class == AsyncSubstrateInterface - else self._retry - ) - - self._get_block_handler = partial(retry, "_get_block_handler") - self.apply_type_registry_presets = partial(retry, "apply_type_registry_presets") - self.close = partial(retry, "close") - self.compose_call = partial(retry, "compose_call") - self.connect = partial(retry, "connect") - self.create_scale_object = partial(retry, "create_scale_object") - self.create_signed_extrinsic = partial(retry, "create_signed_extrinsic") - self.create_storage_key = partial(retry, "create_storage_key") - self.decode_scale = partial(retry, "decode_scale") - self.encode_scale = partial(retry, "encode_scale") - self.extension_call = partial(retry, "extension_call") - self.filter_events = partial(retry, "filter_events") - self.filter_extrinsics = partial(retry, "filter_extrinsics") - self.generate_signature_payload = partial(retry, "generate_signature_payload") - self.get_account_next_index = partial(retry, "get_account_next_index") - self.get_account_nonce = partial(retry, "get_account_nonce") - self.get_block = partial(retry, "get_block") - self.get_block_hash = partial(retry, "get_block_hash") - self.get_block_header = partial(retry, "get_block_header") - self.get_block_metadata = partial(retry, "get_block_metadata") - self.get_block_number = partial(retry, 
"get_block_number") - self.get_block_runtime_info = partial(retry, "get_block_runtime_info") - self.get_block_runtime_version_for = partial( - retry, "get_block_runtime_version_for" - ) - self.get_block_timestamp = partial(retry, "get_block_timestamp") - self.get_chain_finalised_head = partial(retry, "get_chain_finalised_head") - self.get_chain_head = partial(retry, "get_chain_head") - self.get_constant = partial(retry, "get_constant") - self.get_events = partial(retry, "get_events") - self.get_extrinsics = partial(retry, "get_extrinsics") - self.get_metadata_call_function = partial(retry, "get_metadata_call_function") - self.get_metadata_constant = partial(retry, "get_metadata_constant") - self.get_metadata_error = partial(retry, "get_metadata_error") - self.get_metadata_errors = partial(retry, "get_metadata_errors") - self.get_metadata_module = partial(retry, "get_metadata_module") - self.get_metadata_modules = partial(retry, "get_metadata_modules") - self.get_metadata_runtime_call_function = partial( - retry, "get_metadata_runtime_call_function" - ) - self.get_metadata_runtime_call_functions = partial( - retry, "get_metadata_runtime_call_functions" - ) - self.get_metadata_storage_function = partial( - retry, "get_metadata_storage_function" - ) - self.get_metadata_storage_functions = partial( - retry, "get_metadata_storage_functions" - ) - self.get_parent_block_hash = partial(retry, "get_parent_block_hash") - self.get_payment_info = partial(retry, "get_payment_info") - self.get_storage_item = partial(retry, "get_storage_item") - self.get_type_definition = partial(retry, "get_type_definition") - self.get_type_registry = partial(retry, "get_type_registry") - self.init_runtime = partial(retry, "init_runtime") - self.initialize = partial(retry, "initialize") - self.is_valid_ss58_address = partial(retry, "is_valid_ss58_address") - self.load_runtime = partial(retry, "load_runtime") - self.make_payload = partial(retry, "make_payload") - self.query = partial(retry, 
"query") - self.query_map = partial(retry, "query_map") - self.query_multi = partial(retry, "query_multi") - self.query_multiple = partial(retry, "query_multiple") - self.reload_type_registry = partial(retry, "reload_type_registry") - self.retrieve_extrinsic_by_hash = partial(retry, "retrieve_extrinsic_by_hash") - self.retrieve_extrinsic_by_identifier = partial( - retry, "retrieve_extrinsic_by_identifier" - ) - self.rpc_request = partial(retry, "rpc_request") - self.runtime_call = partial(retry, "runtime_call") - self.search_block_number = partial(retry, "search_block_number") - self.serialize_constant = partial(retry, "serialize_constant") - self.serialize_module_call = partial(retry, "serialize_module_call") - self.serialize_module_error = partial(retry, "serialize_module_error") - self.serialize_module_event = partial(retry, "serialize_module_event") - self.serialize_storage_item = partial(retry, "serialize_storage_item") - self.ss58_decode = partial(retry, "ss58_decode") - self.ss58_encode = partial(retry, "ss58_encode") - self.submit_extrinsic = partial(retry, "submit_extrinsic") - self.subscribe_block_headers = partial(retry, "subscribe_block_headers") - self.supports_rpc_method = partial(retry, "supports_rpc_method") - self.ws = self._substrate.ws + for method in RETRY_METHODS: + setattr(self, method, partial(self._retry, method)) def _retry(self, method, *args, **kwargs): try: - method_ = getattr(self._substrate, method) + method_ = getattr(self, method) return method_(*args, **kwargs) except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: try: self._reinstantiate_substrate(e) - method_ = getattr(self._substrate, method) + method_ = getattr(self, method) return self._retry(method_(*args, **kwargs)) except StopIteration: logging.error( - f"Max retries exceeded with {self._substrate.url}. No more fallback chains." + f"Max retries exceeded with {self.url}. No more fallback chains." 
) raise MaxRetriesExceeded - async def _async_retry(self, method, *args, **kwargs): + def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: + next_network = next(self.fallback_chains) + if e.__class__ == MaxRetriesExceeded: + logging.error( + f"Max retries exceeded with {self.url}. Retrying with {next_network}." + ) + else: + print(f"Connection error. Trying again with {next_network}") + super().__init__( + url=next_network, + ss58_format=self.ss58_format, + type_registry=self.type_registry, + use_remote_preset=self.use_remote_preset, + chain_name=self.chain_name, + _mock=self._mock, + retry_timeout=self.retry_timeout, + max_retries=self.max_retries, + ) + + +class RetryAsyncSubstrate(AsyncSubstrateInterface): + def __init__( + self, + url: str, + use_remote_preset: bool = False, + fallback_chains: Optional[list[str]] = None, + retry_forever: bool = False, + ss58_format: Optional[int] = None, + type_registry: Optional[dict] = None, + type_registry_preset: Optional[str] = None, + chain_name: str = "", + max_retries: int = 5, + retry_timeout: float = 60.0, + _mock: bool = False, + ): + fallback_chains = fallback_chains or [] + self.fallback_chains = ( + iter(fallback_chains) + if not retry_forever + else cycle(fallback_chains + [url]) + ) + self.use_remote_preset = use_remote_preset + self.chain_name = chain_name + self._mock = _mock + self.retry_timeout = retry_timeout + self.max_retries = max_retries + super().__init__( + url=url, + ss58_format=ss58_format, + type_registry=type_registry, + use_remote_preset=use_remote_preset, + type_registry_preset=type_registry_preset, + chain_name=chain_name, + _mock=_mock, + retry_timeout=retry_timeout, + max_retries=max_retries, + ) + for method in RETRY_METHODS: + setattr(self, method, partial(self._retry, method)) + + def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: + next_network = next(self.fallback_chains) + if e.__class__ == MaxRetriesExceeded: + logging.error( + f"Max 
retries exceeded with {self.url}. Retrying with {next_network}." + ) + else: + print(f"Connection error. Trying again with {next_network}") + super().__init__( + url=next_network, + ss58_format=self.ss58_format, + type_registry=self.type_registry, + use_remote_preset=self.use_remote_preset, + chain_name=self.chain_name, + _mock=self._mock, + retry_timeout=self.retry_timeout, + max_retries=self.max_retries, + ) + + async def _retry(self, method, *args, **kwargs): try: - method_ = getattr(self._substrate, method) + method_ = getattr(self, method) if asyncio.iscoroutinefunction(method_): return await method_(*args, **kwargs) else: @@ -185,30 +237,13 @@ async def _async_retry(self, method, *args, **kwargs): except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: try: self._reinstantiate_substrate(e) - method_ = getattr(self._substrate, method) + method_ = getattr(self, method) if asyncio.iscoroutinefunction(method_): return await method_(*args, **kwargs) else: return method_(*args, **kwargs) except StopIteration: logging.error( - f"Max retries exceeded with {self._substrate.url}. No more fallback chains." + f"Max retries exceeded with {self.url}. No more fallback chains." ) raise MaxRetriesExceeded - - def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: - next_network = next(self.fallback_chains) - if e.__class__ == MaxRetriesExceeded: - logging.error( - f"Max retries exceeded with {self._substrate.url}. Retrying with {next_network}." - ) - else: - print(f"Connection error. Trying again with {next_network}") - self._substrate = self._substrate_class( - url=next_network, - ss58_format=self.ss58_format, - type_registry=self.type_registry, - use_remote_preset=self.use_remote_preset, - chain_name=self.chain_name, - _mock=self._mock, - ) From a64def5826665e55f75b9bd447c377783feedbe0 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Mon, 5 May 2025 23:58:12 +0200 Subject: [PATCH 17/28] Add properties as well. 
--- async_substrate_interface/substrate_addons.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index b6d4daa..2409cf0 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -81,6 +81,8 @@ "supports_rpc_method", ] +RETRY_PROPS = ["properties", "version", "token_decimals", "token_symbol", "name"] + class RetrySyncSubstrate(SubstrateInterface): def __init__( @@ -132,6 +134,8 @@ def __init__( ) for method in RETRY_METHODS: setattr(self, method, partial(self._retry, method)) + for property_ in RETRY_PROPS: + setattr(self, property_, partial(self._retry_property, property_)) def _retry(self, method, *args, **kwargs): try: @@ -148,6 +152,19 @@ def _retry(self, method, *args, **kwargs): ) raise MaxRetriesExceeded + def _retry_property(self, property_): + try: + return getattr(self, property_) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate(e) + return self._retry_property(property_) + except StopIteration: + logging.error( + f"Max retries exceeded with {self.url}. No more fallback chains." 
+ ) + raise MaxRetriesExceeded + def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) if e.__class__ == MaxRetriesExceeded: @@ -207,6 +224,8 @@ def __init__( ) for method in RETRY_METHODS: setattr(self, method, partial(self._retry, method)) + for property_ in RETRY_PROPS: + setattr(self, property_, partial(self._retry_property, property_)) def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) @@ -237,6 +256,7 @@ async def _retry(self, method, *args, **kwargs): except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: try: self._reinstantiate_substrate(e) + await self.initialize() method_ = getattr(self, method) if asyncio.iscoroutinefunction(method_): return await method_(*args, **kwargs) @@ -247,3 +267,16 @@ async def _retry(self, method, *args, **kwargs): f"Max retries exceeded with {self.url}. No more fallback chains." ) raise MaxRetriesExceeded + + async def _retry_property(self, property_): + try: + return await getattr(self, property_) + except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + try: + self._reinstantiate_substrate(e) + return await self._retry_property(property_) + except StopIteration: + logging.error( + f"Max retries exceeded with {self.url}. No more fallback chains." 
+ ) + raise MaxRetriesExceeded From d0cf9a3b7a29e9345f8f789f691155d84960a6ad Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 08:27:21 +0200 Subject: [PATCH 18/28] Fixes #109 --- async_substrate_interface/sync_substrate.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index 91986a8..01f5fdb 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -532,8 +532,10 @@ def __enter__(self): return self def __del__(self): - self.ws.close() - print("DELETING SUBSTATE") + try: + self.ws.close() + except AttributeError: + pass # self.ws.protocol.fail(code=1006) # ABNORMAL_CLOSURE def initialize(self): From f3d82ae77b4368bdc2ca5d11fe32c4d7597b091a Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 16:28:24 +0200 Subject: [PATCH 19/28] Make protocol runtime checkable, ruff --- async_substrate_interface/async_substrate.py | 3 +++ async_substrate_interface/const.py | 4 +-- async_substrate_interface/protocols.py | 27 ++++++++------------ 3 files changed, 14 insertions(+), 20 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index e3edcbe..8df2341 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -2967,6 +2967,9 @@ async def get_payment_info( if not isinstance(call, GenericCall): raise TypeError("'call' must be of type Call") + if not isinstance(keypair, Keypair): + raise TypeError("'keypair' must be of type Keypair") + # No valid signature is required for fee estimation signature = "0x" + "00" * 64 diff --git a/async_substrate_interface/const.py b/async_substrate_interface/const.py index 4e9a2eb..983f9e4 100644 --- a/async_substrate_interface/const.py +++ b/async_substrate_interface/const.py @@ -1,4 +1,2 @@ - - # Re-define SS58 format here to remove 
unnecessary dependencies. -SS58_FORMAT = 42 \ No newline at end of file +SS58_FORMAT = 42 diff --git a/async_substrate_interface/protocols.py b/async_substrate_interface/protocols.py index bbaf372..b50605f 100644 --- a/async_substrate_interface/protocols.py +++ b/async_substrate_interface/protocols.py @@ -1,43 +1,36 @@ -from typing import Awaitable, Protocol, Union, Optional +from typing import Awaitable, Protocol, Union, Optional, runtime_checkable -__all__: list[str] = [ - 'Keypair' -] +__all__: list[str] = ["Keypair"] # For reference only # class KeypairType: # """ # Type of cryptography, used in `Keypair` instance to encrypt and sign data -# +# # * ED25519 = 0 # * SR25519 = 1 # * ECDSA = 2 -# +# # """ # ED25519 = 0 # SR25519 = 1 # ECDSA = 2 +@runtime_checkable class Keypair(Protocol): - @property - def crypto_type(self) -> int: - ... + def crypto_type(self) -> int: ... @property - def public_key(self) -> Optional[bytes]: - ... + def public_key(self) -> Optional[bytes]: ... @property - def ss58_address(self) -> str: - ... + def ss58_address(self) -> str: ... @property - def ss58_format(self) -> int: - ... + def ss58_format(self) -> int: ... - def sign(self, data: Union[bytes, str]) -> Union[bytes, Awaitable[bytes]]: - ... \ No newline at end of file + def sign(self, data: Union[bytes, str]) -> Union[bytes, Awaitable[bytes]]: ... From 951d558bdda67619d10b8fa8023d72a9c16f9506 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 17:38:01 +0200 Subject: [PATCH 20/28] Update. 
--- async_substrate_interface/substrate_addons.py | 38 +++++-------------- 1 file changed, 9 insertions(+), 29 deletions(-) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index 2409cf0..7eb9628 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -11,18 +11,13 @@ RETRY_METHODS = [ "_get_block_handler", - "apply_type_registry_presets", "close", "compose_call", - "connect", "create_scale_object", "create_signed_extrinsic", "create_storage_key", "decode_scale", "encode_scale", - "extension_call", - "filter_events", - "filter_extrinsics", "generate_signature_payload", "get_account_next_index", "get_account_nonce", @@ -33,7 +28,6 @@ "get_block_number", "get_block_runtime_info", "get_block_runtime_version_for", - "get_block_timestamp", "get_chain_finalised_head", "get_chain_head", "get_constant", @@ -56,26 +50,13 @@ "get_type_registry", "init_runtime", "initialize", - "is_valid_ss58_address", - "load_runtime", - "make_payload", "query", "query_map", "query_multi", "query_multiple", - "reload_type_registry", - "retrieve_extrinsic_by_hash", "retrieve_extrinsic_by_identifier", "rpc_request", "runtime_call", - "search_block_number", - "serialize_constant", - "serialize_module_call", - "serialize_module_error", - "serialize_module_event", - "serialize_storage_item", - "ss58_decode", - "ss58_encode", "submit_extrinsic", "subscribe_block_headers", "supports_rpc_method", @@ -132,14 +113,15 @@ def __init__( raise ConnectionError( f"Unable to connect at any chains specified: {[url] + fallback_chains}" ) + self._original_methods = { + method: getattr(self, method) for method in RETRY_METHODS + } for method in RETRY_METHODS: setattr(self, method, partial(self._retry, method)) - for property_ in RETRY_PROPS: - setattr(self, property_, partial(self._retry_property, property_)) def _retry(self, method, *args, **kwargs): try: - method_ = getattr(self, method) + 
method_ = self._original_methods[method] return method_(*args, **kwargs) except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: try: @@ -222,10 +204,11 @@ def __init__( retry_timeout=retry_timeout, max_retries=max_retries, ) + self._original_methods = { + method: getattr(self, method) for method in RETRY_METHODS + } for method in RETRY_METHODS: setattr(self, method, partial(self._retry, method)) - for property_ in RETRY_PROPS: - setattr(self, property_, partial(self._retry_property, property_)) def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) @@ -248,11 +231,8 @@ def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: async def _retry(self, method, *args, **kwargs): try: - method_ = getattr(self, method) - if asyncio.iscoroutinefunction(method_): - return await method_(*args, **kwargs) - else: - return method_(*args, **kwargs) + method_ = self._original_methods[method] + return await method_(*args, **kwargs) except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: try: self._reinstantiate_substrate(e) From 91bfe7a9fa86e09a2d25cc8f96833bb2e2aa93c5 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 19:17:57 +0200 Subject: [PATCH 21/28] Sync substrate retry working. 
--- async_substrate_interface/substrate_addons.py | 62 +++++++++++-------- async_substrate_interface/sync_substrate.py | 14 +++-- 2 files changed, 47 insertions(+), 29 deletions(-) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index 7eb9628..bbca1e8 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -3,11 +3,14 @@ from functools import partial from itertools import cycle from typing import Optional +from websockets.exceptions import ConnectionClosed from async_substrate_interface.async_substrate import AsyncSubstrateInterface from async_substrate_interface.errors import MaxRetriesExceeded from async_substrate_interface.sync_substrate import SubstrateInterface +logger = logging.getLogger("async_substrate_interface") + RETRY_METHODS = [ "_get_block_handler", @@ -106,30 +109,32 @@ def __init__( max_retries=max_retries, ) initialized = True + logger.info(f"Connected to {chain_url}") break except ConnectionError: - logging.warning(f"Unable to connect to {chain_url}") + logger.warning(f"Unable to connect to {chain_url}") if not initialized: raise ConnectionError( f"Unable to connect at any chains specified: {[url] + fallback_chains}" ) + retry_methods = ["connect"] + RETRY_METHODS self._original_methods = { - method: getattr(self, method) for method in RETRY_METHODS + method: getattr(self, method) for method in retry_methods } - for method in RETRY_METHODS: + for method in retry_methods: setattr(self, method, partial(self._retry, method)) def _retry(self, method, *args, **kwargs): try: method_ = self._original_methods[method] return method_(*args, **kwargs) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + except (MaxRetriesExceeded, ConnectionError, EOFError, ConnectionClosed) as e: try: self._reinstantiate_substrate(e) - method_ = getattr(self, method) - return self._retry(method_(*args, **kwargs)) + method_ = 
self._original_methods[method] + return method_(*args, **kwargs) except StopIteration: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. No more fallback chains." ) raise MaxRetriesExceeded @@ -137,34 +142,31 @@ def _retry(self, method, *args, **kwargs): def _retry_property(self, property_): try: return getattr(self, property_) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + except (MaxRetriesExceeded, ConnectionError, EOFError, ConnectionClosed) as e: try: self._reinstantiate_substrate(e) return self._retry_property(property_) except StopIteration: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. No more fallback chains." ) raise MaxRetriesExceeded def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) + self.ws.close() if e.__class__ == MaxRetriesExceeded: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. Retrying with {next_network}." ) else: - print(f"Connection error. Trying again with {next_network}") - super().__init__( - url=next_network, - ss58_format=self.ss58_format, - type_registry=self.type_registry, - use_remote_preset=self.use_remote_preset, - chain_name=self.chain_name, - _mock=self._mock, - retry_timeout=self.retry_timeout, - max_retries=self.max_retries, - ) + logger.error(f"Connection error. Trying again with {next_network}") + self.url = next_network + self.chain_endpoint = next_network + self.initialized = False + self.ws = self.connect(init=True) + if not self._mock: + self.initialize() class RetryAsyncSubstrate(AsyncSubstrateInterface): @@ -213,7 +215,7 @@ def __init__( def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) if e.__class__ == MaxRetriesExceeded: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. Retrying with {next_network}." 
) else: @@ -228,12 +230,22 @@ def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: retry_timeout=self.retry_timeout, max_retries=self.max_retries, ) + self._original_methods = { + method: getattr(self, method) for method in RETRY_METHODS + } + for method in RETRY_METHODS: + setattr(self, method, partial(self._retry, method)) async def _retry(self, method, *args, **kwargs): try: method_ = self._original_methods[method] return await method_(*args, **kwargs) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: + except ( + MaxRetriesExceeded, + ConnectionError, + ConnectionRefusedError, + EOFError, + ) as e: try: self._reinstantiate_substrate(e) await self.initialize() @@ -243,7 +255,7 @@ async def _retry(self, method, *args, **kwargs): else: return method_(*args, **kwargs) except StopIteration: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. No more fallback chains." ) raise MaxRetriesExceeded @@ -256,7 +268,7 @@ async def _retry_property(self, property_): self._reinstantiate_substrate(e) return await self._retry_property(property_) except StopIteration: - logging.error( + logger.error( f"Max retries exceeded with {self.url}. No more fallback chains." 
) raise MaxRetriesExceeded diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index 21664d4..221a04a 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -1,5 +1,6 @@ import functools import logging +import socket from hashlib import blake2b from typing import Optional, Union, Callable, Any @@ -511,7 +512,6 @@ def __init__( "strict_scale_decode": True, } self.initialized = False - self._forgettable_task = None self.ss58_format = ss58_format self.type_registry = type_registry self.type_registry_preset = type_registry_preset @@ -587,13 +587,19 @@ def name(self): def connect(self, init=False): if init is True: - return connect(self.chain_endpoint, max_size=self.ws_max_size) + try: + return connect(self.chain_endpoint, max_size=self.ws_max_size) + except (ConnectionError, socket.gaierror) as e: + raise ConnectionError(e) else: if not self.ws.close_code: return self.ws else: - self.ws = connect(self.chain_endpoint, max_size=self.ws_max_size) - return self.ws + try: + self.ws = connect(self.chain_endpoint, max_size=self.ws_max_size) + return self.ws + except (ConnectionError, socket.gaierror) as e: + raise ConnectionError(e) def get_storage_item( self, module: str, storage_function: str, block_hash: str = None From 3f50aaff7a5f9faabe41ba71b8bccd3b971c66d5 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 20:52:42 +0200 Subject: [PATCH 22/28] Async also working. 
--- async_substrate_interface/async_substrate.py | 17 +- async_substrate_interface/substrate_addons.py | 151 +++++++++++------- 2 files changed, 104 insertions(+), 64 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 8df2341..f10b582 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -734,15 +734,14 @@ async def initialize(self): """ Initialize the connection to the chain. """ - async with self._lock: - self._initializing = True - if not self.initialized: - if not self._chain: - chain = await self.rpc_request("system_chain", []) - self._chain = chain.get("result") - await self.init_runtime() - self.initialized = True - self._initializing = False + self._initializing = True + if not self.initialized: + if not self._chain: + chain = await self.rpc_request("system_chain", []) + self._chain = chain.get("result") + await self.init_runtime() + self.initialized = True + self._initializing = False async def __aexit__(self, exc_type, exc_val, exc_tb): pass diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index bbca1e8..f36670b 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -1,11 +1,18 @@ +""" +A number of "plugins" for SubstrateInterface (and AsyncSubstrateInterface). At initial creation, it contains only +Retry (sync and async versions). 
+""" + import asyncio import logging +import socket from functools import partial from itertools import cycle from typing import Optional + from websockets.exceptions import ConnectionClosed -from async_substrate_interface.async_substrate import AsyncSubstrateInterface +from async_substrate_interface.async_substrate import AsyncSubstrateInterface, Websocket from async_substrate_interface.errors import MaxRetriesExceeded from async_substrate_interface.sync_substrate import SubstrateInterface @@ -69,6 +76,34 @@ class RetrySyncSubstrate(SubstrateInterface): + """ + A subclass of SubstrateInterface that allows for handling chain failures by using backup chains. If a sustained + network failure is encountered on a chain endpoint, the object will initialize a new connection on the next chain in + the `fallback_chains` list. If the `retry_forever` flag is set, upon reaching the last chain in `fallback_chains`, + the connection will attempt to iterate over the list (starting with `url`) again. + + E.g. + ``` + substrate = RetrySyncSubstrate( + "wss://entrypoint-finney.opentensor.ai:443", + fallback_chains=["ws://127.0.0.1:9946"] + ) + ``` + In this case, if there is a failure on entrypoint-finney, the connection will next attempt to hit localhost. If this + also fails, a `MaxRetriesExceeded` exception will be raised. + + ``` + substrate = RetrySyncSubstrate( + "wss://entrypoint-finney.opentensor.ai:443", + fallback_chains=["ws://127.0.0.1:9946"], + retry_forever=True + ) + ``` + In this case, rather than a MaxRetriesExceeded exception being raised upon failure of the second chain (localhost), + the object will again being to initialize a new connection on entrypoint-finney, and then localhost, and so on and + so forth. 
+ """ + def __init__( self, url: str, @@ -117,6 +152,7 @@ def __init__( raise ConnectionError( f"Unable to connect at any chains specified: {[url] + fallback_chains}" ) + # "connect" is only used by SubstrateInterface, not AsyncSubstrateInterface retry_methods = ["connect"] + RETRY_METHODS self._original_methods = { method: getattr(self, method) for method in retry_methods @@ -125,13 +161,12 @@ def __init__( setattr(self, method, partial(self._retry, method)) def _retry(self, method, *args, **kwargs): + method_ = self._original_methods[method] try: - method_ = self._original_methods[method] return method_(*args, **kwargs) except (MaxRetriesExceeded, ConnectionError, EOFError, ConnectionClosed) as e: try: self._reinstantiate_substrate(e) - method_ = self._original_methods[method] return method_(*args, **kwargs) except StopIteration: logger.error( @@ -139,19 +174,6 @@ def _retry(self, method, *args, **kwargs): ) raise MaxRetriesExceeded - def _retry_property(self, property_): - try: - return getattr(self, property_) - except (MaxRetriesExceeded, ConnectionError, EOFError, ConnectionClosed) as e: - try: - self._reinstantiate_substrate(e) - return self._retry_property(property_) - except StopIteration: - logger.error( - f"Max retries exceeded with {self.url}. No more fallback chains." - ) - raise MaxRetriesExceeded - def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: next_network = next(self.fallback_chains) self.ws.close() @@ -170,6 +192,34 @@ def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None: class RetryAsyncSubstrate(AsyncSubstrateInterface): + """ + A subclass of AsyncSubstrateInterface that allows for handling chain failures by using backup chains. If a + sustained network failure is encountered on a chain endpoint, the object will initialize a new connection on + the next chain in the `fallback_chains` list. 
If the `retry_forever` flag is set, upon reaching the last chain
+    in `fallback_chains`, the connection will attempt to iterate over the list (starting with `url`) again.
+
+    E.g.
+    ```
+    substrate = RetryAsyncSubstrate(
+        "wss://entrypoint-finney.opentensor.ai:443",
+        fallback_chains=["ws://127.0.0.1:9946"]
+    )
+    ```
+    In this case, if there is a failure on entrypoint-finney, the connection will next attempt to hit localhost. If this
+    also fails, a `MaxRetriesExceeded` exception will be raised.
+
+    ```
+    substrate = RetryAsyncSubstrate(
+        "wss://entrypoint-finney.opentensor.ai:443",
+        fallback_chains=["ws://127.0.0.1:9946"],
+        retry_forever=True
+    )
+    ```
+    In this case, rather than a MaxRetriesExceeded exception being raised upon failure of the second chain (localhost),
+    the object will again begin to initialize a new connection on entrypoint-finney, and then localhost, and so on and
+    so forth.
+    """
+
     def __init__(
         self,
         url: str,
@@ -212,62 +262,53 @@ def __init__(
         for method in RETRY_METHODS:
             setattr(self, method, partial(self._retry, method))

-    def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None:
+    async def _reinstantiate_substrate(self, e: Optional[Exception] = None) -> None:
         next_network = next(self.fallback_chains)
         if e.__class__ == MaxRetriesExceeded:
             logger.error(
                 f"Max retries exceeded with {self.url}. Retrying with {next_network}."
             )
         else:
-            print(f"Connection error. Trying again with {next_network}")
-            super().__init__(
-                url=next_network,
-                ss58_format=self.ss58_format,
-                type_registry=self.type_registry,
-                use_remote_preset=self.use_remote_preset,
-                chain_name=self.chain_name,
-                _mock=self._mock,
-                retry_timeout=self.retry_timeout,
-                max_retries=self.max_retries,
+            logger.error(f"Connection error. 
Trying again with {next_network}") + try: + await self.ws.shutdown() + except AttributeError: + pass + if self._forgettable_task is not None: + self._forgettable_task: asyncio.Task + self._forgettable_task.cancel() + try: + await self._forgettable_task + except asyncio.CancelledError: + pass + self.chain_endpoint = next_network + self.url = next_network + self.ws = Websocket( + next_network, + options={ + "max_size": self.ws_max_size, + "write_limit": 2**16, + }, ) - self._original_methods = { - method: getattr(self, method) for method in RETRY_METHODS - } - for method in RETRY_METHODS: - setattr(self, method, partial(self._retry, method)) + self._initialized = False + self._initializing = False + await self.initialize() async def _retry(self, method, *args, **kwargs): + method_ = self._original_methods[method] try: - method_ = self._original_methods[method] return await method_(*args, **kwargs) except ( MaxRetriesExceeded, ConnectionError, - ConnectionRefusedError, + ConnectionClosed, EOFError, + socket.gaierror, ) as e: try: - self._reinstantiate_substrate(e) - await self.initialize() - method_ = getattr(self, method) - if asyncio.iscoroutinefunction(method_): - return await method_(*args, **kwargs) - else: - return method_(*args, **kwargs) - except StopIteration: - logger.error( - f"Max retries exceeded with {self.url}. No more fallback chains." - ) - raise MaxRetriesExceeded - - async def _retry_property(self, property_): - try: - return await getattr(self, property_) - except (MaxRetriesExceeded, ConnectionError, ConnectionRefusedError) as e: - try: - self._reinstantiate_substrate(e) - return await self._retry_property(property_) - except StopIteration: + await self._reinstantiate_substrate(e) + return await method_(*args, **kwargs) + except StopAsyncIteration: logger.error( f"Max retries exceeded with {self.url}. No more fallback chains." 
) From a83f57e1966e5a1e4122d0e61359fbcb53b4f2ad Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 21:21:19 +0200 Subject: [PATCH 23/28] Add MetadataAtVersionNotFound exception. --- async_substrate_interface/async_substrate.py | 6 ++---- async_substrate_interface/errors.py | 10 ++++++++++ async_substrate_interface/sync_substrate.py | 20 +++++++++++++++----- 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/async_substrate_interface/async_substrate.py b/async_substrate_interface/async_substrate.py index 8df2341..d026e6c 100644 --- a/async_substrate_interface/async_substrate.py +++ b/async_substrate_interface/async_substrate.py @@ -39,6 +39,7 @@ ExtrinsicNotFound, BlockNotFound, MaxRetriesExceeded, + MetadataAtVersionNotFound, ) from async_substrate_interface.protocols import Keypair from async_substrate_interface.types import ( @@ -817,10 +818,7 @@ async def _load_registry_at_block( "Client error: Execution failed: Other: Exported method Metadata_metadata_at_version is not found" in e.args ): - raise SubstrateRequestException( - "You are attempting to call a block too old for this version of async-substrate-interface. Please" - " instead use legacy py-substrate-interface for these very old blocks." - ) + raise MetadataAtVersionNotFound else: raise e metadata_option_hex_str = metadata_rpc_result["result"] diff --git a/async_substrate_interface/errors.py b/async_substrate_interface/errors.py index 98114fe..c6a2d8d 100644 --- a/async_substrate_interface/errors.py +++ b/async_substrate_interface/errors.py @@ -12,6 +12,16 @@ class MaxRetriesExceeded(SubstrateRequestException): pass +class MetadataAtVersionNotFound(SubstrateRequestException): + def __init__(self): + message = ( + "Exported method Metadata_metadata_at_version is not found. This indicates the block is quite old, and is" + "not supported by async-substrate-interface. 
If you need this, we recommend using the legacy " + "substrate-interface (https://github.com/JAMdotTech/py-polkadot-sdk)." + ) + super().__init__(message) + + class StorageFunctionNotFound(ValueError): pass diff --git a/async_substrate_interface/sync_substrate.py b/async_substrate_interface/sync_substrate.py index 21664d4..cbd7136 100644 --- a/async_substrate_interface/sync_substrate.py +++ b/async_substrate_interface/sync_substrate.py @@ -21,6 +21,7 @@ SubstrateRequestException, BlockNotFound, MaxRetriesExceeded, + MetadataAtVersionNotFound, ) from async_substrate_interface.protocols import Keypair from async_substrate_interface.types import ( @@ -617,11 +618,20 @@ def _get_current_block_hash( def _load_registry_at_block(self, block_hash: Optional[str]) -> MetadataV15: # Should be called for any block that fails decoding. # Possibly the metadata was different. - metadata_rpc_result = self.rpc_request( - "state_call", - ["Metadata_metadata_at_version", self.metadata_version_hex], - block_hash=block_hash, - ) + try: + metadata_rpc_result = self.rpc_request( + "state_call", + ["Metadata_metadata_at_version", self.metadata_version_hex], + block_hash=block_hash, + ) + except SubstrateRequestException as e: + if ( + "Client error: Execution failed: Other: Exported method Metadata_metadata_at_version is not found" + in e.args + ): + raise MetadataAtVersionNotFound + else: + raise e metadata_option_hex_str = metadata_rpc_result["result"] metadata_option_bytes = bytes.fromhex(metadata_option_hex_str[2:]) metadata = MetadataV15.decode_from_metadata_option(metadata_option_bytes) From 767e122570b10a2768f95ee2109ffd15eb53cfd4 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 23:03:38 +0200 Subject: [PATCH 24/28] [WIP] tests --- tests/conftest.py | 24 +++++++++++++++++++++++ tests/test_substrate_addons.py | 36 ++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 tests/conftest.py create mode 100644 
tests/test_substrate_addons.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..16f0050 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,24 @@ +import subprocess +import time + +CONTAINER_NAME_PREFIX = "test_local_chain_" +LOCALNET_IMAGE_NAME = "ghcr.io/opentensor/subtensor-localnet:devnet-ready" + + +def start_docker_container(exposed_port, name_salt: str): + container_name = f"{CONTAINER_NAME_PREFIX}{name_salt}" + + # Command to start container + cmds = [ + "docker", + "run", + "--rm", + "--name", + container_name, + "-p", + f"{exposed_port}:9945", + LOCALNET_IMAGE_NAME, + ] + + proc = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + return proc, container_name diff --git a/tests/test_substrate_addons.py b/tests/test_substrate_addons.py new file mode 100644 index 0000000..904f36d --- /dev/null +++ b/tests/test_substrate_addons.py @@ -0,0 +1,36 @@ +import threading +import subprocess + +import pytest +import time + +from async_substrate_interface.substrate_addons import RetrySyncSubstrate +from tests.conftest import start_docker_container + + +@pytest.fixture(scope="function") +def start_containers(): + # Store our subprocesses globally + processes = (start_docker_container(9945, 9945), start_docker_container(9946, 9946)) + yield processes + + # To stop the instances, you can iterate over the processes and kill them: + for process in processes: + subprocess.run(["docker", "kill", process[1]]) + process[0].kill() + + +def test_retry_sync_substrate(start_containers): + container1, container2 = start_containers + time.sleep(10) + with RetrySyncSubstrate( + "ws://127.0.0.1:9945", fallback_chains=["ws://127.0.0.1:9946"] + ) as substrate: + for i in range(10): + assert substrate.get_chain_head().startswith("0x") + if i == 8: + subprocess.run(["docker", "kill", container1[1]]) + time.sleep(10) + if i > 8: + assert substrate.chain_endpoint == "ws://127.0.0.1:9946" + time.sleep(2) From 
d3f647350c199901c17a0dfe5af5ec848f8e6084 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Tue, 6 May 2025 23:24:14 +0200 Subject: [PATCH 25/28] improved test a bit --- tests/conftest.py | 6 ++++-- tests/test_substrate_addons.py | 30 ++++++++++++++++++------------ 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 16f0050..0e312e3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,11 @@ import subprocess -import time +from collections import namedtuple CONTAINER_NAME_PREFIX = "test_local_chain_" LOCALNET_IMAGE_NAME = "ghcr.io/opentensor/subtensor-localnet:devnet-ready" +Container = namedtuple("Container", ["process", "name", "uri"]) + def start_docker_container(exposed_port, name_salt: str): container_name = f"{CONTAINER_NAME_PREFIX}{name_salt}" @@ -21,4 +23,4 @@ def start_docker_container(exposed_port, name_salt: str): ] proc = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - return proc, container_name + return Container(proc, container_name, f"ws://127.0.0.1:{exposed_port}") diff --git a/tests/test_substrate_addons.py b/tests/test_substrate_addons.py index 904f36d..9ce3fee 100644 --- a/tests/test_substrate_addons.py +++ b/tests/test_substrate_addons.py @@ -9,28 +9,34 @@ @pytest.fixture(scope="function") -def start_containers(): - # Store our subprocesses globally +def docker_containers(): processes = (start_docker_container(9945, 9945), start_docker_container(9946, 9946)) - yield processes + try: + yield processes - # To stop the instances, you can iterate over the processes and kill them: - for process in processes: - subprocess.run(["docker", "kill", process[1]]) - process[0].kill() + finally: + for process in processes: + subprocess.run(["docker", "kill", process[1]]) + process[0].kill() -def test_retry_sync_substrate(start_containers): - container1, container2 = start_containers +def test_retry_sync_substrate(docker_containers): time.sleep(10) with 
RetrySyncSubstrate( - "ws://127.0.0.1:9945", fallback_chains=["ws://127.0.0.1:9946"] + docker_containers[0].uri, fallback_chains=[docker_containers[1].uri] ) as substrate: for i in range(10): assert substrate.get_chain_head().startswith("0x") if i == 8: - subprocess.run(["docker", "kill", container1[1]]) + subprocess.run(["docker", "stop", docker_containers[0].name]) time.sleep(10) if i > 8: - assert substrate.chain_endpoint == "ws://127.0.0.1:9946" + assert substrate.chain_endpoint == docker_containers[1].uri time.sleep(2) + + +def test_retry_sync_substrate_offline(): + with pytest.raises(ConnectionError): + RetrySyncSubstrate( + "ws://127.0.0.1:9945", fallback_chains=["ws://127.0.0.1:9946"] + ) From 1baab0f43ab5249052ffb6563765008c448e66a4 Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Wed, 7 May 2025 20:42:46 +0200 Subject: [PATCH 26/28] Add `chain_endpoint` and `url` prior to super init. --- async_substrate_interface/substrate_addons.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index f36670b..be611c4 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -129,9 +129,13 @@ def __init__( self._mock = _mock self.retry_timeout = retry_timeout self.max_retries = max_retries + self.chain_endpoint = url + self.url = url initialized = False for chain_url in [url] + fallback_chains: try: + self.chain_endpoint = chain_url + self.url = chain_url super().__init__( url=chain_url, ss58_format=ss58_format, From c9a13ff41149045f4a4ab170f491df387ce6146a Mon Sep 17 00:00:00 2001 From: Benjamin Himes Date: Wed, 7 May 2025 21:43:01 +0200 Subject: [PATCH 27/28] More tests. 
--- async_substrate_interface/substrate_addons.py | 8 +++- tests/test_substrate_addons.py | 44 ++++++++++++++++--- 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/async_substrate_interface/substrate_addons.py b/async_substrate_interface/substrate_addons.py index be611c4..5edb26a 100644 --- a/async_substrate_interface/substrate_addons.py +++ b/async_substrate_interface/substrate_addons.py @@ -168,7 +168,13 @@ def _retry(self, method, *args, **kwargs): method_ = self._original_methods[method] try: return method_(*args, **kwargs) - except (MaxRetriesExceeded, ConnectionError, EOFError, ConnectionClosed) as e: + except ( + MaxRetriesExceeded, + ConnectionError, + EOFError, + ConnectionClosed, + TimeoutError, + ) as e: try: self._reinstantiate_substrate(e) return method_(*args, **kwargs) diff --git a/tests/test_substrate_addons.py b/tests/test_substrate_addons.py index 9ce3fee..686a028 100644 --- a/tests/test_substrate_addons.py +++ b/tests/test_substrate_addons.py @@ -5,8 +5,11 @@ import time from async_substrate_interface.substrate_addons import RetrySyncSubstrate +from async_substrate_interface.errors import MaxRetriesExceeded from tests.conftest import start_docker_container +LATENT_LITE_ENTRYPOINT = "wss://lite.sub.latent.to:443" + @pytest.fixture(scope="function") def docker_containers(): @@ -16,22 +19,51 @@ def docker_containers(): finally: for process in processes: - subprocess.run(["docker", "kill", process[1]]) - process[0].kill() + subprocess.run(["docker", "kill", process.name]) + process.process.kill() + + +@pytest.fixture(scope="function") +def single_local_chain(): + process = start_docker_container(9945, 9945) + try: + yield process + finally: + print("TRIGGERED KILL") + subprocess.run(["docker", "kill", process.name]) + process.process.kill() -def test_retry_sync_substrate(docker_containers): +def test_retry_sync_substrate(single_local_chain): time.sleep(10) with RetrySyncSubstrate( - docker_containers[0].uri, 
fallback_chains=[docker_containers[1].uri] + single_local_chain.uri, fallback_chains=[LATENT_LITE_ENTRYPOINT] ) as substrate: for i in range(10): assert substrate.get_chain_head().startswith("0x") if i == 8: - subprocess.run(["docker", "stop", docker_containers[0].name]) - time.sleep(10) + subprocess.run(["docker", "stop", single_local_chain.name]) if i > 8: + assert substrate.chain_endpoint == LATENT_LITE_ENTRYPOINT + time.sleep(2) + + +def test_retry_sync_substrate_max_retries(docker_containers): + time.sleep(10) + with RetrySyncSubstrate( + docker_containers[0].uri, fallback_chains=[docker_containers[1].uri] + ) as substrate: + for i in range(5): + print("EYE EQUALS", i) + assert substrate.get_chain_head().startswith("0x") + if i == 2: + subprocess.run(["docker", "pause", docker_containers[0].name]) + if i == 3: assert substrate.chain_endpoint == docker_containers[1].uri + if i == 4: + subprocess.run(["docker", "pause", docker_containers[1].name]) + with pytest.raises(MaxRetriesExceeded): + substrate.get_chain_head().startswith("0x") time.sleep(2) From 18f165a8711e3c0587c327d5d1dbbeb330b3e50b Mon Sep 17 00:00:00 2001 From: ibraheem-opentensor Date: Wed, 7 May 2025 13:23:51 -0700 Subject: [PATCH 28/28] Bumps version and changelog --- CHANGELOG.md | 17 +++++++++++++++++ pyproject.toml | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3454ca7..009945c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## 1.2.0 /2025-05-07 + +## What's Changed +* Add missing methods by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/104 +* Max subscriptions semaphore added by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/107 +* Expose `_get_block_handler` publicly by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/108 +* safe `__del__` by @thewhaleking in 
https://github.com/opentensor/async-substrate-interface/pull/110 +* Tensorshield/main by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/111 +* Support async key implementations by @immortalizzy in https://github.com/opentensor/async-substrate-interface/pull/94 +* Add MetadataAtVersionNotFound error by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/113 +* Fallback chains by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/100 + +## New Contributors +* @immortalizzy made their first contribution in https://github.com/opentensor/async-substrate-interface/pull/94 + +**Full Changelog**: https://github.com/opentensor/async-substrate-interface/compare/v1.1.1...v1.2.0 + ## 1.1.1 /2025-04-26 ## What's Changed diff --git a/pyproject.toml b/pyproject.toml index f5b7cec..cdfa7e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "async-substrate-interface" -version = "1.1.1" +version = "1.2.0" description = "Asyncio library for interacting with substrate. Mostly API-compatible with py-substrate-interface" readme = "README.md" license = { file = "LICENSE" }