-
Notifications
You must be signed in to change notification settings - Fork 2.1k
CHIA-2638 Full Node RPC Validation Tool #19743
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from 15 commits
Commits
Show all changes
24 commits
Select commit
Hold shift + click to select a range
16a3f8d
add rpc validation tool
jack60612 606c6a6
forgot to call function ....
jack60612 71b6354
fix no sync error
jack60612 3602883
add comment
jack60612 ac9b2ca
Update validate_rpcs.py
jack60612 b5c9422
test slight modifications
jack60612 8454ed2
Update validate_rpcs.py
jack60612 22ca930
try fixing time measuring
jack60612 8c70d98
fix stupidity
jack60612 6a7bf8a
make benchmark more precise
jack60612 dbf7a33
fixes + optimizations
jack60612 16f79e3
Update validate_rpcs.py
jack60612 d5169db
speed?
jack60612 444ea37
Update validate_rpcs.py
jack60612 cd13f2b
last tweak
jack60612 113c626
Address Minor Changes
jack60612 1416872
change to task group
jack60612 ddec58b
Revert "change to task group"
jack60612 0625b91
try super speedy code
jack60612 3d6c351
meant to remove
jack60612 8e5031b
add + rem seperate error cond
jack60612 76605d8
loop through dropped blocks.
jack60612 30b704f
rename to pipline and abandon retries
jack60612 848331d
revert changes to rpcs
jack60612 File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,256 @@ | ||
#!/usr/bin/env python3 | ||
|
||
from __future__ import annotations | ||
|
||
import asyncio | ||
import time | ||
from collections.abc import Coroutine | ||
from pathlib import Path | ||
from typing import Any, Optional | ||
|
||
import aiofiles | ||
import click | ||
from chia_rs.sized_bytes import bytes32 | ||
|
||
from chia.cmds.cmds_util import get_any_service_client | ||
from chia.full_node.full_node_rpc_client import FullNodeRpcClient | ||
from chia.util.default_root import resolve_root_path | ||
from chia.util.path import path_from_root | ||
|
||
DEFAULT_REQUESTS_PER_BATCH: int = 25000 | ||
|
||
|
||
def get_height_to_hash_filename(root_path: Path, config: dict[str, Any]) -> Path:
    """Return the path of the node's height-to-hash cache file.

    The cache lives in the same directory as the blockchain database; for
    networks other than mainnet the base name gets a ``-<network>`` suffix.
    """
    full_node_config = config["full_node"]
    database_file: Path = root_path / full_node_config["database_path"]
    database_dir: Path = path_from_root(root_path, database_file).parent
    network = full_node_config["selected_network"]
    if network is None or network == "mainnet":
        cache_name = "height-to-hash"
    else:
        cache_name = f"height-to-hash-{network}"
    return database_dir / cache_name
|
||
|
||
async def get_block_cache_bytearray(root_path: Path, config: dict[str, Any], peak: int) -> bytearray:
    """Read the height-to-hash file into memory, sized for ``peak``.

    The returned buffer holds exactly ``(peak + 1) * 32`` bytes — one 32-byte
    header hash per height from 0 through ``peak``.  A file that is longer
    than that on disk is truncated; a shorter one is zero-padded.
    """
    cache_path: Path = get_height_to_hash_filename(root_path, config)
    # Pull the whole file into memory with a single read.
    async with aiofiles.open(cache_path, "rb") as cache_file:
        height_to_hash = bytearray(await cache_file.read())
    # Resize to exactly what the peak requires; this also truncates a file
    # on disk that had an invalid (too large) size.
    expected_size = (peak + 1) * 32
    actual_size = len(height_to_hash)
    if actual_size > expected_size:
        del height_to_hash[expected_size:]
    else:
        height_to_hash += bytearray(expected_size - actual_size)
    return height_to_hash
|
||
|
||
|
||
def get_block_header_from_height(height: int, height_to_hash: bytearray) -> bytes32:
    """Look up the header hash stored for ``height`` in the flat cache buffer.

    Args:
        height: Block height to resolve; must index into the buffer.
        height_to_hash: Flat buffer of consecutive 32-byte header hashes,
            indexed by height.

    Returns:
        The 32-byte header hash for ``height``.

    Raises:
        ValueError: If ``height`` is negative or past the end of the buffer.
    """
    idx = height * 32
    # Explicit check instead of ``assert`` so the bounds test still runs under
    # ``python -O`` (asserts are stripped there); also reject negative heights,
    # which the original upper-bound-only assert let through.
    if height < 0 or idx + 32 > len(height_to_hash):
        raise ValueError(f"height {height} is outside the height-to-hash cache ({len(height_to_hash)} bytes)")
    return bytes32(height_to_hash[idx : idx + 32])
|
||
|
||
@click.command(help="Test RPC endpoints using chain", no_args_is_help=True)
@click.option(
    "--root-path",
    default=resolve_root_path(override=None),
    help="Config file root",
    type=click.Path(),
    show_default=True,
)
@click.option(
    "-p",
    "--rpc-port",
    help=(
        "Set the port where the Full Node is hosting the RPC interface. See the rpc_port under full_node in config.yaml"
    ),
    type=int,
    default=None,
)
@click.option(
    "-sc",
    "--spends_with_conditions",
    help="Test get_block_spends_with_conditions",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-sp",
    "--block_spends",
    help="Test get_block_spends",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-ar",
    "--additions_and_removals",
    help="Test get_additions_and_removals",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-s",
    "--start-height",
    help="Start height for the RPC calls",
    type=int,
    default=None,
)
@click.option(
    "-e",
    "--end-height",
    help="End height for the RPC calls",
    type=int,
    default=None,
)
@click.option(
    "-c",
    "--concurrent-requests",
    help="Number of concurrent requests to make to the RPC endpoints",
    type=int,
    default=DEFAULT_REQUESTS_PER_BATCH,
)
def cli(
    root_path: str,
    rpc_port: Optional[int],
    spends_with_conditions: bool,
    block_spends: bool,
    additions_and_removals: bool,
    concurrent_requests: int,
    start_height: Optional[int] = None,
    end_height: Optional[int] = None,
) -> None:
    """Synchronous click entry point: validate the flags, then run cli_async."""
    root_path_path = Path(root_path)
    # Count how many RPC endpoints will be called per block; at least one
    # endpoint flag must be set for the tool to do anything.
    requests_per_batch = 0
    if spends_with_conditions:
        requests_per_batch += 1
    if block_spends:
        requests_per_batch += 1
    if additions_and_removals:
        requests_per_batch += 1
    if requests_per_batch == 0:
        print("No RPC calls selected. Exiting.")
        return
    # Scale the per-segment block count down so the total number of in-flight
    # RPC calls stays near the requested budget no matter how many endpoints
    # are enabled (always at least one block per segment).
    concurrent_requests = max(1, concurrent_requests // requests_per_batch)
    if start_height is None:
        start_height = 0
    asyncio.run(
        cli_async(
            root_path=root_path_path,
            rpc_port=rpc_port,
            spends_with_conditions=spends_with_conditions,
            block_spends=block_spends,
            additions_and_removals=additions_and_removals,
            start_height=start_height,
            end_height=end_height,
            concurrent_requests=concurrent_requests,
        )
    )
|
||
|
||
async def cli_async(
    root_path: Path,
    rpc_port: Optional[int],
    spends_with_conditions: bool,
    block_spends: bool,
    additions_and_removals: bool,
    concurrent_requests: int,
    start_height: int,
    end_height: Optional[int] = None,
) -> None:
    """Exercise the selected full-node RPC endpoints over a range of blocks.

    For every height in ``[start_height, end_height]`` the block header hash
    is looked up in the local height-to-hash cache and each enabled endpoint
    is called with it.  Calls for a segment of ``concurrent_requests`` blocks
    are issued concurrently, and timing statistics are printed as the range
    is processed.

    Args:
        root_path: Chia root directory (location of config and database).
        rpc_port: Full node RPC port override; ``None`` uses the config value.
        spends_with_conditions: Call ``get_block_spends_with_conditions``.
        block_spends: Call ``get_block_spends``.
        additions_and_removals: Call ``get_additions_and_removals``.
        concurrent_requests: Number of blocks per concurrent segment.
        start_height: First block height to validate.
        end_height: Last block height to validate (inclusive); defaults to
            the current peak height.

    Raises:
        Exception: Re-raises the first failing RPC call within a segment.
    """
    blocks_per_status: int = 3000
    # Bug fix: start the status window at start_height (was 0), otherwise the
    # first report's block count is wrong whenever start_height > 0.
    last_status_height: int = start_height

    async with get_any_service_client(FullNodeRpcClient, root_path, rpc_port) as (
        node_client,
        config,
    ):
        blockchain_state: dict[str, Any] = await node_client.get_blockchain_state()
        if blockchain_state is None or blockchain_state["peak"] is None:
            # The peak height is required to size the header-hash cache.
            print("No blockchain found. Exiting.")
            return
        peak_height = blockchain_state["peak"].height
        assert peak_height is not None, "Blockchain peak height is None"
        if end_height is None:
            # Bug fix: reuse peak_height; the previous subscript access
            # (blockchain_state["peak"]["height"]) did not match the
            # attribute access used above.
            end_height = peak_height

        print("Connected to Full Node")

        block_cache_bytearray: bytearray = await get_block_cache_bytearray(
            root_path=root_path,
            config=config,
            peak=peak_height,
        )

        print("Bytearray loaded with block header hashes from height-to-hash file.")

        # Measure per-segment time for the periodic status report, and also
        # total time for the whole run.
        cycle_start: float = time.time()
        start_time: float = cycle_start

        start_segment: int = start_height
        # Bug fix: the previous condition (while end_segment <= end_height)
        # silently skipped the trailing partial segment whenever the range
        # size was not a multiple of concurrent_requests; clamping the
        # segment end processes every block through end_height inclusive.
        while start_segment <= end_height:
            end_segment: int = min(start_segment + concurrent_requests, end_height + 1)
            # One coroutine per enabled endpoint per block in this segment.
            pending_tasks: list[Coroutine[Any, Any, Any]] = []
            for i in range(start_segment, end_segment):
                block_header_hash: bytes32 = get_block_header_from_height(i, block_cache_bytearray)
                if spends_with_conditions:
                    pending_tasks.append(node_client.get_block_spends_with_conditions(block_header_hash))
                if block_spends:
                    pending_tasks.append(node_client.get_block_spends(block_header_hash))
                if additions_and_removals:
                    pending_tasks.append(node_client.get_additions_and_removals(block_header_hash))
            try:
                results = await asyncio.gather(*pending_tasks)
                for result in results:
                    if result is None:
                        raise ValueError("Received None from RPC call")
            except Exception as e:
                print(f"Error processing block range {start_segment} to {end_segment}: {e}")
                # Bare raise preserves the original traceback.
                raise
            # Print status every blocks_per_status blocks.
            if end_segment - last_status_height >= blocks_per_status:
                time_taken = time.time() - cycle_start
                time_per_block = time_taken / (end_segment - last_status_height)
                print(
                    f"Time taken for segment"
                    f" {last_status_height} to {end_segment}: {time_taken:.2f} seconds\n"
                    f"Average time per block: {time_per_block:.4f} seconds"
                )
                last_status_height = end_segment
                cycle_start = time.time()

            # Advance to the next segment.
            start_segment = end_segment
        print(f"Finished processing blocks from {start_height} to {end_height} (peak: {peak_height})")
        # Bug fix: the range is inclusive and may be a single block; the old
        # divisor (end_height - start_height) divided by zero in that case.
        total_blocks = max(1, end_height - start_height + 1)
        print(
            f"Time per block for the whole process: "
            f"{(time.time() - start_time) / total_blocks:.4f} seconds"
        )
|
||
|
||
def main() -> None:
    # Console-script entry point: dispatch straight to the click command.
    cli()


if __name__ == "__main__":
    main()
jack60612 marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.