Commit 4b66d18

Add SMASH service support for testing

1 parent 8b0442e

11 files changed (+522 −4 lines)
Lines changed: 16 additions & 0 deletions (new file)

@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set -uo pipefail
+
+SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")"
+STATE_CLUSTER="${SOCKET_PATH%/*}"
+STATE_CLUSTER_NAME="${STATE_CLUSTER##*/}"
+
+export PGPASSFILE="$STATE_CLUSTER/pgpass"
+export SMASH_ADMIN="${SMASH_ADMIN:-admin}"
+export SMASH_PASSWORD="${SMASH_PASSWORD:-password}"
+export SMASH_ADMINS_FILE="$STATE_CLUSTER/admins.txt"
+
+echo "${SMASH_ADMIN}, ${SMASH_PASSWORD}" > "$SMASH_ADMINS_FILE"
+
+exec "$DBSYNC_REPO/smash-server/bin/cardano-smash-server" --config "./$STATE_CLUSTER_NAME/dbsync-config.yaml" --port %%SMASH_PORT%% --admins "$SMASH_ADMINS_FILE"
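For orientation (not part of this commit): the %%SMASH_PORT%% placeholder above is substituted per cluster instance (see cluster_scripts.py below), and a test could poll the resulting server until it answers. A minimal Python sketch, assuming the public SMASH `/api/v1/status` endpoint and a hypothetical SMASH_PORT environment variable:

import os
import time

import requests

# Hypothetical: the port the cluster scripts substituted for %%SMASH_PORT%%
SMASH_URL = f"http://localhost:{os.environ.get('SMASH_PORT', '3100')}"


def wait_for_smash(timeout: float = 60.0) -> None:
    """Poll the SMASH status endpoint until it responds or `timeout` expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if requests.get(f"{SMASH_URL}/api/v1/status", timeout=5).status_code == 200:
                return
        except requests.exceptions.ConnectionError:
            pass  # server not listening yet
        time.sleep(2)
    raise TimeoutError("SMASH server did not come up in time")
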
cardano_node_tests/cluster_scripts/conway_fast/start-cluster

Lines changed: 24 additions & 0 deletions

@@ -2,6 +2,7 @@
 
 # controlling environment variables:
 # DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository
+# SMASH - if set, will start and configure smash
 # ENABLE_LEGACY - if set, local cluster will use legacy networking
 # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others
 # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset
@@ -206,6 +207,23 @@ startsecs=5
 EoF
 fi
 
+# enable smash service
+if [ -n "${DBSYNC_REPO:-""}" ] && [ -n "${SMASH:-""}" ]; then
+  [ -e "${DBSYNC_REPO}/smash-server/bin/cardano-smash-server" ] || \
+    { echo "The \`${DBSYNC_REPO}/smash-server/bin/cardano-smash-server\` not found, line $LINENO" >&2; exit 1; } # assert
+
+  cat >> "${STATE_CLUSTER}/supervisor.conf" <<EoF
+
+[program:smash]
+command=${SCRIPT_DIR}/run-cardano-smash
+stderr_logfile=./${STATE_CLUSTER_NAME}/smash.stderr
+stdout_logfile=./${STATE_CLUSTER_NAME}/smash.stdout
+autostart=false
+autorestart=false
+startsecs=5
+EoF
+fi
+
 # enable cardano-submit-api service
 if [ "$ENABLE_SUBMIT_API" -eq 1 ]; then
   cat >> "${STATE_CLUSTER}/supervisor.conf" <<EoF
@@ -625,6 +643,12 @@ if [ -n "${DBSYNC_REPO:-""}" ]; then
   supervisorctl -s "unix:///${SUPERVISORD_SOCKET_PATH}" start dbsync
 fi
 
+# start smash
+if [ -n "${DBSYNC_REPO:-""}" ] && [ -n "${SMASH:-""}" ]; then
+  echo "Starting smash"
+  supervisorctl -s "unix:///${SUPERVISORD_SOCKET_PATH}" start smash
+fi
+
 echo "Sleeping for initial Tx submission delay of $TX_SUBMISSION_DELAY seconds"
 sleep "$TX_SUBMISSION_DELAY"
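The smash program is registered with autostart=false and started explicitly only after db-sync is up. A sketch (not part of this commit) of how one could assert the program reached RUNNING state over the same supervisord unix socket that supervisorctl uses; the supervisor package's XML-RPC transport is assumed:

import os
import xmlrpc.client

from supervisor.xmlrpc import SupervisorTransport

# Same socket path the start-cluster script passes to supervisorctl
socket_path = os.environ["SUPERVISORD_SOCKET_PATH"]
proxy = xmlrpc.client.ServerProxy(
    "http://127.0.0.1",  # placeholder URL; the transport talks to the unix socket
    transport=SupervisorTransport(None, None, f"unix://{socket_path}"),
)
info = proxy.supervisor.getProcessInfo("smash")
assert info["statename"] == "RUNNING", info
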
cardano_node_tests/tests/conftest.py

Lines changed: 7 additions & 0 deletions

@@ -119,6 +119,13 @@ def pytest_configure(config: tp.Any) -> None:
     config.stash[metadata_key]["db-sync ghc"] = VERSIONS.dbsync_ghc
     config.stash[metadata_key]["db-sync exe"] = str(configuration.DBSYNC_BIN)
 
+    config.stash[metadata_key]["HAS_SMASH"] = str(configuration.HAS_SMASH)
+    if configuration.HAS_SMASH:
+        config.stash[metadata_key]["smash"] = str(VERSIONS.smash)
+        config.stash[metadata_key]["smash rev"] = VERSIONS.smash_git_rev
+        config.stash[metadata_key]["smash ghc"] = VERSIONS.smash_ghc
+        config.stash[metadata_key]["smash exe"] = str(configuration.SMASH_BIN)
+
     if "nix/store" not in config.stash[metadata_key]["cardano-cli exe"]:
         LOGGER.warning(" WARNING: Using `cardano-cli` from custom path!")
     if "nix/store" not in config.stash[metadata_key]["cardano-node exe"]:

cardano_node_tests/tests/test_pools.py

Lines changed: 8 additions & 2 deletions

@@ -28,6 +28,7 @@
 from cardano_node_tests.utils import dbsync_utils
 from cardano_node_tests.utils import helpers
 from cardano_node_tests.utils import locking
+from cardano_node_tests.utils import smash_utils
 from cardano_node_tests.utils import temptools
 from cardano_node_tests.utils import tx_view
 from cardano_node_tests.utils import web
@@ -696,6 +697,7 @@ def _query_func():
     @pytest.mark.testnets
     @pytest.mark.smoke
     @pytest.mark.dbsync
+    @pytest.mark.smash
     def test_stake_pool_not_avail_metadata(
         self,
         cluster_manager: cluster_management.ClusterManager,
@@ -720,14 +722,15 @@ def test_stake_pool_not_avail_metadata(
             out_file=f"{pool_name}_registration_metadata.json", content=pool_metadata
         )
         pool_metadata_url = "https://www.where_metadata_file_is_located.com"
+        pool_metadata_hash = cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file)
 
         pool_data = clusterlib.PoolData(
             pool_name=pool_name,
             pool_pledge=1_000,
             pool_cost=cluster.g_query.get_protocol_params().get("minPoolCost", 500),
             pool_margin=0.2,
             pool_metadata_url=pool_metadata_url,
-            pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file),
+            pool_metadata_hash=pool_metadata_hash,
         )
@@ -766,6 +769,7 @@ def _query_func():
         )
 
         dbsync_utils.retry_query(query_func=_query_func, timeout=360)
+        smash_utils.check_smash_pool_errors(pool_creation_out.stake_pool_id, pool_metadata_hash)
 
     @allure.link(helpers.get_vcs_link())
     @common.PARAM_USE_BUILD_CMD
@@ -820,6 +824,7 @@ def test_create_stake_pool(
     @common.PARAM_USE_BUILD_CMD
     @pytest.mark.testnets
     @pytest.mark.dbsync
+    @pytest.mark.smash
     def test_deregister_stake_pool(
         self,
         cluster_manager: cluster_management.ClusterManager,
@@ -944,6 +949,7 @@ def test_deregister_stake_pool(
         dbsync_utils.check_pool_deregistration(
             pool_id=pool_creation_out.stake_pool_id, retiring_epoch=depoch
         )
+        smash_utils.check_smash_pool_retired(pool_id=pool_creation_out.stake_pool_id)
 
         # Check `transaction view` command
         tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output)
@@ -2421,4 +2427,4 @@ def test_stake_pool_long_metadata_url(
         )
         assert "option --metadata-url: The provided string must have at most 64 characters" in str(
             excinfo.value
-        )
+        )
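
The smash_utils helpers called above (check_smash_pool_errors, check_smash_pool_retired) are added elsewhere in this commit and not shown on this page. A condensed sketch of what they plausibly do, assuming the public SMASH API's errors and retired-pools endpoints; base URL and field names are assumptions:

import requests

SMASH_URL = "http://localhost:3100"  # hypothetical base URL


def check_smash_pool_errors(pool_id: str, metadata_hash: str) -> None:
    """Expect SMASH to have recorded a metadata fetch error for the pool."""
    response = requests.get(f"{SMASH_URL}/api/v1/errors/{pool_id}", timeout=10)
    response.raise_for_status()
    errors = response.json()
    # Field names follow the SMASH API docs and are assumptions
    assert any(e.get("poolHash") == metadata_hash for e in errors), errors


def check_smash_pool_retired(pool_id: str) -> None:
    """Expect the deregistered pool to appear in SMASH's retired-pools list."""
    response = requests.get(f"{SMASH_URL}/api/v1/retired", timeout=10)
    response.raise_for_status()
    assert any(p.get("poolId") == pool_id for p in response.json())
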
Lines changed: 204 additions & 0 deletions (new file)

@@ -0,0 +1,204 @@
+"""Tests for basic SMASH operations."""
+
+import logging
+import re
+from http import HTTPStatus
+
+import pytest
+import requests
+from cardano_clusterlib import clusterlib
+
+from cardano_node_tests.utils import configuration
+from cardano_node_tests.utils import dbsync_queries
+from cardano_node_tests.utils import dbsync_utils
+from cardano_node_tests.utils import helpers
+from cardano_node_tests.utils import logfiles
+from cardano_node_tests.utils import smash_utils
+
+LOGGER = logging.getLogger(__name__)
+
+
+@pytest.fixture(autouse=True)
+def check_smash_availability() -> None:
+    """Fixture to check SMASH availability before each test."""
+    if not configuration.HAS_SMASH:
+        pytest.skip("Skipping test because SMASH service is not available.")
+
+
+def check_request_error(
+    err: requests.exceptions.RequestException,
+    expected_status: HTTPStatus,
+    expected_code: str | None,
+    expected_description: str,
+) -> None:
+    """Assert expected HTTP errors in requests, handling both JSON and text responses."""
+    response = err.response
+    assert response.status_code == expected_status
+
+    try:
+        error_data = response.json()
+        actual_code = error_data.get("code")
+        actual_description = error_data.get("description")
+    except ValueError:
+        # If not JSON, treat the entire response as text
+        actual_code = None
+        actual_description = response.text.strip()
+
+    if expected_code:
+        assert actual_code == expected_code
+
+    assert actual_description == expected_description
+
+
+class TestBasicSmash:
+    """Basic tests for SMASH service."""
+
+    @pytest.fixture()
+    def locked_pool(
+        self,
+        cluster_lock_pool: tuple[clusterlib.ClusterLib, str],
+    ) -> dbsync_queries.PoolDataDBRow:
+        """Get id of locked pool from cluster_lock_pool fixture."""
+        cluster_obj, pool_name = cluster_lock_pool
+        pools_ids = cluster_obj.g_query.get_stake_pools()
+        locked_pool_number = pool_name.replace("node-pool", "")
+        pattern = re.compile(r"pool" + re.escape(locked_pool_number) + r"(\D|$)")
+        pools = [next(dbsync_queries.query_pool_data(p)) for p in pools_ids]
+        locked_pool_data = next((p for p in pools if pattern.search(p.metadata_url)), None)
+        return locked_pool_data
+
+    @pytest.fixture(scope="session")
+    def smash(
+        self,
+    ) -> None | smash_utils.SmashClient:
+        """Create SMASH client."""
+        smash = smash_utils.get_client()
+        return smash
+
+    def test_fetch_pool_metadata(
+        self, locked_pool: dbsync_queries.PoolDataDBRow, smash: smash_utils.SmashClient
+    ):
+        pool_id = locked_pool.view
+
+        # Offchain metadata is inserted into database few minutes after start of a cluster
+        def _query_func():
+            pool_metadata = next(iter(dbsync_queries.query_off_chain_pool_data(pool_id)), None)
+            assert pool_metadata is not None, dbsync_utils.NO_RESPONSE_STR
+            return pool_metadata
+
+        metadata_dbsync = dbsync_utils.retry_query(query_func=_query_func, timeout=360)
+
+        expected_metadata = smash_utils.PoolMetadata(
+            name=metadata_dbsync.json["name"],
+            description=metadata_dbsync.json["description"],
+            ticker=metadata_dbsync.ticker_name,
+            homepage=metadata_dbsync.json["homepage"],
+        )
+        actual_metadata = smash.get_pool_metadata(pool_id, metadata_dbsync.hash.hex())
+        assert expected_metadata == actual_metadata
+
+    def test_delist_pool(
+        self,
+        locked_pool: dbsync_queries.PoolDataDBRow,
+        smash: smash_utils.SmashClient,
+        request: pytest.FixtureRequest,
+        worker_id: str,
+    ):
+        pool_id = locked_pool.view
+
+        # Define and register function that ensures pool is re-enlisted after test completion
+        def pool_cleanup():
+            smash.enlist_pool(pool_id)
+
+        request.addfinalizer(pool_cleanup)
+
+        # Delist the pool
+        pool_data = dbsync_utils.get_pool_data(pool_id)
+        expected_delisted_pool = smash_utils.PoolData(pool_id=pool_data.hash)
+        actual_delisted_pool = smash.delist_pool(pool_id)
+        assert expected_delisted_pool == actual_delisted_pool
+
+        # Check if fetching metadata for a delisted pool returns an error
+        try:
+            smash.get_pool_metadata(pool_id, pool_data.metadata_hash)
+        except requests.exceptions.RequestException as err:
+            check_request_error(
+                err, HTTPStatus.FORBIDDEN, None, f"Pool {pool_data.hash} is delisted"
+            )
+
+        # Ignore expected errors in logs that would fail test in teardown phase
+        err_msg = "Delisted pool already exists!"
+        expected_err_regexes = [err_msg]
+        logfiles.add_ignore_rule(
+            files_glob="smash.stdout",
+            regex="|".join(expected_err_regexes),
+            ignore_file_id=worker_id,
+        )
+        # Ensure re-delisting an already delisted pool returns an error
+        try:
+            smash.delist_pool(pool_id)
+        except requests.exceptions.RequestException as err:
+            check_request_error(err, HTTPStatus.BAD_REQUEST, "DbInsertError", err_msg)
+
+    def test_enlist_pool(
+        self,
+        locked_pool: dbsync_queries.PoolDataDBRow,
+        smash: smash_utils.SmashClient,
+    ):
+        pool_id = locked_pool.view
+        pool_data = dbsync_utils.get_pool_data(pool_id)
+        if pool_data is None:
+            raise ValueError(f"Pool data not found for pool_id: {pool_id}")
+
+        # Ensure enlisting an already enlisted pool returns an error
+        try:
+            smash.enlist_pool(pool_id)
+        except requests.exceptions.RequestException as err:
+            check_request_error(
+                err,
+                HTTPStatus.NOT_FOUND,
+                "RecordDoesNotExist",
+                "The requested record does not exist.",
+            )
+
+        # Delist the pool
+        smash.delist_pool(pool_id)
+        try:
+            smash.get_pool_metadata(pool_id, pool_data.metadata_hash)
+        except requests.exceptions.RequestException as err:
+            check_request_error(
+                err, HTTPStatus.FORBIDDEN, None, f"Pool {pool_data.hash} is delisted"
+            )
+
+        # Enlist the pool
+        actual_res_enlist = smash.enlist_pool(pool_id)
+        expected_res_enlist = smash_utils.PoolData(pool_id=pool_data.hash)
+        assert expected_res_enlist == actual_res_enlist
+
+    def test_reserve_ticker(
+        self,
+        locked_pool: dbsync_queries.PoolDataDBRow,
+        smash: smash_utils.SmashClient,
+        request: pytest.FixtureRequest,
+    ):
+        pool_id = locked_pool.view
+
+        # Register cleanup function that removes ticker from database after test completion
+        request.addfinalizer(dbsync_queries.delete_reserved_pool_tickers)
+
+        # Reserve ticker
+        ticker = helpers.get_rand_str(length=3)
+        actual_response = smash.reserve_ticker(ticker_name=ticker, pool_hash=pool_id)
+        expected_response = smash_utils.PoolTicker(name=f"{ticker}")
+        assert expected_response == actual_response
+
+        # Reserve already taken ticker
+        try:
+            smash.reserve_ticker(ticker_name=ticker, pool_hash=pool_id)
+        except requests.exceptions.RequestException as err:
+            check_request_error(
+                err,
+                HTTPStatus.BAD_REQUEST,
+                "TickerAlreadyReserved",
+                f'Ticker name "{ticker}" is already reserved',
+            )
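
The smash_utils module these tests import is also added by this commit, but its hunks are not shown on this page. For context, a condensed sketch of what the SmashClient used above might look like; the endpoint paths follow the public SMASH API, the basic-auth credentials match the defaults exported by run-cardano-smash, and all names here are assumptions rather than the committed implementation:

import dataclasses

import requests


@dataclasses.dataclass(frozen=True)
class PoolData:
    pool_id: str


class SmashClient:
    """Thin wrapper around the SMASH HTTP API."""

    def __init__(self, base_url: str, admin: str = "admin", password: str = "password") -> None:
        self.base_url = base_url
        self.auth = (admin, password)  # admin endpoints use HTTP basic auth

    def get_pool_metadata(self, pool_id: str, pool_meta_hash: str) -> dict:
        r = requests.get(
            f"{self.base_url}/api/v1/metadata/{pool_id}/{pool_meta_hash}", timeout=10
        )
        r.raise_for_status()  # the tests catch RequestException and inspect err.response
        return r.json()

    def delist_pool(self, pool_id: str) -> PoolData:
        r = requests.patch(
            f"{self.base_url}/api/v1/delist", json={"poolId": pool_id}, auth=self.auth, timeout=10
        )
        r.raise_for_status()
        return PoolData(pool_id=r.json()["poolId"])

    def enlist_pool(self, pool_id: str) -> PoolData:
        r = requests.patch(
            f"{self.base_url}/api/v1/enlist", json={"poolId": pool_id}, auth=self.auth, timeout=10
        )
        r.raise_for_status()
        return PoolData(pool_id=r.json()["poolId"])

    def reserve_ticker(self, ticker_name: str, pool_hash: str) -> dict:
        r = requests.post(
            f"{self.base_url}/api/v1/tickers/{ticker_name}",
            json={"poolId": pool_hash},
            auth=self.auth,
            timeout=10,
        )
        r.raise_for_status()
        return r.json()
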
cardano_node_tests/utils/cluster_scripts.py

Lines changed: 7 additions & 0 deletions

@@ -51,6 +51,7 @@ class InstancePorts:
     webserver: int
     metrics_submit_api: int
     submit_api: int
+    smash: int
     supervisor: int
     relay1: int
     ekg_relay1: int
@@ -189,6 +190,7 @@ def _get_node_ports(num: int) -> NodePorts:
             webserver=last_port,
             metrics_submit_api=last_port - 1,
             submit_api=last_port - 2,
+            smash=last_port - 3,
             supervisor=12001 + instance_num,
             # Relay1
             relay1=0,
@@ -266,6 +268,8 @@ def _replace_instance_files(
         new_content = new_content.replace(
             "%%METRICS_SUBMIT_API_PORT%%", str(instance_ports.metrics_submit_api)
         )
+        # Reconfigure smash port
+        new_content = new_content.replace("%%SMASH_PORT%%", str(instance_ports.smash))
         # Reconfigure webserver port
         new_content = new_content.replace("%%WEBSERVER_PORT%%", str(instance_ports.webserver))
         return new_content
@@ -542,6 +546,7 @@ def get_instance_ports(self, instance_num: int) -> InstancePorts:
             webserver=last_port,
             metrics_submit_api=last_port - 1,
             submit_api=last_port - 2,
+            smash=last_port - 3,
             supervisor=12001 + instance_num,
             relay1=relay1_ports.node,
             ekg_relay1=relay1_ports.ekg,
@@ -615,6 +620,8 @@ def _reconfigure_testnet(
         new_content = new_content.replace(
             "%%METRICS_SUBMIT_API_PORT%%", str(instance_ports.metrics_submit_api)
        )
+        # Reconfigure smash port
+        new_content = new_content.replace("%%SMASH_PORT%%", str(instance_ports.smash))
         # Reconfigure EKG metrics port
         new_content = new_content.replace("%%EKG_PORT_RELAY1%%", str(instance_ports.ekg_relay1))
         # Reconfigure prometheus metrics port
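
A worked example of the per-instance port layout after this change; last_port itself is computed outside the hunks shown, so the concrete numbers below are illustrative only:

# Illustrative values; only the offsets are taken from the diff above
last_port = 30009
instance_num = 0

ports = {
    "webserver": last_port,               # 30009
    "metrics_submit_api": last_port - 1,  # 30008
    "submit_api": last_port - 2,          # 30007
    "smash": last_port - 3,               # 30006, new in this commit
    "supervisor": 12001 + instance_num,   # 12001
}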