Skip to content
This repository was archived by the owner on Nov 17, 2025. It is now read-only.

Commit cdb5823

Browse files
authored
Merge pull request #290 from bancorprotocol/288-add-read-only-setting
288 add read only setting
2 parents edfd9a5 + 1fa6981 commit cdb5823

File tree

6 files changed

+130
-105
lines changed

6 files changed

+130
-105
lines changed

fastlane_bot/events/async_event_update_utils.py

Lines changed: 47 additions & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def get_pool_info(
148148
tkn1: Dict[str, Any],
149149
pool_data_keys: frozenset,
150150
) -> Dict[str, Any]:
151-
fee_raw = eval(pool["fee"])
151+
fee_raw = pool["fee"]
152152
pool_info = {
153153
"exchange_name": pool["exchange_name"],
154154
"address": pool["address"],
@@ -184,32 +184,35 @@ def get_pool_info(
184184

185185
return pool_info
186186

187-
def sanitize_token_symbol(token_symbol: str, token_address: str) -> str:
187+
def sanitize_token_symbol(token_symbol: str, token_address: str, read_only: bool) -> str:
    """
    This function ensures token symbols are compatible with the bot's data structures.
    If a symbol is not compatible with Dataframes or CSVs, this function will return the token's address.

    The check works by round-tripping the symbol through a pandas DataFrame
    CSV write; any failure falls back to the token address.

    :param token_symbol: the token's symbol
    :param token_address: the token's address
    :param read_only: bool indicating whether the bot is running in read_only mode

    returns: str
    """
    # Local import so the read-only path never needs a writable filesystem.
    import io

    sanitization_path = os.path.normpath(
        "fastlane_bot/data/data_sanitization_center/sanitary_data.csv"
    )
    try:
        token_pd = pd.DataFrame([{"symbol": token_symbol}], columns=["symbol"])
        # In read-only mode, target an in-memory buffer so the CSV
        # compatibility check still runs without touching disk.
        # (Previously the check was skipped entirely when read_only=True,
        # letting incompatible symbols through unvalidated.)
        target = io.StringIO() if read_only else sanitization_path
        token_pd.to_csv(target)
        return token_symbol
    except Exception:
        # Deliberate best-effort: any incompatibility falls back to the address.
        return token_address
204206

205207

206208
def add_token_info(
207-
pool_info: Dict[str, Any], tkn0: Dict[str, Any], tkn1: Dict[str, Any]
209+
pool_info: Dict[str, Any], tkn0: Dict[str, Any], tkn1: Dict[str, Any], read_only: bool
208210
) -> Dict[str, Any]:
211+
print(f"called add_token_info")
209212
tkn0_symbol = tkn0["symbol"].replace("/", "_").replace("-", "_")
210213
tkn1_symbol = tkn1["symbol"].replace("/", "_").replace("-", "_")
211-
tkn0_symbol = sanitize_token_symbol(token_symbol=tkn0_symbol, token_address=tkn0["address"])
212-
tkn1_symbol = sanitize_token_symbol(token_symbol=tkn1_symbol, token_address=tkn1["address"])
214+
tkn0_symbol = sanitize_token_symbol(token_symbol=tkn0_symbol, token_address=tkn0["address"], read_only=read_only)
215+
tkn1_symbol = sanitize_token_symbol(token_symbol=tkn1_symbol, token_address=tkn1["address"], read_only=read_only)
213216
tkn0["symbol"] = tkn0_symbol
214217
tkn1["symbol"] = tkn1_symbol
215218

@@ -310,30 +313,45 @@ def process_contract_chunks(
310313
subset: List[str],
311314
func: Callable,
312315
df_combined: pd.DataFrame = None,
316+
read_only: bool = False,
313317
) -> pd.DataFrame:
318+
lst = []
314319
# write chunks to csv
315320
for idx, chunk in enumerate(chunks):
316321
loop = asyncio.get_event_loop()
317322
df = loop.run_until_complete(func(chunk))
318-
df.to_csv(f"{dirname}/{base_filename}{idx}.csv", index=False)
323+
if not read_only:
324+
df.to_csv(f"{dirname}/{base_filename}{idx}.csv", index=False)
325+
else:
326+
lst.append(df)
319327

320-
# concatenate and deduplicate
321328
filepaths = glob(f"{dirname}/*.csv")
322-
if filepaths:
323-
df_orig = df_combined.copy() if df_combined is not None else None
324-
df_combined = pd.concat([pd.read_csv(filepath) for filepath in filepaths])
325-
df_combined = (
326-
pd.concat([df_orig, df_combined]) if df_orig is not None else df_combined
327-
)
328-
df_combined = df_combined.drop_duplicates(subset=subset)
329-
df_combined.to_csv(filename, index=False)
330329

331-
# clear temp dir
332-
for filepath in filepaths:
333-
try:
334-
os.remove(filepath)
335-
except Exception as e:
336-
cfg.logger.error(f"Failed to remove {filepath} {e}??? This is spooky...")
330+
if not read_only:
331+
# concatenate and deduplicate
332+
333+
if filepaths:
334+
df_orig = df_combined.copy() if df_combined is not None else None
335+
df_combined = pd.concat([pd.read_csv(filepath) for filepath in filepaths])
336+
df_combined = (
337+
pd.concat([df_orig, df_combined]) if df_orig is not None else df_combined
338+
)
339+
df_combined = df_combined.drop_duplicates(subset=subset)
340+
df_combined.to_csv(filename, index=False)
341+
# clear temp dir
342+
for filepath in filepaths:
343+
try:
344+
os.remove(filepath)
345+
except Exception as e:
346+
cfg.logger.error(f"Failed to remove {filepath} {e}??? This is spooky...")
347+
else:
348+
if lst:
349+
dfs = pd.concat(lst)
350+
dfs = dfs.drop_duplicates(subset=subset)
351+
if df_combined is not None:
352+
df_combined = pd.concat([df_combined, dfs])
353+
else:
354+
df_combined = dfs
337355

338356
return df_combined
339357

@@ -392,6 +410,7 @@ def async_update_pools_from_contracts(mgr: Any, current_block: int, logging_path
392410
filename="tokens_and_fee_df.csv",
393411
subset=["exchange_name", "address", "cid", "tkn0_address", "tkn1_address"],
394412
func=main_get_tokens_and_fee,
413+
read_only=mgr.read_only,
395414
)
396415

397416
contracts, tokens_df = get_token_contracts(mgr, tokens_and_fee_df)
@@ -405,16 +424,18 @@ def async_update_pools_from_contracts(mgr: Any, current_block: int, logging_path
405424
df_combined=pd.read_csv(
406425
f"fastlane_bot/data/blockchain_data/{mgr.blockchain}/tokens.csv"
407426
),
427+
read_only=mgr.read_only,
408428
)
409429
tokens_df["symbol"] = (
410430
tokens_df["symbol"]
411431
.str.replace(" ", "_")
412432
.str.replace("/", "_")
413433
.str.replace("-", "_")
414434
)
415-
tokens_df.to_csv(
416-
f"fastlane_bot/data/blockchain_data/{mgr.blockchain}/tokens.csv", index=False
417-
)
435+
if not mgr.read_only:
436+
tokens_df.to_csv(
437+
f"fastlane_bot/data/blockchain_data/{mgr.blockchain}/tokens.csv", index=False
438+
)
418439
tokens_df["address"] = tokens_df["address"].apply(
419440
lambda x: Web3.to_checksum_address(x)
420441
)
@@ -440,44 +461,6 @@ def async_update_pools_from_contracts(mgr: Any, current_block: int, logging_path
440461
]
441462
)
442463

443-
# def correct_tkn(tkn_address, keyname):
444-
# try:
445-
# return tokens_df[tokens_df["address"] == tkn_address][keyname].values[0]
446-
# except IndexError:
447-
# return np.nan
448-
#
449-
# static_pool_data = new_pool_data_df.copy()
450-
# static_pool_data["tkn0_address"] = static_pool_data["tkn0_address"].apply(
451-
# lambda x: Web3.to_checksum_address(x)
452-
# )
453-
# static_pool_data["tkn1_address"] = static_pool_data["tkn1_address"].apply(
454-
# lambda x: Web3.to_checksum_address(x)
455-
# )
456-
# static_pool_data["tkn0_decimals"] = static_pool_data["tkn0_address"].apply(
457-
# lambda x: correct_tkn(x, "decimals")
458-
# )
459-
# static_pool_data["tkn1_decimals"] = static_pool_data["tkn1_address"].apply(
460-
# lambda x: correct_tkn(x, "decimals")
461-
# )
462-
# static_pool_data["tkn0_key"] = static_pool_data["tkn0_address"].apply(
463-
# lambda x: correct_tkn(x, "key")
464-
# )
465-
# static_pool_data["tkn1_key"] = static_pool_data["tkn1_address"].apply(
466-
# lambda x: correct_tkn(x, "key")
467-
# )
468-
# static_pool_data["tkn0_symbol"] = static_pool_data["tkn0_address"].apply(
469-
# lambda x: correct_tkn(x, "symbol")
470-
# )
471-
# static_pool_data["tkn1_symbol"] = static_pool_data["tkn1_address"].apply(
472-
# lambda x: correct_tkn(x, "symbol")
473-
# )
474-
# static_pool_data["pair_name"] = (
475-
# static_pool_data["tkn0_key"] + "/" + static_pool_data["tkn1_key"]
476-
# )
477-
#
478-
# new_pool_data_df = static_pool_data.copy()
479-
# del static_pool_data
480-
481464
new_pool_data_df["descr"] = (
482465
new_pool_data_df["exchange_name"]
483466
+ " "
@@ -502,9 +485,6 @@ def async_update_pools_from_contracts(mgr: Any, current_block: int, logging_path
502485
)
503486

504487
duplicate_new_pool_ct = len(duplicate_cid_rows)
505-
# assert len(mgr.pools_to_add_from_contracts) == (
506-
# len(new_pool_data_df) + duplicate_new_pool_ct
507-
# )
508488

509489
all_pools_df = (
510490
pd.DataFrame(mgr.pool_data)

fastlane_bot/events/exceptions.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
class ReadOnlyException(Exception):
    """Raised when read-only mode needs a tokens.csv file that does not exist.

    :param filepath: the path at which tokens.csv was expected
    """

    def __init__(self, filepath):
        self.filepath = filepath

    def __str__(self):
        return (
            f"tokens.csv does not exist at {self.filepath}. "
            "Please run the bot without the `read_only` flag to create this file."
        )

fastlane_bot/events/managers/base.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,8 @@ class BaseManager:
5353
The tokens mapping.
5454
SUPPORTED_EXCHANGES : Dict[str, Any]
5555
The supported exchanges.
56+
read_only : bool
57+
Whether the bot is running in read only mode.
5658
"""
5759

5860
web3: Web3
@@ -99,6 +101,7 @@ class BaseManager:
99101
static_pools: Dict[str, List[str]] = field(default_factory=dict)
100102

101103
prefix_path: str = ""
104+
read_only: bool = False
102105

103106
def __post_init__(self):
104107
initialized_exchanges = []

fastlane_bot/events/managers/contracts.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -328,7 +328,7 @@ def get_token_info_from_contract(
328328
contract=contract,
329329
addr=addr,
330330
token_data=token_data,
331-
tokens_filepath=tokens_filepath,
331+
tokens_filepath=tokens_filepath
332332
)
333333
except self.FailedToGetTokenDetailsException as e:
334334
self.cfg.logger.debug(
@@ -420,14 +420,15 @@ def _get_and_save_token_info_from_contract(
420420
except FileExistsError:
421421
pass
422422

423-
collision_safety = str(random.randrange(1, 1000))
424-
ts = datetime.now().strftime("%d-%H-%M-%S-%f")
425-
ts += collision_safety
426-
row.to_csv(
427-
os.path.normpath(
428-
f"{self.prefix_path}fastlane_bot/data/blockchain_data/{self.cfg.NETWORK}/token_detail/{ts}.csv"
429-
),
430-
index=False,
431-
)
423+
if not self.read_only:
424+
collision_safety = str(random.randrange(1, 1000))
425+
ts = datetime.now().strftime("%d-%H-%M-%S-%f")
426+
ts += collision_safety
427+
row.to_csv(
428+
os.path.normpath(
429+
f"{self.prefix_path}fastlane_bot/data/blockchain_data/{self.cfg.NETWORK}/token_detail/{ts}.csv"
430+
),
431+
index=False,
432+
)
432433

433434
return (symbol, decimals)

fastlane_bot/events/utils.py

Lines changed: 33 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
from fastlane_bot import Config
2727
from fastlane_bot.bot import CarbonBot
2828
from fastlane_bot.config.multiprovider import MultiProviderContractWrapper
29+
from fastlane_bot.events.exceptions import ReadOnlyException
2930
from fastlane_bot.events.interface import QueryInterface
3031
from fastlane_bot.events.managers.manager import Manager
3132

@@ -242,6 +243,7 @@ def get_static_data(
242243
exchanges: List[str],
243244
blockchain: str,
244245
static_pool_data_filename: str,
246+
read_only: bool = False,
245247
) -> Tuple[pd.DataFrame, pd.DataFrame, Dict[str, str], Dict[str, str]]:
246248
"""
247249
Helper function to get static pool data, tokens, and Uniswap v2 event mappings.
@@ -256,6 +258,8 @@ def get_static_data(
256258
The name of the blockchain being used
257259
static_pool_data_filename : str
258260
The filename of the static pool data CSV file.
261+
read_only : bool, optional
262+
Whether to run the bot in read-only mode, by default False
259263
260264
Returns
261265
-------
@@ -288,9 +292,13 @@ def get_static_data(
288292
)
289293

290294
tokens_filepath = os.path.join(base_path, "tokens.csv")
291-
if not os.path.exists(tokens_filepath):
295+
if not os.path.exists(tokens_filepath) and not read_only:
292296
df = pd.DataFrame(columns=["address", "symbol", "decimals"])
293297
df.to_csv(tokens_filepath)
298+
elif not os.path.exists(tokens_filepath) and read_only:
299+
raise ReadOnlyException(
300+
f"Tokens file {tokens_filepath} does not exist. Please run the bot in non-read-only mode to create it."
301+
)
294302
tokens = read_csv_file(tokens_filepath)
295303
tokens["address"] = tokens["address"].apply(lambda x: Web3.to_checksum_address(x))
296304
tokens = tokens.drop_duplicates(subset=["address"])
@@ -1921,18 +1929,22 @@ def handle_static_pools_update(mgr: Any):
19211929
mgr.static_pools[attr_name] = exchange_pools
19221930

19231931

1924-
def handle_tokens_csv(mgr, prefix_path):
1932+
def handle_tokens_csv(mgr, prefix_path, read_only: bool = False):
19251933
tokens_filepath = os.path.normpath(
19261934
f"{prefix_path}fastlane_bot/data/blockchain_data/{mgr.cfg.NETWORK}/tokens.csv"
19271935
)
1936+
19281937
try:
19291938
token_data = pd.read_csv(tokens_filepath)
19301939
except Exception as e:
1931-
mgr.cfg.logger.info(
1932-
f"[events.utils.handle_tokens_csv] Error reading token data: {e}... creating new file"
1933-
)
1934-
token_data = pd.DataFrame(mgr.tokens)
1935-
token_data.to_csv(tokens_filepath, index=False)
1940+
if not read_only:
1941+
mgr.cfg.logger.info(
1942+
f"[events.utils.handle_tokens_csv] Error reading token data: {e}... creating new file"
1943+
)
1944+
token_data = pd.DataFrame(mgr.tokens)
1945+
token_data.to_csv(tokens_filepath, index=False)
1946+
else:
1947+
raise ReadOnlyException(tokens_filepath) from e
19361948

19371949
extra_info = glob(
19381950
os.path.normpath(
@@ -1945,19 +1957,22 @@ def handle_tokens_csv(mgr, prefix_path):
19451957
)
19461958
token_data = pd.concat([token_data, extra_info_df], ignore_index=True)
19471959
token_data = token_data.drop_duplicates(subset=["address"])
1948-
token_data.to_csv(tokens_filepath, index=False)
1949-
mgr.tokens = token_data.to_dict(orient="records")
19501960

1951-
# delete all files in token_detail
1952-
for f in extra_info:
1953-
try:
1954-
os.remove(f)
1955-
except FileNotFoundError:
1956-
pass
1961+
if not read_only:
1962+
token_data.to_csv(tokens_filepath, index=False)
19571963

1958-
mgr.cfg.logger.info(
1959-
f"[events.utils.handle_tokens_csv] Updated token data with {len(extra_info)} new tokens"
1960-
)
1964+
# delete all files in token_detail
1965+
for f in extra_info:
1966+
try:
1967+
os.remove(f)
1968+
except FileNotFoundError:
1969+
pass
1970+
1971+
mgr.tokens = token_data.to_dict(orient="records")
1972+
1973+
mgr.cfg.logger.info(
1974+
f"[events.utils.handle_tokens_csv] Updated token data with {len(extra_info)} new tokens"
1975+
)
19611976

19621977

19631978
def self_funding_warning_sequence(cfg):

0 commit comments

Comments
 (0)