-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbinarby.py
More file actions
137 lines (118 loc) · 4.91 KB
/
binarby.py
File metadata and controls
137 lines (118 loc) · 4.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import random
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from math import log
from multiprocessing import Pool, cpu_count

import numpy as np
import pandas as pd
import requests
API_KEY = ''
CURRENCIES = [
'USD', 'EUR', 'JPY', 'GBP', 'CNY', 'AUD', 'CAD', 'CHF', 'HKD', 'SGD',
'INR', 'RUB', 'BRL', 'ZAR', 'NZD', 'SEK', 'NOK', 'DKK', 'PLN', 'THB',
'IDR', 'MYR', 'PHP', 'KRW', 'ILS', 'CZK', 'HUF', 'MXN', 'TRY', 'SAR'
]
CHUNKS = 8
CHUNK_SIZE = 16
MIN_CYCLE_LENGTH = 3
MAX_CYCLE_LENGTH = 5
def fetch_rates_for_currency(api_key, base, currencies):
url = f"https://v6.exchangerate-api.com/v6/{api_key}/latest/{base}"
response = requests.get(url, timeout=5)
if response.status_code != 200:
raise Exception(f"API request failed for {base} with status code {response.status_code}")
rates = response.json()['conversion_rates']
return base, {target: rates.get(target, float('nan')) for target in currencies}
def get_currency_rates(api_key, currencies):
data = pd.DataFrame(index=currencies, columns=currencies)
with ThreadPoolExecutor(max_workers=10) as executor:
futures = [executor.submit(fetch_rates_for_currency, api_key, base, currencies) for base in currencies]
for future in as_completed(futures):
base, rates_dict = future.result()
for target in currencies:
data.at[base, target] = rates_dict[target] if base != target else 1.0
return data.astype(float)
def build_neg_log_rates(rates):
return -rates.applymap(log)
def bellman_ford(currencies, rates_matrix, log_margin=0.001, start_time=None):
n = len(currencies)
min_dist = [float('inf')] * n
pre = [-1] * n
min_dist[0] = 0
for _ in range(n - 1):
for u in range(n):
for v in range(n):
if min_dist[v] > min_dist[u] + rates_matrix.iat[u, v]:
min_dist[v] = min_dist[u] + rates_matrix.iat[u, v]
pre[v] = u
cycles = []
for u in range(n):
for v in range(n):
if min_dist[v] > min_dist[u] + rates_matrix.iat[u, v] + log_margin:
cycle = [v]
cur = u
while cur not in cycle:
cycle.append(cur)
cur = pre[cur]
cycle.append(v)
cycle.reverse()
named_cycle = [currencies[i] for i in cycle]
if MIN_CYCLE_LENGTH <= len(named_cycle) <= MAX_CYCLE_LENGTH and named_cycle not in [c[0] for c in cycles]:
detection_time = time.time() - start_time if start_time else None
cycles.append((named_cycle, detection_time))
return cycles
def evaluate_cycle_gain(path, rates):
amt = 100
start = path[0]
for i in range(1, len(path)):
amt *= rates.at[start, path[i]]
start = path[i]
return amt - 100
def process_chunk(args):
chunk, chunk_rates, chunk_neg_log, start_time = args
try:
cycles = bellman_ford(chunk, pd.DataFrame(chunk_neg_log, index=chunk, columns=chunk), start_time=start_time)
profitable = []
df_rates = pd.DataFrame(chunk_rates, index=chunk, columns=chunk)
for cycle, detection_time in cycles:
gain = evaluate_cycle_gain(cycle, df_rates)
if gain > 0.23:
print(f"Gain: {gain:.2f} | Path: {' → '.join(cycle)} | ⏱️ Detected after {detection_time:.2f} sec")
profitable.append((gain, cycle, detection_time))
return profitable
except Exception as e:
print(f"Error in chunk {chunk}: {e}")
return []
if __name__ == "__main__":
start_time = time.time()
try:
fetch_start = time.time()
rates = get_currency_rates(API_KEY, CURRENCIES)
neg_log_rates = build_neg_log_rates(rates)
print(f" Rates fetched and processed in {time.time() - fetch_start:.2f} sec")
except Exception as e:
print(f" Failed to fetch rates: {e}")
exit(1)
chunk_prep_start = time.time()
chunks = [random.sample(CURRENCIES, CHUNK_SIZE) for _ in range(CHUNKS)]
chunk_args = [
(
chunk,
rates.loc[chunk, chunk].values,
neg_log_rates.loc[chunk, chunk].values,
start_time
)
for chunk in chunks
]
print(f"⏱️ Chunks prepared in {time.time() - chunk_prep_start:.2f} sec")
parallel_start = time.time()
with Pool(processes=CHUNKS) as pool:
results = pool.map(process_chunk, chunk_args)
print(f"⏱️ Parallel processing in {time.time() - parallel_start:.2f} sec")
print_start = time.time()
all_opportunities = [item for sublist in results for item in sublist]
if all_opportunities:
print("\n💰 Profitable Arbitrage Cycles:")
else:
print("No arbitrage opportunities found.")
print(f" Output printed in {time.time() - print_start:.2f} sec")
print(f"\n Total time: {time.time() - start_time:.2f} seconds")