"""Performance test for light matching engine.

Usage:
    perf-test-light-matching-engine --freq <freq> [options]

Options:
    -h --help                    Show help.
    --freq=<freq>                Order frequency per second. [Default: 10]
    --num-orders=<num_orders>    Number of orders. [Default: 100]
    --add-order-prob=<prob>      Add order probability. [Default: 0.6]
    --mean-price=<mean-price>    Mean of the normal price distribution.
                                 [Default: 100]
    --std-price=<std-price>      Standard deviation of the normal price
                                 distribution. [Default: 0.5]
    --tick-size=<tick-size>      Tick size. [Default: 0.1]
    --gamma-quantity=<gamma>     Shape parameter of the gamma distribution
                                 for the order quantity. [Default: 2]
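
Example (flag values are only illustrative):
    perf-test-light-matching-engine --freq=100 --num-orders=10000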
"""
from docopt import docopt
import logging
from math import log
from random import uniform, seed
from time import sleep, time

from tabulate import tabulate
from tqdm import tqdm

import numpy as np
import pandas as pd

from lightmatchingengine.lightmatchingengine import (
    LightMatchingEngine, Side)

LOGGER = logging.getLogger(__name__)


class Timer:
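    """Context manager that measures the wall-clock time of its block."""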
    def __enter__(self):
        self.start = time()
        return self

    def __exit__(self, *args):
        self.end = time()
        self.interval = self.end - self.start


def run(args):
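    """Run the benchmark and return the add and cancel statistics."""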
    engine = LightMatchingEngine()

    symbol = "EUR/USD"
    add_order_prob = float(args['--add-order-prob'])
    num_of_orders = int(args['--num-orders'])
    gamma_quantity = float(args['--gamma-quantity'])
    mean_price = float(args['--mean-price'])
    std_price = float(args['--std-price'])
    tick_size = float(args['--tick-size'])
    freq = float(args['--freq'])
    orders = {}
    add_statistics = []
    cancel_statistics = []

    # Initialize the random seeds (stdlib and numpy) for reproducibility
    seed(42)
    np.random.seed(42)

    progress_bar = tqdm(total=num_of_orders)
    while num_of_orders > 0:
        if uniform(0, 1) <= add_order_prob or len(orders) == 0:
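            # Draw the order attributes: price from a normal distribution
            # with mean `mean_price` and std `std_price`, truncated to the
            # tick size grid; quantity from a gamma distribution (shape
            # `gamma_quantity`) plus one; side chosen with equal probability.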
            price = np.random.standard_normal() * std_price + mean_price
            price = int(price / tick_size) * tick_size
            quantity = np.random.gamma(gamma_quantity) + 1
            side = Side.BUY if uniform(0, 1) <= 0.5 else Side.SELL

            # Add the order
            with Timer() as timer:
                order, trades = engine.add_order(symbol, price, quantity, side)

            LOGGER.debug('Order %s is added at side %s, price %s '
                         'and quantity %s',
                         order.order_id, order.side, order.price, order.qty)

            # Save the order if there is any quantity left
            if order.leaves_qty > 0:
                orders[order.order_id] = order

            # Remove resting orders that were fully filled by the trades
            for trade in trades:
                if (trade.order_id != order.order_id and
                        orders[trade.order_id].leaves_qty == 0.0):
                    del orders[trade.order_id]

            # Save the statistics
            add_statistics.append((order, len(trades), timer))

            num_of_orders -= 1
            progress_bar.update(1)
        else:
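            # Cancel a randomly chosen resting order and time the cancel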
            index = int(uniform(0, 1) * len(orders))
            if index == len(orders):
                index -= 1

            order_id = list(orders.keys())[index]
            order = orders[order_id]

            with Timer() as timer:
                engine.cancel_order(order_id, order.instmt)

            LOGGER.debug('Order %s is deleted', order_id)
            del orders[order_id]

            # Save the statistics
            cancel_statistics.append((order, timer))

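        # Sleep for an exponentially distributed time so that order events
        # arrive roughly as a Poisson process with rate `freq` per second.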
        # Next time = -ln(U) / lambda
        sleep(-log(uniform(0, 1)) / freq)

    return add_statistics, cancel_statistics


def describe_statistics(add_statistics, cancel_statistics):
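    """Summarize trade counts and add/cancel latencies as log tables."""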
    add_statistics = pd.DataFrame([
        (trade_num, timer.interval * 1e6)
        for _, trade_num, timer in add_statistics],
        columns=['trade_num', 'interval'])

    # Trade statistics
    trade_statistics = add_statistics['trade_num'].describe()
    LOGGER.info('Trade statistics:\n%s',
                tabulate(trade_statistics.to_frame(name='trade'),
                         tablefmt='pipe'))

    cancel_statistics = pd.Series([
        timer.interval * 1e6 for _, timer in cancel_statistics],
        name='interval')

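    # Combine the add and cancel latency summaries into a single table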
    statistics = pd.concat([
        add_statistics['interval'].describe(),
        cancel_statistics.describe()],
        keys=['add', 'cancel'],
        axis=1)

    statistics['add (trade > 0)'] = (
        add_statistics.loc[
            add_statistics['trade_num'] > 0, 'interval'].describe())

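    # Latency of add orders whose trade count exceeds the 75th percentile
    # of trades per add order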
    percentile_75 = trade_statistics['75%']
    statistics['add (trade > %s)' % percentile_75] = (
        add_statistics.loc[add_statistics['trade_num'] > percentile_75,
                           'interval'].describe())

    LOGGER.info('Matching engine latency (microseconds):\n%s',
                tabulate(statistics,
                         headers=statistics.columns,
                         tablefmt='pipe'))


if __name__ == '__main__':
    args = docopt(__doc__, version='1.0.0')
    logging.basicConfig(level=logging.INFO)

    LOGGER.info('Running the performance benchmark')
    add_statistics, cancel_statistics = run(args)
    describe_statistics(add_statistics, cancel_statistics)