Skip to content

Commit 1a42897

Browse files
committed
Add BIP for CheckTemplateVerify
1 parent 6a80232 commit 1a42897

File tree

8 files changed

+684
-0
lines changed

8 files changed

+684
-0
lines changed

bip-ctv.mediawiki

Lines changed: 548 additions & 0 deletions
Large diffs are not rendered by default.

ctv/fifty.png

364 KB
Loading

ctv/five.png

317 KB
Loading

ctv/nic.svg

Lines changed: 1 addition & 0 deletions
Loading

ctv/pooledcoshv.png

250 KB
Loading

ctv/simulation.py

Lines changed: 133 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,133 @@
1+
#!/usr/bin/python3
2+
import numpy as np
3+
import matplotlib.pyplot as plt
4+
# Simulation parameters.
PHASES = 15                       # number of demand phases simulated
PHASE_LENGTH = 144                # blocks per phase (~one day at 10-minute blocks)
SAMPLES = PHASE_LENGTH * PHASES   # total number of simulated blocks
AVG_TX = 235                      # average transaction size, bytes

# Byte size of one compressed node; the summands are presumably the
# serialized field sizes from the accompanying BIP -- TODO confirm.
COMPRESSED_NODE_SIZE = 4 + 1 + 1 + 4 + 32 + 4 + 4 + 8 + 8 + 34 + 34 + 33 + 32 + 34
print(COMPRESSED_NODE_SIZE)

MAX_BLOCK_SIZE = 1e6              # block capacity, bytes
AVG_INTERVAL = 10 * 60            # mean seconds between blocks
# Sustainable throughput assuming half of each block is available.
TXNS_PER_SEC = 0.5 * MAX_BLOCK_SIZE / AVG_TX / AVG_INTERVAL
MAX_MEMPOOL = MAX_BLOCK_SIZE * 100  # NOTE(review): not referenced in visible code
COMPRESSABLE = 0.05               # fraction of traffic that defers via compression
15+
16+
17+
18+
19+
20+
def get_rate(phase):
    """Return the simulated transaction arrival rate (tx/sec) for *phase*.

    The rate grows geometrically (factor 1.25 per phase) through the first
    third of the phases, then decays geometrically afterwards.
    """
    # Exponent rises during the ramp-up third, then falls symmetrically.
    exponent = phase if phase <= PHASES / 3 else 2 * PHASES / 3 - phase
    return TXNS_PER_SEC * 1.25 ** exponent
25+
26+
def normal():
    """Simulate per-block mempool backlog without congestion control.

    Returns:
        (unconfirmed, days): a list of unconfirmed-transaction counts per
        block, and the cumulative block times converted to days.
    """
    print("Max Txns Per Sec %f" % TXNS_PER_SEC)
    pending = 0  # bytes of traffic that did not fit in previous blocks
    unconfirmed = [0] * SAMPLES
    intervals = [0] * SAMPLES
    for phase in range(PHASES):
        start = PHASE_LENGTH * phase
        for block in range(start, start + PHASE_LENGTH):
            interval = np.random.exponential(AVG_INTERVAL)
            intervals[block] = interval
            # One Poisson draw covers the whole interval:
            # \sum_1_n Pois(a) = Pois(a*n).
            arrivals = np.random.poisson(get_rate(phase) * interval)
            demand = arrivals * AVG_TX + pending
            # Whatever exceeds one block's capacity carries over.
            pending = max(demand - MAX_BLOCK_SIZE, 0)
            unconfirmed[block] = pending / AVG_TX
    return unconfirmed, np.cumsum(intervals) / (60 * 60 * 24.0)
45+
def compressed(rate_multiplier = 1):
    """Simulate per-block mempool backlog when a COMPRESSABLE fraction of
    traffic defers its expansion via congestion control.

    Parameters:
        rate_multiplier: scales the base arrival rate from get_rate().

    Returns:
        (unconfirmed, yet_to_spend, days): unconfirmed transactions per
        block, confirmed-but-pending transactions per block, and cumulative
        block times in days.
    """
    print("Max Txns Per Sec %f" % TXNS_PER_SEC)
    backlog = 0            # bytes of direct traffic that missed a block
    secondary_backlog = 0  # bytes of deferred expansion work
    results_unconfirmed = [0] * SAMPLES
    results_yet_to_spend = [0] * SAMPLES
    total_time = [0] * SAMPLES
    for phase in range(PHASES):
        for i in range(PHASE_LENGTH * phase, PHASE_LENGTH * (1 + phase)):
            # BUGFIX: block intervals are exponentially distributed
            # (Poisson-process inter-arrivals), matching normal(); the
            # original np.random.poisson(AVG_INTERVAL) draw badly
            # understates interval variance.
            block_time = np.random.exponential(AVG_INTERVAL)
            total_time[i] = block_time
            txns = np.random.poisson(rate_multiplier * get_rate(phase) * block_time)
            postponed = txns * COMPRESSABLE
            weight = (txns - postponed) * AVG_TX + backlog
            # NOTE(review): 133 and 34 look like per-transaction expansion
            # and output byte costs -- confirm against the BIP text.
            secondary_backlog += postponed * 133 + postponed * 34  # Total extra work
            if weight > MAX_BLOCK_SIZE:
                backlog = weight - MAX_BLOCK_SIZE
            else:
                # Spare block space drains the deferred backlog first.
                space = MAX_BLOCK_SIZE - weight
                secondary_backlog = max(secondary_backlog - space, 0)
                backlog = 0
            results_unconfirmed[i] = float(backlog) / AVG_TX
            results_yet_to_spend[i] = secondary_backlog / 2 / AVG_TX
    return results_unconfirmed, results_yet_to_spend, np.cumsum(total_time) / (60 * 60 * 24.0)
74+
75+
# Block index expressed in days (144 blocks per day).
DAYS = np.arange(SAMPLES) / 144
76+
77+
def make_patch_spines_invisible(ax):
    """Hide *ax*'s background patch and every spine so the axis can be
    stacked over another axis without obscuring it."""
    ax.set_frame_on(True)
    ax.patch.set_visible(False)
    # Turn each spine off individually; frame stays on for ticks/labels.
    for spine in ax.spines.values():
        spine.set_visible(False)
82+
83+
if __name__ == "__main__":
84+
normal_txs, blocktimes_n = normal()
85+
compressed_txs, unspendable, blocktimes_c1 = compressed()
86+
compressed_txs2, unspendable2, blocktimes_c2 = compressed(2)
87+
88+
fig, host = plt.subplots()
89+
host.set_title("Transaction Compression Performance with %d%% Adoption During Spike"%(100*COMPRESSABLE))
90+
fig.subplots_adjust(right=0.75)
91+
par1 = host.twinx()
92+
par2 = host.twinx()
93+
par3 = host.twinx()
94+
95+
par2.spines["right"].set_position(("axes", 1.2))
96+
make_patch_spines_invisible(par2)
97+
par2.spines["right"].set_visible(True)
98+
99+
par3.spines["right"].set_position(("axes", 1.4))
100+
make_patch_spines_invisible(par3)
101+
par3.spines["right"].set_visible(True)
102+
103+
host.set_xlabel("Block Days")
104+
105+
host.set_ylabel("Transactions per Second")
106+
p5, = host.plot(range(PHASES), [get_rate(p) for p in range(PHASES)], "k-", label="Transactions Per Second (1x Rate)")
107+
p6, = host.plot(range(PHASES), [2*get_rate(p) for p in range(PHASES)], "k:", label="Transactions Per Second (2x Rate)")
108+
109+
host.yaxis.label.set_color(p5.get_color())
110+
111+
112+
par2.set_ylabel("Unconfirmed Transactions")
113+
#p1, = par2.plot(DAYS, (-np.array(compressed_txs) + np.array(normal_txs)), "b-.", label = "Mempool Delta")
114+
p1, = par2.plot(blocktimes_n, normal_txs, "g", label="Mempool without Congestion Control")
115+
p2, = par2.plot(blocktimes_c1, compressed_txs,"y", label="Mempool with Congestion Control (1x Rate)")
116+
p3, = par2.plot(blocktimes_c2, compressed_txs2,"m", label="Mempool with Congestion Control (2x Rate)")
117+
p_full_block, = par2.plot([DAYS[0], DAYS[-1]], [MAX_BLOCK_SIZE/AVG_TX]*2, "b.-", label="Maximum Average Transactions Per Block")
118+
119+
par2.yaxis.label.set_color(p2.get_color())
120+
121+
122+
par1.set_ylabel("Confirmed but Pending Transactions")
123+
p4, = par1.plot(blocktimes_c1, unspendable2, "c", label="Congestion Control Pending (2x Rate)")
124+
p4, = par1.plot(blocktimes_c2, unspendable, "r", label="Congestion Control Pending (1x Rate)")
125+
par1.yaxis.label.set_color(p4.get_color())
126+
127+
128+
129+
130+
lines = [p1, p2, p3, p4, p5, p6, p_full_block]
131+
host.legend(lines, [l.get_label() for l in lines])
132+
133+
plt.show()

ctv/states.svg

Lines changed: 1 addition & 0 deletions
Loading

ctv/vaults.svg

Lines changed: 1 addition & 0 deletions
Loading

0 commit comments

Comments
 (0)