import sanic
from sanic import response

+ from orjson import dumps
+

logger = getLogger(__name__)

@@ -41,23 +43,26 @@ def get_num_queries(queries):
    return query_count


- connection_pool = None
sort_fortunes_key = itemgetter(1)
template = load_fortunes_template()

- app = sanic.Sanic(name=__name__)
+ app = sanic.Sanic(name=__name__, dumps=dumps)


@app.listener('before_server_start')
async def setup_database(app, loop):
-     global connection_pool
-     connection_pool = await asyncpg.create_pool(
-         user=os.getenv('PGUSER', 'benchmarkdbuser'),
-         password=os.getenv('PGPASS', 'benchmarkdbpass'),
-         database='hello_world',
-         host='tfb-database',
-         port=5432
-     )
+     app.ctx.pool = await asyncpg.create_pool(
+         user=os.getenv('PGUSER', 'benchmarkdbuser'),
+         password=os.getenv('PGPASS', 'benchmarkdbpass'),
+         database='hello_world',
+         host='tfb-database',
+         port=5432
+     )
+
+
+ @app.listener('after_server_stop')
+ async def close_database(app, loop):
+     app.ctx.pool.close()


@app.get('/json')
@@ -69,7 +74,7 @@ def json_view(request):
async def single_database_query_view(request):
    row_id = randint(1, 10000)

-     async with connection_pool.acquire() as connection:
+     async with request.app.ctx.pool.acquire() as connection:
        number = await connection.fetchval(READ_ROW_SQL, row_id)

    return response.json(
@@ -84,7 +89,7 @@ async def multiple_database_queries_view(request):
    row_ids = sample(range(1, 10000), num_queries)
    worlds = []

-     async with connection_pool.acquire() as connection:
+     async with request.app.ctx.pool.acquire() as connection:
        statement = await connection.prepare(READ_ROW_SQL)
        for row_id in row_ids:
            number = await statement.fetchval(row_id)
@@ -100,7 +105,7 @@ async def multiple_database_queries_view(request):

@app.get('/fortunes')
async def fortunes_view(request):
-     async with connection_pool.acquire() as connection:
+     async with request.app.ctx.pool.acquire() as connection:
        fortunes = await connection.fetch('SELECT * FROM Fortune')

    fortunes.append(ADDITIONAL_ROW)
@@ -112,22 +117,21 @@ async def fortunes_view(request):

@app.get('/updates')
async def database_updates_view(request):
-     worlds = []
-     updates = set()
    queries = request.args.get('queries', 1)
+     num_queries = get_num_queries(queries)
+     # To avoid deadlock
+     ids = sorted(sample(range(1, 10000 + 1), num_queries))
+     numbers = sorted(sample(range(1, 10000), num_queries))
+     updates = list(zip(ids, numbers))

-     async with connection_pool.acquire() as connection:
-         statement = await connection.prepare(READ_ROW_SQL_TO_UPDATE)
-
-         for row_id in sample(range(1, 10000), get_num_queries(queries)):
-             record = await statement.fetchrow(row_id)
-             world = dict(
-                 id=record['id'], randomNumber=record['randomnumber']
-             )
-             world['randomNumber'] = randint(1, 10000)
-             worlds.append(world)
-             updates.add((world['id'], world['randomNumber']))
+     worlds = [
+         {"id": row_id, "randomNumber": number} for row_id, number in updates
+     ]

+     async with request.app.ctx.pool.acquire() as connection:
+         statement = await connection.prepare(READ_ROW_SQL)
+         for row_id, _ in updates:
+             await statement.fetchval(row_id)
        await connection.executemany(WRITE_ROW_SQL, updates)

    return response.json(worlds, headers=get_headers())
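
The net effect of the diff is that the asyncpg pool now lives on `app.ctx` instead of a module-level global, and every handler reaches it through `request.app.ctx.pool`. Below is a minimal, self-contained sketch of that pattern, assuming a hypothetical app name, placeholder local connection settings, and a throwaway `/one` route rather than the benchmark's `tfb-database` configuration:

import asyncpg
import sanic
from sanic import response

app = sanic.Sanic("pool_sketch")  # hypothetical app name


@app.listener('before_server_start')
async def setup_database(app, loop):
    # Create the pool once and attach it to the application context.
    app.ctx.pool = await asyncpg.create_pool(
        user='postgres', password='postgres',   # placeholder credentials
        database='postgres', host='localhost', port=5432
    )


@app.listener('after_server_stop')
async def close_database(app, loop):
    # Release the pool when the server stops (Pool.close() is awaitable in asyncpg).
    await app.ctx.pool.close()


@app.get('/one')
async def one(request):
    # Handlers borrow a connection from the shared pool via the request's app reference.
    async with request.app.ctx.pool.acquire() as connection:
        value = await connection.fetchval('SELECT 1')
    return response.json({'value': value})


if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8000)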