import os
import time
import warnings
-from typing import List

-from bson import encode, ObjectId
+from bson import ObjectId, encode

from django.test import (
    TestCase,
)

-from .models import SmallFlatModel, ForeignKeyModel, SmallFlatModelFk, LargeFlatModel, LargeNestedModel, StringEmbeddedModel, IntegerEmbeddedModel
+from .models import (
+    ForeignKeyModel,
+    IntegerEmbeddedModel,
+    LargeFlatModel,
+    LargeNestedModel,
+    SmallFlatModel,
+    SmallFlatModelFk,
+    StringEmbeddedModel,
+)

OUTPUT_FILE = os.environ.get("OUTPUT_FILE")

@@ -18,7 +25,8 @@
MAX_ITERATION_TIME = 60
NUM_DOCS = 10000

-result_data: List = []
+result_data: list = []
+

def tearDownModule():
    output = json.dumps(result_data, indent=4)
@@ -28,6 +36,7 @@ def tearDownModule():
    else:
        print(output)

+
class Timer:
    def __enter__(self):
        self.start = time.monotonic()
@@ -37,6 +46,7 @@ def __exit__(self, *args):
        self.end = time.monotonic()
        self.interval = self.end - self.start

+
# Copied from the driver benchmarking suite.
class PerformanceTest:
    dataset: str
@@ -88,9 +98,8 @@ def percentile(self, percentile):
            sorted_results = sorted(self.results)
            percentile_index = int(len(sorted_results) * percentile / 100) - 1
            return sorted_results[percentile_index]
-        else:
-            self.fail("Test execution failed")
-            return None
+        self.fail("Test execution failed")
+        return None

    def runTest(self):
        results = []
@@ -123,12 +132,13 @@ class SmallFlatDocTest(PerformanceTest):

    def setUp(self):
        super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
            self.document = json.load(data)

        self.data_size = len(encode(self.document)) * NUM_DOCS
        self.documents = [self.document.copy() for _ in range(NUM_DOCS)]

+
class TestSmallFlatDocCreation(SmallFlatDocTest, TestCase):
    def do_task(self):
        for doc in self.documents:
@@ -138,6 +148,7 @@ def do_task(self):
    def after(self):
        SmallFlatModel.objects.all().delete()

+
class TestSmallFlatDocUpdate(SmallFlatDocTest, TestCase):
    def setUp(self):
        super().setUp()
@@ -155,6 +166,7 @@ def do_task(self):
    def after(self):
        SmallFlatModel.objects.all().delete()

+
class TestSmallFlatDocFilterById(SmallFlatDocTest, TestCase):
    def setUp(self):
        super().setUp()
@@ -172,6 +184,7 @@ def tearDown(self):
        super().tearDown()
        SmallFlatModel.objects.all().delete()

+
class TestSmallFlatDocFilterByForeignKey(SmallFlatDocTest, TestCase):
    def setUp(self):
        super().setUp()
@@ -192,17 +205,19 @@ def tearDown(self):
        super().tearDown()
        SmallFlatModelFk.objects.all().delete()

+
class LargeFlatDocTest(PerformanceTest):
    dataset = "large_doc.json"

    def setUp(self):
        super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
            self.document = json.load(data)

        self.data_size = len(encode(self.document)) * NUM_DOCS
        self.documents = [self.document.copy() for _ in range(NUM_DOCS)]

+
class TestLargeFlatDocCreation(LargeFlatDocTest, TestCase):
    def do_task(self):
        for doc in self.documents:
@@ -212,6 +227,7 @@ def do_task(self):
    def after(self):
        LargeFlatModel.objects.all().delete()

+
class TestLargeFlatDocUpdate(LargeFlatDocTest, TestCase):
    def setUp(self):
        super().setUp()
@@ -229,12 +245,13 @@ def do_task(self):
    def after(self):
        LargeFlatModel.objects.all().delete()

+
class LargeNestedDocTest(PerformanceTest):
    dataset = "large_doc_nested.json"

    def setUp(self):
        super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
            self.document = json.load(data)

        self.data_size = len(encode(self.document)) * NUM_DOCS
@@ -261,13 +278,15 @@ def create_model(self):
                setattr(model, k, embedded_int_model)
            model.save()

+
class TestLargeNestedDocCreation(LargeNestedDocTest, TestCase):
    def do_task(self):
        self.create_model()

    def after(self):
        LargeNestedModel.objects.all().delete()

+
class TestLargeNestedDocUpdate(LargeNestedDocTest, TestCase):
    def setUp(self):
        super().setUp()
@@ -283,11 +302,14 @@ def do_task(self):
            model.embedded_str_doc_1.field1 = "updated_value"
            model.save()

+
class TestLargeNestedDocFilterById(LargeNestedDocTest, TestCase):
    def setUp(self):
        super().setUp()
        self.create_model()
-        self.ids = [model.embedded_str_doc_1.unique_id for model in list(LargeNestedModel.objects.all())]
+        self.ids = [
+            model.embedded_str_doc_1.unique_id for model in list(LargeNestedModel.objects.all())
+        ]

    def do_task(self):
        for _id in self.ids:
@@ -297,11 +319,15 @@ def tearDown(self):
        super().tearDown()
        LargeNestedModel.objects.all().delete()

+
class TestLargeNestedDocFilterArray(LargeNestedDocTest, TestCase):
    def setUp(self):
        super().setUp()
        self.create_model()
-        self.ids = [model.embedded_str_doc_array[0].unique_id for model in list(LargeNestedModel.objects.all())]
+        self.ids = [
+            model.embedded_str_doc_array[0].unique_id
+            for model in list(LargeNestedModel.objects.all())
+        ]

    def do_task(self):
        for _id in self.ids:
@@ -310,5 +336,3 @@ def do_task(self):
    def tearDown(self):
        super().tearDown()
        LargeNestedModel.objects.all().delete()
-
-