
Commit 248448a

Merge branch 'master' of github.com:mongodb/mongo-python-driver

2 parents: 4d845ea + a232b65

14 files changed: +162 -102

.evergreen/config.yml

Lines changed: 9 additions & 0 deletions

@@ -435,6 +435,9 @@ functions:
           if [ -n "${TEST_INDEX_MANAGEMENT}" ]; then
             export TEST_INDEX_MANAGEMENT=1
           fi
+          if [ -n "${SKIP_CSOT_TESTS}" ]; then
+            export SKIP_CSOT_TESTS=1
+          fi

           GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \
           PYTHON_BINARY=${PYTHON_BINARY} \
@@ -2072,13 +2075,17 @@ axes:
       skip_EC2_auth_test: true
       skip_ECS_auth_test: true
       skip_web_identity_auth_test: true
+      # CSOT tests are unreliable on our slow macOS hosts.
+      SKIP_CSOT_TESTS: true
   - id: macos-arm64
     display_name: "macOS Arm64"
     run_on: macos-14-arm64
     variables:
       skip_EC2_auth_test: true
       skip_ECS_auth_test: true
      skip_web_identity_auth_test: true
+      # CSOT tests are unreliable on our slow macOS hosts.
+      SKIP_CSOT_TESTS: true
   - id: rhel7
     display_name: "RHEL 7.x"
     run_on: rhel79-small
@@ -2121,6 +2128,8 @@ axes:
       skip_EC2_auth_test: true
       skip_web_identity_auth_test: true
       venv_bin_dir: "Scripts"
+      # CSOT tests are unreliable on our slow Windows hosts.
+      SKIP_CSOT_TESTS: true

   # Test with authentication?
   - id: auth
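
How the test suite consumes SKIP_CSOT_TESTS is not part of this diff; it only exports the variable on the slow macOS and Windows hosts. A minimal sketch of the usual pattern (hypothetical helper, not code from this commit) would be:

```python
import os
import unittest

# Hypothetical helper: skip a CSOT test whenever the Evergreen axis
# exported SKIP_CSOT_TESTS (e.g. on the slow macOS/Windows hosts above).
skip_csot = unittest.skipIf(
    os.environ.get("SKIP_CSOT_TESTS"),
    "CSOT tests are unreliable on this host",
)
```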

green_framework_test.py

Lines changed: 11 additions & 1 deletion

@@ -60,8 +60,18 @@ def run(framework_name, *args):
     # Monkey-patch.
     FRAMEWORKS[framework_name]()

+    arg_list = list(args)
+
+    # Never run async tests with a framework
+    if len(arg_list) <= 1:
+        arg_list.extend(["-m", "not default_async and default"])
+    else:
+        for i in range(len(arg_list) - 1):
+            if "-m" in arg_list[i]:
+                arg_list[i + 1] = f"not default_async and {arg_list[i + 1]}"
+
     # Run the tests.
-    sys.exit(pytest.main(list(args)))
+    sys.exit(pytest.main(arg_list))


 def main():
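
In effect, the new logic forces a marker expression that excludes default_async: with no extra args it appends one, and otherwise it prepends "not default_async and" to whatever expression follows an existing "-m". A worked illustration of that rewriting (sample input, not code from the commit):

```python
# Same rewriting as the diff above, applied to a sample argument tuple.
args = ("test/test_client.py", "-m", "default")
arg_list = list(args)
if len(arg_list) <= 1:
    arg_list.extend(["-m", "not default_async and default"])
else:
    for i in range(len(arg_list) - 1):
        if "-m" in arg_list[i]:
            arg_list[i + 1] = f"not default_async and {arg_list[i + 1]}"
print(arg_list)  # ['test/test_client.py', '-m', 'not default_async and default']
```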

hatch.toml

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ features = ["test"]
 [envs.test.scripts]
 test = "pytest -v --durations=5 --maxfail=10 {args}"
 test-eg = "bash ./.evergreen/run-tests.sh {args}"
-test-async = "test test/asynchronous/ {args}"
+test-async = "pytest -v --durations=5 --maxfail=10 -m default_async {args}"
 test-mockupdb = ["pip install -U git+https://github.com/ajdavis/mongo-mockup-db@master", "test -m mockupdb"]

 [envs.encryption]
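
The test-async script previously reused the test script with a path filter; it now invokes pytest directly and selects the async suite by the default_async marker. A programmatic equivalent (a sketch using pytest's documented pytest.main entry point, with no extra args):

```python
import sys

import pytest

# Equivalent of `hatch run test:test-async` with no extra args:
# select the async suite by marker rather than by test/asynchronous/ path.
sys.exit(pytest.main(["-v", "--durations=5", "--maxfail=10", "-m", "default_async"]))
```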

pyproject.toml

Lines changed: 2 additions & 1 deletion

@@ -70,7 +70,7 @@ zstd = ["requirements/zstd.txt"]

 [tool.pytest.ini_options]
 minversion = "7"
-addopts = ["-ra", "--strict-config", "--strict-markers", "--junitxml=xunit-results/TEST-results.xml", "-m default"]
+addopts = ["-ra", "--strict-config", "--strict-markers", "--junitxml=xunit-results/TEST-results.xml", "-m default or default_async"]
 testpaths = ["test"]
 log_cli_level = "INFO"
 faulthandler_timeout = 1500
@@ -108,6 +108,7 @@ markers = [
     "load_balancer: load balancer tests",
     "mockupdb: tests that rely on mockupdb",
     "default: default test suite",
+    "default_async: default async test suite",
 ]

 [tool.mypy]
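
Because --strict-markers is in effect, the new default_async entry must be registered here before any test can carry it, and the widened -m expression makes a plain pytest run collect tests bearing either marker. A minimal sketch of how tests fall in or out of that expression (hypothetical test module, not from this commit):

```python
import pytest


@pytest.mark.default
def test_sync_suite_member():
    # Selected: "default" satisfies "-m default or default_async".
    assert True


@pytest.mark.default_async
def test_async_suite_member():
    # Selected: "default_async" satisfies the expression too.
    assert True


@pytest.mark.mockupdb
def test_needs_mockupdb():
    # Deselected by the default addopts; run with `-m mockupdb` instead.
    assert True
```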

test/__init__.py

Lines changed: 4 additions & 0 deletions

@@ -852,6 +852,10 @@ def max_bson_size(self):
     def max_write_batch_size(self):
         return (self.hello)["maxWriteBatchSize"]

+    @property
+    def max_message_size_bytes(self):
+        return (self.hello)["maxMessageSizeBytes"]
+

 # Reusable client context
 client_context = ClientContext()
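
Callers can now read the limit as an attribute instead of indexing the hello document at each call site; for example (a sketch, assuming a connected client_context):

```python
# Sync context: a plain property, read directly from the cached hello response.
limit = client_context.max_message_size_bytes
assert limit == client_context.hello["maxMessageSizeBytes"]
```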

test/asynchronous/__init__.py

Lines changed: 4 additions & 0 deletions

@@ -854,6 +854,10 @@ async def max_bson_size(self):
     async def max_write_batch_size(self):
         return (await self.hello)["maxWriteBatchSize"]

+    @property
+    async def max_message_size_bytes(self):
+        return (await self.hello)["maxMessageSizeBytes"]
+

 # Reusable client context
 async_client_context = AsyncClientContext()
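
Because the getter is an async def wrapped in @property, attribute access returns a coroutine, so callers await the attribute itself; this is exactly how the new asyncSetUp methods in the test file below consume it (a sketch, assuming a connected async_client_context):

```python
# Async context: the property access yields a coroutine, so await the attribute.
limit = await async_client_context.max_message_size_bytes
```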

test/asynchronous/conftest.py

Lines changed: 2 additions & 4 deletions

@@ -1,5 +1,6 @@
 from __future__ import annotations

+from test import pytest_conf
 from test.asynchronous import async_setup, async_teardown

 import pytest_asyncio
@@ -14,7 +15,4 @@ async def test_setup_and_teardown():
     await async_teardown()


-def pytest_collection_modifyitems(items, config):
-    for item in items:
-        if not any(item.iter_markers()):
-            item.add_marker("default")
+pytest_collection_modifyitems = pytest_conf.pytest_collection_modifyitems
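
The body of the shared pytest_conf hook is not shown in this commit view. Assuming it generalizes the local hook removed above, a plausible reconstruction (hypothetical, loudly labeled as such) might be:

```python
# Hypothetical reconstruction of pytest_conf.pytest_collection_modifyitems --
# this commit view does not show its body. Assumption: it tags unmarked tests
# as "default", and unmarked tests under test/asynchronous/ as "default_async".
def pytest_collection_modifyitems(items, config):
    for item in items:
        if not any(item.iter_markers()):
            if "asynchronous" in str(item.fspath):
                item.add_marker("default_async")
            else:
                item.add_marker("default")
```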

test/asynchronous/test_client_bulk_write.py

Lines changed: 52 additions & 45 deletions

@@ -56,20 +56,24 @@ async def test_returns_error_if_no_namespace_provided(self):

 # https://github.com/mongodb/specifications/tree/master/source/crud/tests
 class TestClientBulkWriteCRUD(AsyncIntegrationTest):
+    async def asyncSetUp(self):
+        self.max_write_batch_size = await async_client_context.max_write_batch_size
+        self.max_bson_object_size = await async_client_context.max_bson_size
+        self.max_message_size_bytes = await async_client_context.max_message_size_bytes
+
     @async_client_context.require_version_min(8, 0, 0, -24)
     async def test_batch_splits_if_num_operations_too_large(self):
         listener = OvertCommandListener()
         client = await async_rs_or_single_client(event_listeners=[listener])
         self.addAsyncCleanup(client.aclose)

-        max_write_batch_size = (await async_client_context.hello)["maxWriteBatchSize"]
         models = []
-        for _ in range(max_write_batch_size + 1):
+        for _ in range(self.max_write_batch_size + 1):
             models.append(InsertOne(namespace="db.coll", document={"a": "b"}))
         self.addAsyncCleanup(client.db["coll"].drop)

         result = await client.bulk_write(models=models)
-        self.assertEqual(result.inserted_count, max_write_batch_size + 1)
+        self.assertEqual(result.inserted_count, self.max_write_batch_size + 1)

         bulk_write_events = []
         for event in listener.started_events:
@@ -78,7 +82,7 @@ async def test_batch_splits_if_num_operations_too_large(self):
         self.assertEqual(len(bulk_write_events), 2)

         first_event, second_event = bulk_write_events
-        self.assertEqual(len(first_event.command["ops"]), max_write_batch_size)
+        self.assertEqual(len(first_event.command["ops"]), self.max_write_batch_size)
         self.assertEqual(len(second_event.command["ops"]), 1)
         self.assertEqual(first_event.operation_id, second_event.operation_id)

@@ -88,12 +92,9 @@ async def test_batch_splits_if_ops_payload_too_large(self):
         client = await async_rs_or_single_client(event_listeners=[listener])
         self.addAsyncCleanup(client.aclose)

-        max_message_size_bytes = (await async_client_context.hello)["maxMessageSizeBytes"]
-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
-
         models = []
-        num_models = int(max_message_size_bytes / max_bson_object_size + 1)
-        b_repeated = "b" * (max_bson_object_size - 500)
+        num_models = int(self.max_message_size_bytes / self.max_bson_object_size + 1)
+        b_repeated = "b" * (self.max_bson_object_size - 500)
         for _ in range(num_models):
             models.append(
                 InsertOne(
@@ -126,7 +127,6 @@ async def test_collects_write_concern_errors_across_batches(self):
             retryWrites=False,
         )
         self.addAsyncCleanup(client.aclose)
-        max_write_batch_size = (await async_client_context.hello)["maxWriteBatchSize"]

         fail_command = {
             "configureFailPoint": "failCommand",
@@ -138,7 +138,7 @@ async def test_collects_write_concern_errors_across_batches(self):
         }
         async with self.fail_point(fail_command):
             models = []
-            for _ in range(max_write_batch_size + 1):
+            for _ in range(self.max_write_batch_size + 1):
                 models.append(
                     InsertOne(
                         namespace="db.coll",
@@ -152,7 +152,7 @@ async def test_collects_write_concern_errors_across_batches(self):
            self.assertEqual(len(context.exception.write_concern_errors), 2)  # type: ignore[arg-type]
            self.assertIsNotNone(context.exception.partial_result)
            self.assertEqual(
-                context.exception.partial_result.inserted_count, max_write_batch_size + 1
+                context.exception.partial_result.inserted_count, self.max_write_batch_size + 1
            )

        bulk_write_events = []
@@ -172,9 +172,8 @@ async def test_collects_write_errors_across_batches_unordered(self):
         await collection.drop()
         await collection.insert_one(document={"_id": 1})

-        max_write_batch_size = (await async_client_context.hello)["maxWriteBatchSize"]
         models = []
-        for _ in range(max_write_batch_size + 1):
+        for _ in range(self.max_write_batch_size + 1):
             models.append(
                 InsertOne(
                     namespace="db.coll",
@@ -184,7 +183,7 @@ async def test_collects_write_errors_across_batches_unordered(self):

         with self.assertRaises(ClientBulkWriteException) as context:
             await client.bulk_write(models=models, ordered=False)
-        self.assertEqual(len(context.exception.write_errors), max_write_batch_size + 1)  # type: ignore[arg-type]
+        self.assertEqual(len(context.exception.write_errors), self.max_write_batch_size + 1)  # type: ignore[arg-type]

         bulk_write_events = []
         for event in listener.started_events:
@@ -203,9 +202,8 @@ async def test_collects_write_errors_across_batches_ordered(self):
         await collection.drop()
         await collection.insert_one(document={"_id": 1})

-        max_write_batch_size = (await async_client_context.hello)["maxWriteBatchSize"]
         models = []
-        for _ in range(max_write_batch_size + 1):
+        for _ in range(self.max_write_batch_size + 1):
             models.append(
                 InsertOne(
                     namespace="db.coll",
@@ -233,10 +231,9 @@ async def test_handles_cursor_requiring_getMore(self):
         self.addAsyncCleanup(collection.drop)
         await collection.drop()

-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
         models = []
-        a_repeated = "a" * (max_bson_object_size // 2)
-        b_repeated = "b" * (max_bson_object_size // 2)
+        a_repeated = "a" * (self.max_bson_object_size // 2)
+        b_repeated = "b" * (self.max_bson_object_size // 2)
         models.append(
             UpdateOne(
                 namespace="db.coll",
@@ -275,12 +272,11 @@ async def test_handles_cursor_requiring_getMore_within_transaction(self):
         self.addAsyncCleanup(collection.drop)
         await collection.drop()

-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
         async with client.start_session() as session:
             await session.start_transaction()
             models = []
-            a_repeated = "a" * (max_bson_object_size // 2)
-            b_repeated = "b" * (max_bson_object_size // 2)
+            a_repeated = "a" * (self.max_bson_object_size // 2)
+            b_repeated = "b" * (self.max_bson_object_size // 2)
             models.append(
                 UpdateOne(
                     namespace="db.coll",
@@ -319,16 +315,15 @@ async def test_handles_getMore_error(self):
         self.addAsyncCleanup(collection.drop)
         await collection.drop()

-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
         fail_command = {
             "configureFailPoint": "failCommand",
             "mode": {"times": 1},
             "data": {"failCommands": ["getMore"], "errorCode": 8},
         }
         async with self.fail_point(fail_command):
             models = []
-            a_repeated = "a" * (max_bson_object_size // 2)
-            b_repeated = "b" * (max_bson_object_size // 2)
+            a_repeated = "a" * (self.max_bson_object_size // 2)
+            b_repeated = "b" * (self.max_bson_object_size // 2)
             models.append(
                 UpdateOne(
                     namespace="db.coll",
@@ -370,8 +365,7 @@ async def test_returns_error_if_unacknowledged_too_large_insert(self):
         client = await async_rs_or_single_client(event_listeners=[listener])
         self.addAsyncCleanup(client.aclose)

-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
-        b_repeated = "b" * max_bson_object_size
+        b_repeated = "b" * self.max_bson_object_size

         # Insert document.
         models_insert = [InsertOne(namespace="db.coll", document={"a": b_repeated})]
@@ -384,25 +378,35 @@ async def test_returns_error_if_unacknowledged_too_large_insert(self):
         await client.bulk_write(models=models_replace, write_concern=WriteConcern(w=0))

     async def _setup_namespace_test_models(self):
-        max_message_size_bytes = (await async_client_context.hello)["maxMessageSizeBytes"]
-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
-
-        ops_bytes = max_message_size_bytes - 1122
-        num_models = ops_bytes // max_bson_object_size
-        remainder_bytes = ops_bytes % max_bson_object_size
+        # See prose test specification below for details on these calculations.
+        # https://github.com/mongodb/specifications/tree/master/source/crud/tests#details-on-size-calculations
+        _EXISTING_BULK_WRITE_BYTES = 1122
+        _OPERATION_DOC_BYTES = 57
+        _NAMESPACE_DOC_BYTES = 217
+
+        # When compression is enabled, max_message_size is
+        # smaller to account for compression message header.
+        if async_client_context.client_options.get("compressors"):
+            max_message_size_bytes = self.max_message_size_bytes - 16
+        else:
+            max_message_size_bytes = self.max_message_size_bytes
+
+        ops_bytes = max_message_size_bytes - _EXISTING_BULK_WRITE_BYTES
+        num_models = ops_bytes // self.max_bson_object_size
+        remainder_bytes = ops_bytes % self.max_bson_object_size

         models = []
-        b_repeated = "b" * (max_bson_object_size - 57)
+        b_repeated = "b" * (self.max_bson_object_size - _OPERATION_DOC_BYTES)
         for _ in range(num_models):
             models.append(
                 InsertOne(
                     namespace="db.coll",
                     document={"a": b_repeated},
                 )
             )
-        if remainder_bytes >= 217:
+        if remainder_bytes >= _NAMESPACE_DOC_BYTES:
             num_models += 1
-            b_repeated = "b" * (remainder_bytes - 57)
+            b_repeated = "b" * (remainder_bytes - _OPERATION_DOC_BYTES)
             models.append(
                 InsertOne(
                     namespace="db.coll",
@@ -485,17 +489,15 @@ async def test_returns_error_if_no_writes_can_be_added_to_ops(self):
         client = await async_rs_or_single_client()
         self.addAsyncCleanup(client.aclose)

-        max_message_size_bytes = (await async_client_context.hello)["maxMessageSizeBytes"]
-
         # Document too large.
-        b_repeated = "b" * max_message_size_bytes
+        b_repeated = "b" * self.max_message_size_bytes
         models = [InsertOne(namespace="db.coll", document={"a": b_repeated})]
         with self.assertRaises(InvalidOperation) as context:
             await client.bulk_write(models=models)
         self.assertIn("cannot do an empty bulk write", context.exception._message)

         # Namespace too large.
-        c_repeated = "c" * max_message_size_bytes
+        c_repeated = "c" * self.max_message_size_bytes
         namespace = f"db.{c_repeated}"
         models = [InsertOne(namespace=namespace, document={"a": "b"})]
         with self.assertRaises(InvalidOperation) as context:
@@ -522,27 +524,32 @@ async def test_returns_error_if_auto_encryption_configured(self):

 # https://github.com/mongodb/specifications/blob/master/source/client-side-operations-timeout/tests/README.md#11-multi-batch-bulkwrites
 class TestClientBulkWriteTimeout(AsyncIntegrationTest):
+    async def asyncSetUp(self):
+        self.max_write_batch_size = await async_client_context.max_write_batch_size
+        self.max_bson_object_size = await async_client_context.max_bson_size
+        self.max_message_size_bytes = await async_client_context.max_message_size_bytes
+
     @async_client_context.require_version_min(8, 0, 0, -24)
     @async_client_context.require_failCommand_fail_point
     async def test_timeout_in_multi_batch_bulk_write(self):
+        _OVERHEAD = 500
+
         internal_client = await async_rs_or_single_client(timeoutMS=None)
         self.addAsyncCleanup(internal_client.aclose)

         collection = internal_client.db["coll"]
         self.addAsyncCleanup(collection.drop)
         await collection.drop()

-        max_bson_object_size = (await async_client_context.hello)["maxBsonObjectSize"]
-        max_message_size_bytes = (await async_client_context.hello)["maxMessageSizeBytes"]
         fail_command = {
             "configureFailPoint": "failCommand",
             "mode": {"times": 2},
             "data": {"failCommands": ["bulkWrite"], "blockConnection": True, "blockTimeMS": 1010},
         }
         async with self.fail_point(fail_command):
             models = []
-            num_models = int(max_message_size_bytes / max_bson_object_size + 1)
-            b_repeated = "b" * (max_bson_object_size - 500)
+            num_models = int(self.max_message_size_bytes / self.max_bson_object_size + 1)
+            b_repeated = "b" * (self.max_bson_object_size - _OVERHEAD)
             for _ in range(num_models):
                 models.append(
                     InsertOne(
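
For intuition on the batch-splitting arithmetic (`num_models = int(max_message_size_bytes / max_bson_object_size + 1)`): with assumed stock server limits of 48,000,000 bytes for maxMessageSizeBytes and 16,777,216 bytes (16 MiB) for maxBsonObjectSize, the tests build just enough near-16 MiB documents that they cannot fit in a single bulkWrite message, forcing a second batch. A worked example:

```python
# Worked example with assumed stock server limits; the tests read the real
# values from the server's hello response via the new context properties.
max_message_size_bytes = 48_000_000  # assumed maxMessageSizeBytes default
max_bson_object_size = 16_777_216    # assumed maxBsonObjectSize default (16 MiB)

num_models = int(max_message_size_bytes / max_bson_object_size + 1)
print(num_models)  # 3

payload = num_models * (max_bson_object_size - 500)
print(payload > max_message_size_bytes)  # True: the ops alone overflow one message
```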
