@@ -35,15 +35,15 @@
 from bson.son import SON
 
 from pymongo.cursor import CursorType
-from pymongo.errors import (ConfigurationError,
-                            EncryptionError,
-                            InvalidOperation,
-                            OperationFailure)
 from pymongo.encryption import (Algorithm,
                                 ClientEncryption)
-from pymongo.errors import ConfigurationError, DocumentTooLarge
 from pymongo.encryption_options import AutoEncryptionOpts, _HAVE_PYMONGOCRYPT
-from pymongo.message import _COMMAND_OVERHEAD
+from pymongo.errors import (BulkWriteError,
+                            ConfigurationError,
+                            EncryptionError,
+                            InvalidOperation,
+                            OperationFailure,
+                            WriteError)
 from pymongo.mongo_client import MongoClient
 from pymongo.operations import InsertOne
 from pymongo.write_concern import WriteConcern
@@ -918,6 +918,10 @@ def test_corpus_local_schema(self):
         self._test_corpus(opts)
 
 
+_2_MiB = 2097152
+_16_MiB = 16777216
+
+
 class TestBsonSizeBatches(EncryptionIntegrationTest):
     """Prose tests for BSON size limits and batch splitting."""
 
@@ -955,27 +959,14 @@ def tearDownClass(cls):
         super(TestBsonSizeBatches, cls).tearDownClass()
 
     def test_01_insert_succeeds_under_2MiB(self):
-        doc = {'_id': 'no_encryption_under_2mib',
-               'unencrypted': 'a' * ((2 ** 21) - 1000)}
+        doc = {'_id': 'over_2mib_under_16mib', 'unencrypted': 'a' * _2_MiB}
         self.coll_encrypted.insert_one(doc)
 
         # Same with bulk_write.
-        doc = {'_id': 'no_encryption_under_2mib_bulk',
-               'unencrypted': 'a' * ((2 ** 21) - 1000)}
+        doc['_id'] = 'over_2mib_under_16mib_bulk'
         self.coll_encrypted.bulk_write([InsertOne(doc)])
 
-    def test_02_insert_fails_over_2MiB(self):
-        doc = {'_id': 'no_encryption_over_2mib',
-               'unencrypted': 'a' * (2 ** 21 + _COMMAND_OVERHEAD)}
-
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.insert_one(doc)
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.insert_many([doc])
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.bulk_write([InsertOne(doc)])
-
-    def test_03_insert_succeeds_over_2MiB_post_encryption(self):
+    def test_02_insert_succeeds_over_2MiB_post_encryption(self):
         doc = {'_id': 'encryption_exceeds_2mib',
                'unencrypted': 'a' * ((2 ** 21) - 2000)}
         doc.update(json_data('limits', 'limits-doc.json'))
@@ -985,29 +976,53 @@ def test_03_insert_succeeds_over_2MiB_post_encryption(self):
         doc['_id'] = 'encryption_exceeds_2mib_bulk'
         self.coll_encrypted.bulk_write([InsertOne(doc)])
 
-    def test_04_bulk_batch_split(self):
-        doc1 = {'_id': 'no_encryption_under_2mib_1',
-                'unencrypted': 'a' * ((2 ** 21) - 1000)}
-        doc2 = {'_id': 'no_encryption_under_2mib_2',
-                'unencrypted': 'a' * ((2 ** 21) - 1000)}
+    def test_03_bulk_batch_split(self):
+        doc1 = {'_id': 'over_2mib_1', 'unencrypted': 'a' * _2_MiB}
+        doc2 = {'_id': 'over_2mib_2', 'unencrypted': 'a' * _2_MiB}
         self.listener.reset()
         self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)])
         self.assertEqual(
             self.listener.started_command_names(), ['insert', 'insert'])
 
-    def test_05_bulk_batch_split(self):
+    def test_04_bulk_batch_split(self):
         limits_doc = json_data('limits', 'limits-doc.json')
         doc1 = {'_id': 'encryption_exceeds_2mib_1',
-                'unencrypted': 'a' * ((2 ** 21) - 2000)}
+                'unencrypted': 'a' * (_2_MiB - 2000)}
         doc1.update(limits_doc)
         doc2 = {'_id': 'encryption_exceeds_2mib_2',
-                'unencrypted': 'a' * ((2 ** 21) - 2000)}
+                'unencrypted': 'a' * (_2_MiB - 2000)}
         doc2.update(limits_doc)
         self.listener.reset()
         self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)])
         self.assertEqual(
             self.listener.started_command_names(), ['insert', 'insert'])
 
+    def test_05_insert_succeeds_just_under_16MiB(self):
+        doc = {'_id': 'under_16mib', 'unencrypted': 'a' * (_16_MiB - 2000)}
+        self.coll_encrypted.insert_one(doc)
+
+        # Same with bulk_write.
+        doc['_id'] = 'under_16mib_bulk'
+        self.coll_encrypted.bulk_write([InsertOne(doc)])
+
+    def test_06_insert_fails_over_16MiB(self):
+        limits_doc = json_data('limits', 'limits-doc.json')
+        doc = {'_id': 'encryption_exceeds_16mib',
+               'unencrypted': 'a' * (_16_MiB - 2000)}
+        doc.update(limits_doc)
+
+        with self.assertRaisesRegex(WriteError, 'object to insert too large'):
+            self.coll_encrypted.insert_one(doc)
+
+        # Same with bulk_write.
+        doc['_id'] = 'encryption_exceeds_16mib_bulk'
+        with self.assertRaises(BulkWriteError) as ctx:
+            self.coll_encrypted.bulk_write([InsertOne(doc)])
+        err = ctx.exception.details['writeErrors'][0]
+        self.assertEqual(2, err['code'])
+        self.assertIn('object to insert too large', err['errmsg'])
+
+
 
 class TestCustomEndpoint(EncryptionIntegrationTest):
     """Prose tests for creating data keys with a custom endpoint."""