@@ -38,7 +38,7 @@ def r(request):

@pytest.mark.skipif(HIREDIS_AVAILABLE, reason="PythonParser only")
@pytest.mark.onlynoncluster
-#@skip_if_resp_version(2)
+# @skip_if_resp_version(2)
class TestCache:
    @pytest.mark.parametrize(
        "r",
@@ -120,59 +120,6 @@ def test_get_from_custom_cache(self, request, r, r2):
        # Make sure that new value was cached
        assert cache.get(("GET", "foo")) == b"barbar"

-    @pytest.mark.parametrize(
-        "r",
-        [
-            {
-                "cache": CacheToolsAdapter(TTLCache(128, 300)),
-                "use_cache": True,
-                "single_connection_client": True,
-            },
-            {
-                "cache": CacheToolsAdapter(TTLCache(128, 300)),
-                "use_cache": True,
-                "single_connection_client": False,
-            },
-        ],
-        ids=["single", "pool"],
-        indirect=True,
-    )
-    @pytest.mark.onlynoncluster
-    def test_get_from_cache_multithreaded(self, r):
-        cache = r.get_cache()
-        # Running commands over two threads
-        threading.Thread(target=r.set("foo", "bar")).start()
-        threading.Thread(target=r.set("bar", "foo")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that responses was cached.
-        assert cache.get(("GET", "foo")) == b"bar"
-        assert cache.get(("GET", "bar")) == b"foo"
-
-        threading.Thread(target=r.set("foo", "baz")).start()
-        threading.Thread(target=r.set("bar", "bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that new values was cached.
-        assert cache.get(("GET", "foo")) == b"baz"
-        assert cache.get(("GET", "bar")) == b"bar"
-
    @pytest.mark.parametrize(
        "r",
        [
@@ -512,48 +459,6 @@ def test_get_from_custom_cache(self, request, r, r2):
        # Make sure that new value was cached
        assert cache.get(("GET", "foo")) == b"barbar"

-    @pytest.mark.parametrize(
-        "r",
-        [{"cache": CacheToolsAdapter(TTLCache(128, 300)), "use_cache": True}],
-        indirect=True,
-    )
-    @pytest.mark.onlycluster
-    def test_get_from_cache_multithreaded(self, r):
-        cache = r.nodes_manager.get_node_from_slot(10).redis_connection.get_cache()
-        # Running commands over two threads
-        threading.Thread(target=r.set("foo", "bar")).start()
-        threading.Thread(target=r.set("bar", "foo")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that both values was cached.
-        assert cache.get(("GET", "foo")) == b"bar"
-        assert cache.get(("GET", "bar")) == b"foo"
-
-        # Running commands over two threads
-        threading.Thread(target=r.set("foo", "baz")).start()
-        threading.Thread(target=r.set("bar", "bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that new values was cached.
-        assert cache.get(("GET", "foo")) == b"baz"
-        assert cache.get(("GET", "bar")) == b"bar"
-
    @pytest.mark.parametrize(
        "r",
        [{"cache": CacheToolsAdapter(TTLCache(128, 300)), "use_cache": True}],
@@ -809,57 +714,6 @@ def test_get_from_custom_cache(self, request, r, r2):
        # Make sure that new value was cached
        assert cache.get(("GET", "foo")) == b"barbar"

-    @pytest.mark.parametrize(
-        "sentinel_setup",
-        [
-            {
-                "cache": CacheToolsAdapter(LRUCache(maxsize=128)),
-                "use_cache": True,
-                "force_master_ip": "localhost",
-            }
-        ],
-        indirect=True,
-    )
-    @pytest.mark.onlynoncluster
-    def test_get_from_cache_multithreaded(self, master):
-        cache = master.get_cache()
-
-        # Running commands over two threads
-        threading.Thread(target=master.set("foo", "bar")).start()
-        threading.Thread(target=master.set("bar", "foo")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Running commands over two threads
-        threading.Thread(target=master.get("foo")).start()
-        threading.Thread(target=master.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that both values was cached.
-        assert cache.get(("GET", "foo")) == b"bar"
-        assert cache.get(("GET", "bar")) == b"foo"
-
-        # Running commands over two threads
-        threading.Thread(target=master.set("foo", "baz")).start()
-        threading.Thread(target=master.set("bar", "bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Running commands over two threads
-        threading.Thread(target=master.get("foo")).start()
-        threading.Thread(target=master.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that new values was cached.
-        assert cache.get(("GET", "foo")) == b"baz"
-        assert cache.get(("GET", "bar")) == b"bar"
-
    @pytest.mark.parametrize(
        "sentinel_setup",
        [
@@ -994,53 +848,6 @@ def test_get_from_custom_cache(self, request, r, r2):
        # Make sure that new value was cached
        assert cache.get(("GET", "foo")) == b"barbar"

-    @pytest.mark.parametrize(
-        "r",
-        [
-            {
-                "cache": CacheToolsAdapter(TTLCache(128, 300)),
-                "use_cache": True,
-                "ssl": True,
-            }
-        ],
-        indirect=True,
-    )
-    @pytest.mark.onlynoncluster
-    def test_get_from_cache_multithreaded(self, r):
-        cache = r.get_cache()
-        # Running commands over two threads
-        threading.Thread(target=r.set("foo", "bar")).start()
-        threading.Thread(target=r.set("bar", "foo")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that responses was cached.
-        assert cache.get(("GET", "foo")) == b"bar"
-        assert cache.get(("GET", "bar")) == b"foo"
-
-        threading.Thread(target=r.set("foo", "baz")).start()
-        threading.Thread(target=r.set("bar", "bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        threading.Thread(target=r.get("foo")).start()
-        threading.Thread(target=r.get("bar")).start()
-
-        # Wait for command execution to be finished
-        time.sleep(0.1)
-
-        # Make sure that new values was cached.
-        assert cache.get(("GET", "foo")) == b"baz"
-        assert cache.get(("GET", "bar")) == b"bar"
-
    @pytest.mark.parametrize(
        "r",
        [
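A note on the removed tests: `threading.Thread(target=r.set("foo", "bar")).start()` evaluates `r.set("foo", "bar")` immediately in the calling thread and passes its return value to `Thread` as `target`, so the commands never actually ran concurrently and each worker thread only raised a silently discarded TypeError. A minimal sketch of a genuinely multithreaded variant is shown below; the `run_in_threads` helper is hypothetical and not part of redis-py, and it assumes `r` is a cache-enabled client configured like the fixtures in this file.

```python
import threading


def run_in_threads(*calls):
    """Hypothetical helper: run each (func, args) pair in its own thread.

    Passing the bound method itself as target (with args separate) makes the
    command execute inside the worker thread; join() replaces the fixed
    time.sleep(0.1) waits used in the deleted tests.
    """
    threads = [threading.Thread(target=func, args=args) for func, args in calls]
    for t in threads:
        t.start()
    for t in threads:
        t.join()


# Assumed usage, with `r` a client created with use_cache=True as in the fixtures above:
run_in_threads((r.set, ("foo", "bar")), (r.set, ("bar", "foo")))
run_in_threads((r.get, ("foo",)), (r.get, ("bar",)))
assert r.get_cache().get(("GET", "foo")) == b"bar"
assert r.get_cache().get(("GET", "bar")) == b"foo"
```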