@@ -27,7 +27,6 @@
     ArcticDbNotYetImplemented,
     InternalException,
     UserInputException,
-    ArcticException,
 )
 from arcticdb import QueryBuilder
 from arcticdb.flattener import Flattener
@@ -36,12 +35,7 @@
 from arcticdb.version_store._store import VersionedItem
 from arcticdb_ext.exceptions import _ArcticLegacyCompatibilityException, StorageException
 from arcticdb_ext.storage import KeyType, NoDataFoundException
-from arcticdb_ext.version_store import (
-    NoSuchVersionException,
-    StreamDescriptorMismatch,
-    ManualClockVersionStore,
-    DataError,
-)
+from arcticdb_ext.version_store import NoSuchVersionException, StreamDescriptorMismatch, ManualClockVersionStore
 from arcticdb.util.test import (
     sample_dataframe,
     sample_dataframe_only_strings,
@@ -51,12 +45,10 @@
     config_context,
     distinct_timestamps,
 )
-from tests.conftest import Marks
 from tests.util.date import DateRange
 from arcticdb.util.test import equals
 from arcticdb.version_store._store import resolve_defaults
 from tests.util.mark import MACOS, MACOS_WHEEL_BUILD, xfail_azure_chars
-from tests.util.marking import marks
 
 
 @pytest.fixture()
@@ -847,9 +839,11 @@ def test_range_index(basic_store, sym):
     assert_equal(expected, vit.data)
 
 
+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     initial_timestamp = pd.Timestamp("2019-01-01")
     df = pd.DataFrame(data=np.arange(100), index=pd.date_range(initial_timestamp, periods=100))
     sym = "date_test"
@@ -895,9 +889,11 @@ def test_date_range(basic_store, use_date_range_clause):
     assert data_closed[data_closed.columns[0]][-1] == end_offset
 
 
+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_none(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_none(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     sym = "date_test2"
     rows = 100
     initial_timestamp = pd.Timestamp("2019-01-01")
@@ -914,9 +910,11 @@ def test_date_range_none(basic_store, use_date_range_clause):
     assert len(data) == rows
 
 
+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_start_equals_end(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_start_equals_end(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     sym = "date_test2"
     rows = 100
     initial_timestamp = pd.Timestamp("2019-01-01")
@@ -936,10 +934,12 @@ def test_date_range_start_equals_end(basic_store, use_date_range_clause):
     assert data[data.columns[0]][0] == start_offset
 
 
+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_row_sliced(basic_store_tiny_segment, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_row_sliced(basic_store_tiny_segment, use_date_range_clause, any_output_format):
     lib = basic_store_tiny_segment
+    lib._set_output_format_for_pipeline_tests(any_output_format)
     sym = "test_date_range_row_sliced"
     # basic_store_tiny_segment produces 2x2 segments
     num_rows = 6
@@ -1705,7 +1705,7 @@ def test_batch_write_then_list_symbol_without_cache(basic_store_factory):
     assert set(lib.list_symbols()) == set(symbols)
 
 
-@marks([Marks.storage, Marks.dedup])
+@pytest.mark.storage
 def test_batch_write_missing_keys_dedup(basic_store_factory):
     """When there is duplicate data to reuse for the current write, we need to access the index key of the previous
     versions in order to refer to the corresponding keys for the deduplicated data."""
@@ -2265,26 +2265,6 @@ def test_batch_read_meta_multiple_versions(object_version_store):
     assert results_dict["sym3"][0].metadata == {"meta3": 1}
     assert results_dict["sym2"][3].metadata == {"meta2": 4}
 
-    # We can supply only an array of symbols, including repeating symbols
-    results_dict = lib.batch_read_metadata_multi(["sym1", "sym2", "sym1", "sym3", "sym2", "sym1", "sym1"])
-    assert results_dict["sym1"][2].metadata == {"meta1": 3}
-    assert len(results_dict["sym1"]) == 1
-    assert results_dict["sym2"][3].metadata == {"meta2": 4}
-    assert results_dict["sym3"][0].metadata == {"meta3": 1}
-
-    # The lists are of different sizr
-    with pytest.raises(ArcticException):
-        results_dict = lib.batch_read_metadata_multi(["sym1", "sym2"], [0, 0, -2])
-
-    # With negative number we can go back from current versions
-    assert lib.batch_read_metadata_multi(["sym1", "sym1"], [-1, -2]) == lib.batch_read_metadata_multi(
-        ["sym1", "sym1"], [2, 1]
-    )
-
-    # Check DataError is thrown when requesting non-existing version
-    with pytest.raises(TypeError):  # Not a good error though - issue 10070002655
-        results_dict = lib.batch_read_metadata_multi(["sym1"], [10])
-
 
 @pytest.mark.storage
 def test_list_symbols(basic_store):
@@ -2830,10 +2810,12 @@ def test_batch_append_with_throw_exception(basic_store, three_col_df):
     )
 
 
+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause):
+@pytest.mark.storage
+def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause, any_output_format):
     lmdb_version_store = basic_store_tombstone_and_sync_passive
+    lmdb_version_store._set_output_format_for_pipeline_tests(any_output_format)
     symbols = []
     for i in range(5):
         symbols.append("sym_{}".format(i))
@@ -2872,7 +2854,6 @@ def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause):
 
 
 @pytest.mark.parametrize("use_row_range_clause", [True, False])
-@marks([Marks.pipeline])
 def test_batch_read_row_range(lmdb_version_store_v1, use_row_range_clause):
     lib = lmdb_version_store_v1
     num_symbols = 5