@@ -27,20 +27,14 @@
     ArcticDbNotYetImplemented,
     InternalException,
     UserInputException,
-    ArcticException,
 )
 from arcticdb import QueryBuilder
 from arcticdb.flattener import Flattener
 from arcticdb.version_store import NativeVersionStore
 from arcticdb.version_store._store import VersionedItem
 from arcticdb_ext.exceptions import _ArcticLegacyCompatibilityException, StorageException
 from arcticdb_ext.storage import KeyType, NoDataFoundException
-from arcticdb_ext.version_store import (
-    NoSuchVersionException,
-    StreamDescriptorMismatch,
-    ManualClockVersionStore,
-    DataError,
-)
+from arcticdb_ext.version_store import NoSuchVersionException, StreamDescriptorMismatch, ManualClockVersionStore
 from arcticdb.util.test import (
     sample_dataframe,
     sample_dataframe_only_strings,
@@ -50,12 +44,10 @@
     config_context,
     distinct_timestamps,
 )
-from tests.conftest import Marks
 from tests.util.date import DateRange
 from arcticdb.util.test import equals
 from arcticdb.version_store._store import resolve_defaults
 from tests.util.mark import MACOS, MACOS_WHEEL_BUILD, xfail_azure_chars
-from tests.util.marking import marks


 @pytest.fixture()
@@ -830,9 +822,11 @@ def test_range_index(basic_store, sym):
     assert_equal(expected, vit.data)


+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     initial_timestamp = pd.Timestamp("2019-01-01")
     df = pd.DataFrame(data=np.arange(100), index=pd.date_range(initial_timestamp, periods=100))
     sym = "date_test"
@@ -878,9 +872,11 @@ def test_date_range(basic_store, use_date_range_clause):
     assert data_closed[data_closed.columns[0]][-1] == end_offset


+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_none(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_none(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     sym = "date_test2"
     rows = 100
     initial_timestamp = pd.Timestamp("2019-01-01")
@@ -897,9 +893,11 @@ def test_date_range_none(basic_store, use_date_range_clause):
     assert len(data) == rows


+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_start_equals_end(basic_store, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_start_equals_end(basic_store, use_date_range_clause, any_output_format):
+    basic_store._set_output_format_for_pipeline_tests(any_output_format)
     sym = "date_test2"
     rows = 100
     initial_timestamp = pd.Timestamp("2019-01-01")
@@ -919,10 +917,12 @@ def test_date_range_start_equals_end(basic_store, use_date_range_clause):
     assert data[data.columns[0]][0] == start_offset


+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_date_range_row_sliced(basic_store_tiny_segment, use_date_range_clause):
+@pytest.mark.storage
+def test_date_range_row_sliced(basic_store_tiny_segment, use_date_range_clause, any_output_format):
     lib = basic_store_tiny_segment
+    lib._set_output_format_for_pipeline_tests(any_output_format)
     sym = "test_date_range_row_sliced"
     # basic_store_tiny_segment produces 2x2 segments
     num_rows = 6
@@ -1657,7 +1657,7 @@ def test_batch_write_then_list_symbol_without_cache(basic_store_factory):
     assert set(lib.list_symbols()) == set(symbols)


-@marks([Marks.storage, Marks.dedup])
+@pytest.mark.storage
 def test_batch_write_missing_keys_dedup(basic_store_factory):
     """When there is duplicate data to reuse for the current write, we need to access the index key of the previous
     versions in order to refer to the corresponding keys for the deduplicated data."""
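The docstring above states the invariant under test: a deduplicated write must still be able to reach the index key of the prior version. A hedged sketch of that scenario, relying on the module's existing imports and assuming the factory accepts a `de_duplication` flag (the flag name is an assumption, not confirmed by this diff):

    # Sketch only: de_duplication is an assumed factory option; sample_dataframe
    # and assert_equal come from the module imports shown earlier.
    def sketch_dedup_reuses_previous_index_keys(basic_store_factory):
        lib = basic_store_factory(de_duplication=True)
        df = sample_dataframe()
        lib.write("sym", df)  # version 0 writes fresh data keys
        lib.write("sym", df)  # version 1 deduplicates, so its index key must
                              # reference version 0's data keys to stay readable
        assert_equal(df, lib.read("sym").data)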
@@ -2217,26 +2217,6 @@ def test_batch_read_meta_multiple_versions(object_version_store):
     assert results_dict["sym3"][0].metadata == {"meta3": 1}
     assert results_dict["sym2"][3].metadata == {"meta2": 4}

-    # We can supply only an array of symbols, including repeating symbols
-    results_dict = lib.batch_read_metadata_multi(["sym1", "sym2", "sym1", "sym3", "sym2", "sym1", "sym1"])
-    assert results_dict["sym1"][2].metadata == {"meta1": 3}
-    assert len(results_dict["sym1"]) == 1
-    assert results_dict["sym2"][3].metadata == {"meta2": 4}
-    assert results_dict["sym3"][0].metadata == {"meta3": 1}
-
-    # The lists are of different sizr
-    with pytest.raises(ArcticException):
-        results_dict = lib.batch_read_metadata_multi(["sym1", "sym2"], [0, 0, -2])
-
-    # With negative number we can go back from current versions
-    assert lib.batch_read_metadata_multi(["sym1", "sym1"], [-1, -2]) == lib.batch_read_metadata_multi(
-        ["sym1", "sym1"], [2, 1]
-    )
-
-    # Check DataError is thrown when requesting non-existing version
-    with pytest.raises(TypeError):  # Not a good error though - issue 10070002655
-        results_dict = lib.batch_read_metadata_multi(["sym1"], [10])
-


 @pytest.mark.storage
 def test_list_symbols(basic_store):
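The deleted comments above still describe the call shapes of `batch_read_metadata_multi`: a bare symbol list (with repeats) is accepted, and negative version numbers count back from the latest version. A usage sketch inferred purely from those assertions — keying the result by symbol and then by resolved version number is an inference, not documented behaviour:

    # Inferred from the deleted assertions: results appear keyed by symbol,
    # then by resolved version number, with metadata on each VersionedItem.
    results = lib.batch_read_metadata_multi(["sym1", "sym2"], [2, 3])
    assert results["sym1"][2].metadata == {"meta1": 3}
    assert results["sym2"][3].metadata == {"meta2": 4}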
@@ -2746,10 +2726,12 @@ def test_batch_append_with_throw_exception(basic_store, three_col_df):
     )


+@pytest.mark.pipeline  # Covered
 @pytest.mark.parametrize("use_date_range_clause", [True, False])
-@marks([Marks.pipeline, Marks.storage])
-def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause):
+@pytest.mark.storage
+def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause, any_output_format):
     lmdb_version_store = basic_store_tombstone_and_sync_passive
+    lmdb_version_store._set_output_format_for_pipeline_tests(any_output_format)
     symbols = []
     for i in range(5):
         symbols.append("sym_{}".format(i))
@@ -2788,7 +2770,6 @@ def test_batch_read_date_range(basic_store_tombstone_and_sync_passive, use_date_range_clause):


 @pytest.mark.parametrize("use_row_range_clause", [True, False])
-@marks([Marks.pipeline])
 def test_batch_read_row_range(lmdb_version_store_v1, use_row_range_clause):
     lib = lmdb_version_store_v1
     num_symbols = 5