Skip to content

Commit cade791

Browse files
committed
MOD: Upgrade databento-dbn to v0.15.0
1 parent ab5d54d commit cade791

File tree

7 files changed

+16
-27
lines changed

7 files changed

+16
-27
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ This release adds support for transcoding DBN data into Apache Parquet.
66

77
#### Enhancements
88
- Added `DBNStore.to_parquet` for transcoding DBN data into Apache Parquet using `pyarrow`
9+
- Upgraded `databento-dbn` to 0.15.0
910

1011
## 0.25.0 - 2024-01-09
1112

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.8 and
3232
The minimum dependencies as found in the `pyproject.toml` are also listed below:
3333
- python = "^3.8"
3434
- aiohttp = "^3.8.3"
35-
- databento-dbn = "0.14.2"
35+
- databento-dbn = "0.15.0"
3636
- numpy = ">=1.23.5"
3737
- pandas = ">=1.5.3"
3838
- pyarrow = ">=13.0.0"

databento/common/dbnstore.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1192,7 +1192,7 @@ def _transcode(
11921192
pretty_ts=pretty_ts,
11931193
has_metadata=True,
11941194
map_symbols=map_symbols,
1195-
symbol_interval_map=symbol_map, # type: ignore [arg-type]
1195+
symbol_interval_map=symbol_map,
11961196
schema=schema,
11971197
)
11981198

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ aiohttp = [
3232
{version = "^3.8.3", python = "<3.12"},
3333
{version = "^3.9.0", python = "^3.12"}
3434
]
35-
databento-dbn = "0.14.2"
35+
databento-dbn = "0.15.0"
3636
numpy = [
3737
{version = ">=1.23.5", python = "<3.12"},
3838
{version = "^1.26.0", python = "^3.12"}

tests/mock_live_server.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -434,7 +434,7 @@ def _(self, message: SessionStart) -> None:
434434
self.__transport.close()
435435

436436
elif self.mode is MockLiveMode.REPEAT:
437-
metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], []) # type: ignore [call-arg]
437+
metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], [])
438438
self.__transport.write(bytes(metadata))
439439

440440
loop = asyncio.get_event_loop()

tests/test_common_symbology.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def create_symbol_mapping_message(
148148
SymbolMappingMsg
149149
150150
"""
151-
return SymbolMappingMsg( # type: ignore [call-arg]
151+
return SymbolMappingMsg(
152152
publisher_id=publisher_id,
153153
instrument_id=instrument_id,
154154
ts_event=ts_event,
@@ -172,7 +172,7 @@ def create_metadata(
172172
limit: int | None = None,
173173
ts_out: bool = False,
174174
) -> Metadata:
175-
return Metadata( # type: ignore [call-arg]
175+
return Metadata(
176176
dataset=dataset,
177177
start=start,
178178
stype_out=stype_out,
@@ -357,7 +357,7 @@ def test_instrument_map_insert_symbol_mapping_message_v1(
357357
start_ts=start_date,
358358
end_ts=end_date,
359359
)
360-
sym_msg_v1 = SymbolMappingMsgV1( # type: ignore [call-arg]
360+
sym_msg_v1 = SymbolMappingMsgV1(
361361
publisher_id=sym_msg.publisher_id,
362362
instrument_id=sym_msg.instrument_id,
363363
ts_event=sym_msg.ts_event,

tests/test_historical_bento.py

Lines changed: 8 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -782,8 +782,8 @@ def test_dbnstore_iterable(
782782
dbnstore = DBNStore.from_bytes(data=stub_data)
783783

784784
record_list: list[DBNRecord] = list(dbnstore)
785-
first: MBOMsg = record_list[0] # type: ignore
786-
second: MBOMsg = record_list[1] # type: ignore
785+
first: MBOMsg = record_list[0]
786+
second: MBOMsg = record_list[1]
787787

788788
# Assert
789789
assert first.hd.length == 14
@@ -882,9 +882,7 @@ def test_dbnstore_buffer_short(
882882
"""
883883
# Arrange
884884
dbn_stub_data = (
885-
zstandard.ZstdDecompressor()
886-
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
887-
.read()
885+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
888886
)
889887

890888
# Act
@@ -917,9 +915,7 @@ def test_dbnstore_buffer_long(
917915
"""
918916
# Arrange
919917
dbn_stub_data = (
920-
zstandard.ZstdDecompressor()
921-
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
922-
.read()
918+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
923919
)
924920

925921
# Act
@@ -952,9 +948,7 @@ def test_dbnstore_buffer_rewind(
952948
"""
953949
# Arrange
954950
dbn_stub_data = (
955-
zstandard.ZstdDecompressor()
956-
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
957-
.read()
951+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
958952
)
959953

960954
# Act
@@ -989,9 +983,7 @@ def test_dbnstore_to_ndarray_with_count(
989983
"""
990984
# Arrange
991985
dbn_stub_data = (
992-
zstandard.ZstdDecompressor()
993-
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
994-
.read()
986+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
995987
)
996988

997989
# Act
@@ -1074,9 +1066,7 @@ def test_dbnstore_to_ndarray_with_schema(
10741066
"""
10751067
# Arrange
10761068
dbn_stub_data = (
1077-
zstandard.ZstdDecompressor()
1078-
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
1079-
.read()
1069+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
10801070
)
10811071

10821072
# Act
@@ -1210,9 +1200,7 @@ def test_dbnstore_to_df_with_count(
12101200
"""
12111201
# Arrange
12121202
dbn_stub_data = (
1213-
zstandard.ZstdDecompressor()
1214-
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
1215-
.read()
1203+
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
12161204
)
12171205

12181206
# Act

0 commit comments

Comments (0)