Skip to content

Commit 4daf54e

Browse files
committed
MOD: Change behaviour of compression for endpoints
1 parent 0b3eb5b commit 4daf54e

File tree

4 files changed

+4
-7
lines changed

4 files changed

+4
-7
lines changed

databento/historical/api/batch.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -33,12 +33,12 @@ def __init__(self, key, gateway):
3333
def submit_job(
3434
self,
3535
dataset: Union[Dataset, str],
36+
schema: Union[Schema, str],
3637
symbols: Optional[Union[List[str], str]] = None,
37-
schema: Union[Schema, str] = "trades",
3838
start: Optional[Union[pd.Timestamp, date, str, int]] = None,
3939
end: Optional[Union[pd.Timestamp, date, str, int]] = None,
4040
encoding: Union[Encoding, str] = "dbz",
41-
compression: Optional[Union[Compression, str]] = "zstd",
41+
compression: Optional[Union[Compression, str]] = None,
4242
split_duration: Union[Duration, str] = "day",
4343
split_size: Optional[int] = None,
4444
packaging: Union[Packaging, str] = "none",
@@ -72,8 +72,9 @@ def submit_job(
7272
If an integer is passed, then this represents nanoseconds since UNIX epoch.
7373
encoding : Encoding or str {'dbz', 'csv', 'json'}, default 'dbz'
7474
The data encoding.
75-
compression : Compression or str {'none', 'zstd'}, default 'zstd'
75+
compression : Compression or str {'none', 'zstd'}, optional
7676
The data compression mode.
77+
If encoding is 'dbz', specifying a `compression` is invalid (the data is already zstd compressed).
7778
split_duration : Duration or str {'day', 'week', 'month', 'none'}, default 'day'
7879
The maximum time duration before batched data is split into multiple files.
7980
A week starts on Sunday UTC.

databento/historical/api/timeseries.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,6 @@ def stream(
9595
)
9696

9797
params.append(("encoding", Encoding.DBZ.value)) # Always requests DBZ
98-
params.append(("compression", Compression.ZSTD.value)) # Always requests ZSTD
9998

10099
self._pre_check_data_size(
101100
symbols=symbols,

tests/test_historical_client.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,6 @@ def test_request_full_definitions_expected_request(self, mocker) -> None:
120120
("stype_in", "native"),
121121
("stype_out", "product_id"),
122122
("encoding", "dbz"),
123-
("compression", "zstd"),
124123
]
125124
assert sorted(call["headers"].keys()) == ["accept", "user-agent"]
126125
assert call["headers"]["accept"] == "application/json"

tests/test_historical_timeseries.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,6 @@ def test_stream_sends_expected_request(self, mocker) -> None:
7979
("stype_in", "native"),
8080
("stype_out", "product_id"),
8181
("encoding", "dbz"),
82-
("compression", "zstd"),
8382
]
8483
assert call["timeout"] == (100, 100)
8584
assert isinstance(call["auth"], requests.auth.HTTPBasicAuth)
@@ -120,7 +119,6 @@ def test_stream_with_limit_sends_expected_request(self, mocker) -> None:
120119
("stype_out", "product_id"),
121120
("limit", "1000000"),
122121
("encoding", "dbz"),
123-
("compression", "zstd"),
124122
]
125123
assert call["timeout"] == (100, 100)
126124
assert isinstance(call["auth"], requests.auth.HTTPBasicAuth)

0 commit comments

Comments
 (0)