Commit 5d1dc0a

MOD: Remove or change metadata endpoints
1 parent fc754bf

5 files changed: +6 −157 lines

5 files changed

+6
-157
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
@@ -2,6 +2,9 @@
 
 ## 0.10.0 - TBD
 - Renamed `Bento` class to `DBNStore`
+- Removed `metadata.list_compressions` (redundant with docs)
+- Removed `metadata.list_encodings` (redundant with docs)
+- Removed optional `start` and `end` params from `metadata.list_schemas` (redundant)
 
 ## 0.9.0 - 2023-03-10
 - Removed `record_count` property from Bento class
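
For users migrating to 0.10.0, the removed metadata helpers only reported fixed value sets that the changelog notes are redundant with the docs. A minimal migration sketch, not part of this commit, assuming a configured `Historical` client (placeholder API key) and the value lists shown in the removed notebook cells:

    import databento as db

    client = db.Historical("YOUR_API_KEY")  # hypothetical placeholder key

    # Before 0.10.0 these values came from the removed endpoints:
    #   encodings = client.metadata.list_encodings()
    #   compressions = client.metadata.list_compressions()
    # After 0.10.0 the supported values are fixed and documented, so they
    # can simply be hard-coded (values per the removed notebook output).
    encodings = ["dbn", "csv", "json"]
    compressions = ["none", "zstd"]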

databento/historical/api/metadata.py

Lines changed: 1 addition & 48 deletions
@@ -86,12 +86,7 @@ def list_datasets(
         )
         return response.json()
 
-    def list_schemas(
-        self,
-        dataset: Union[Dataset, str],
-        start_date: Optional[Union[date, str]] = None,
-        end_date: Optional[Union[date, str]] = None,
-    ) -> List[str]:
+    def list_schemas(self, dataset: Union[Dataset, str]) -> List[str]:
         """
         Request all available data schemas from Databento.
 
@@ -101,12 +96,6 @@ def list_schemas(
         ----------
         dataset : Dataset or str
             The dataset code (string identifier) for the request.
-        start_date : date or str, optional
-            The start date (UTC) for the request range.
-            If `None` then first date available.
-        end_date : date or str, optional
-            The end date (UTC) for the request range.
-            If `None` then last date available.
 
         Returns
         -------
@@ -115,8 +104,6 @@ def list_schemas(
         """
         params: List[Tuple[str, Optional[str]]] = [
             ("dataset", validate_semantic_string(dataset, "dataset")),
-            ("start_date", optional_date_to_string(start_date)),
-            ("end_date", optional_date_to_string(end_date)),
         ]
 
         response: Response = self._get(
@@ -168,40 +155,6 @@ def list_fields(
         )
         return response.json()
 
-    def list_encodings(self) -> List[str]:
-        """
-        Request all available data encodings from Databento.
-
-        Makes a `GET /metadata.list_encodings` HTTP request.
-
-        Returns
-        -------
-        List[str]
-
-        """
-        response: Response = self._get(
-            url=self._base_url + ".list_encodings",
-            basic_auth=True,
-        )
-        return response.json()
-
-    def list_compressions(self) -> List[str]:
-        """
-        Request all available data compression modes from Databento.
-
-        Makes a `GET /metadata.list_compressions` HTTP request.
-
-        Returns
-        -------
-        List[str]
-
-        """
-        response: Response = self._get(
-            url=self._base_url + ".list_compressions",
-            basic_auth=True,
-        )
-        return response.json()
-
     def list_unit_prices(
         self,
         dataset: Union[Dataset, str],
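
With the optional date range removed, `list_schemas` now takes only a dataset code, as also reflected in the example script below. A minimal usage sketch, assuming a configured `Historical` client (the key is a placeholder):

    import databento as db

    client = db.Historical("YOUR_API_KEY")  # hypothetical placeholder key

    # list_schemas now accepts only the dataset; passing the old
    # start_date/end_date keyword arguments would raise a TypeError.
    schemas = client.metadata.list_schemas(dataset="GLBX.MDP3")
    print(schemas)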

examples/historical_metadata.py

Lines changed: 0 additions & 2 deletions
@@ -9,5 +9,3 @@
 print(client.metadata.list_datasets())
 print(client.metadata.list_schemas(dataset="GLBX.MDP3"))
 print(client.metadata.list_fields(dataset="GLBX.MDP3"))
-print(client.metadata.list_encodings())
-print(client.metadata.list_compressions())

notebooks/quickstart.ipynb

Lines changed: 1 addition & 53 deletions
@@ -213,58 +213,6 @@
     "client.metadata.list_fields(dataset=\"GLBX.MDP3\", schema=\"trades\", encoding=\"csv\")"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "2ef363e9-7150-4ace-a2d8-dcc355d73f72",
-   "metadata": {
-    "pycharm": {
-     "name": "#%%\n"
-    },
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['dbn', 'csv', 'json']"
-      ]
-     },
-     "execution_count": null,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "client.metadata.list_encodings()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "25a67ca8-10f0-41cb-a932-95a5e8dcd5f1",
-   "metadata": {
-    "pycharm": {
-     "name": "#%%\n"
-    },
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "['none', 'zstd']"
-      ]
-     },
-     "execution_count": null,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "client.metadata.list_compressions()"
-   ]
-  },
  {
   "cell_type": "markdown",
   "id": "a335a762-bba0-4de5-bde3-8bce006691c9",
@@ -2088,7 +2036,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.15"
+   "version": "3.7.16"
   }
  },
  "nbformat": 4,

tests/test_historical_metadata.py

Lines changed: 1 addition & 54 deletions
@@ -71,11 +71,7 @@ def test_list_schemas_sends_expected_request(self, mocker: MockerFixture) -> None
         mocked_get = mocker.patch("requests.get")
 
         # Act
-        self.client.metadata.list_schemas(
-            dataset="GLBX.MDP3",
-            start_date="2018-01-01",
-            end_date="2021-01-01",
-        )
+        self.client.metadata.list_schemas(dataset="GLBX.MDP3")
 
         # Assert
         call = mocked_get.call_args.kwargs
@@ -84,8 +80,6 @@ def test_list_schemas_sends_expected_request(self, mocker: MockerFixture) -> None
             == f"https://hist.databento.com/v{db.API_VERSION}/metadata.list_schemas"
         )
         assert ("dataset", "GLBX.MDP3") in call["params"]
-        assert ("start_date", "2018-01-01") in call["params"]
-        assert ("end_date", "2021-01-01") in call["params"]
         assert sorted(call["headers"].keys()) == ["accept", "user-agent"]
         assert call["headers"]["accept"] == "application/json"
         assert all(
@@ -123,53 +117,6 @@ def test_list_fields_sends_expected_request(self, mocker: MockerFixture) -> None
         assert call["timeout"] == (100, 100)
         assert isinstance(call["auth"], requests.auth.HTTPBasicAuth)
 
-    @pytest.mark.skipif(sys.version_info < (3, 8), reason="incompatible mocking")
-    def test_list_encodings_sends_expected_request(self, mocker: MockerFixture) -> None:
-        # Arrange
-        mocked_get = mocker.patch("requests.get")
-
-        # Act
-        self.client.metadata.list_encodings()
-
-        # Assert
-        call = mocked_get.call_args.kwargs
-        assert (
-            call["url"]
-            == f"https://hist.databento.com/v{db.API_VERSION}/metadata.list_encodings"
-        )
-        assert sorted(call["headers"].keys()) == ["accept", "user-agent"]
-        assert call["headers"]["accept"] == "application/json"
-        assert all(
-            v in call["headers"]["user-agent"] for v in ("Databento/", "Python/")
-        )
-        assert call["timeout"] == (100, 100)
-        assert isinstance(call["auth"], requests.auth.HTTPBasicAuth)
-
-    @pytest.mark.skipif(sys.version_info < (3, 8), reason="incompatible mocking")
-    def test_list_compressions_sends_expected_request(
-        self,
-        mocker: MockerFixture,
-    ) -> None:
-        # Arrange
-        mocked_get = mocker.patch("requests.get")
-
-        # Act
-        self.client.metadata.list_compressions()
-
-        # Assert
-        call = mocked_get.call_args.kwargs
-        assert (
-            call["url"]
-            == f"https://hist.databento.com/v{db.API_VERSION}/metadata.list_compressions"  # noqa
-        )
-        assert sorted(call["headers"].keys()) == ["accept", "user-agent"]
-        assert call["headers"]["accept"] == "application/json"
-        assert all(
-            v in call["headers"]["user-agent"] for v in ("Databento/", "Python/")
-        )
-        assert call["timeout"] == (100, 100)
-        assert isinstance(call["auth"], requests.auth.HTTPBasicAuth)
-
     @pytest.mark.skipif(sys.version_info < (3, 8), reason="incompatible mocking")
     @pytest.mark.parametrize(
         "dataset, schema, mode",
