
Commit cf5368d

MOD: Standardize some enum naming
1 parent 2daf9a6 commit cf5368d


2 files changed: 8 additions (+), 8 deletions (-)


databento/common/enums.py

Lines changed: 3 additions & 3 deletions
@@ -70,8 +70,8 @@ class Compression(Enum):
 
 
 @unique
-class Duration(Enum):
-    """Represents the duration interval for each batch data file."""
+class SplitDuration(Enum):
+    """Represents the duration before splitting for each batched data file."""
 
     DAY = "day"
     WEEK = "week"
@@ -90,7 +90,7 @@ class Packaging(Enum):
 
 @unique
 class Delivery(Enum):
-    """Represents the delivery mechanism for batch data."""
+    """Represents the delivery mechanism for batched data."""
 
     DOWNLOAD = "download"
     S3 = "s3"
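
For context, a minimal sketch of the renamed enum as it stands after this commit. Only the DAY and WEEK members are visible in the hunk above; MONTH and NONE are assumptions taken from the {'day', 'week', 'month', 'none'} set documented for split_duration in batch.py below.

# Sketch only: MONTH and NONE are assumed members, not shown in this diff.
from enum import Enum, unique


@unique
class SplitDuration(Enum):
    """Represents the duration before splitting for each batched data file."""

    DAY = "day"
    WEEK = "week"
    MONTH = "month"  # assumed from the submit_job docstring
    NONE = "none"    # assumed from the submit_job docstring


# Lookup by value gives the same coercion submit_job relies on:
assert SplitDuration("week") is SplitDuration.WEEK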

databento/historical/api/batch.py

Lines changed: 5 additions & 5 deletions
@@ -6,10 +6,10 @@
     Compression,
     Dataset,
     Delivery,
-    Duration,
     Encoding,
     Packaging,
     Schema,
+    SplitDuration,
     SType,
 )
 from databento.common.parsing import (
@@ -39,7 +39,7 @@ def submit_job(
         symbols: Optional[Union[List[str], str]] = None,
         encoding: Union[Encoding, str] = "dbz",
         compression: Optional[Union[Compression, str]] = None,
-        split_duration: Union[Duration, str] = "day",
+        split_duration: Union[SplitDuration, str] = "day",
         split_size: Optional[int] = None,
         packaging: Union[Packaging, str] = "none",
         delivery: Union[Delivery, str] = "download",
@@ -75,7 +75,7 @@ def submit_job(
         compression : Compression or str {'none', 'zstd'}, optional
             The data compression format (if any).
             If encoding is 'dbz' then specifying a `compression` is invalid (already zstd compressed).
-        split_duration : Duration or str {'day', 'week', 'month', 'none'}, default 'day'
+        split_duration : SplitDuration or str {'day', 'week', 'month', 'none'}, default 'day'
             The maximum time duration before batched data is split into multiple files.
             A week starts on Sunday UTC.
         split_size : int, optional
@@ -107,7 +107,7 @@ def submit_job(
         validate_enum(schema, Schema, "schema")
         validate_enum(encoding, Encoding, "encoding")
         validate_enum(compression, Compression, "compression")
-        validate_enum(split_duration, Duration, "duration")
+        validate_enum(split_duration, SplitDuration, "duration")
         validate_enum(packaging, Packaging, "packaging")
         validate_enum(delivery, Delivery, "delivery")
         validate_enum(stype_in, SType, "stype_in")
@@ -126,7 +126,7 @@ def submit_job(
 
         params.append(("encoding", Encoding(encoding).value))
         params.append(("compression", Compression(compression).value))
-        params.append(("split_duration", Duration(split_duration).value))
+        params.append(("split_duration", SplitDuration(split_duration).value))
         params.append(("packaging", Packaging(packaging).value))
         params.append(("delivery", Delivery(delivery).value))
         if split_size is not None:
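
The renamed parameter still accepts either the enum member or its string value, as the Union[SplitDuration, str] annotation and the SplitDuration(split_duration).value coercion above show. A standalone sketch of that coercion pattern, using a local copy of the enum so it runs without the library installed:

# Standalone sketch; SplitDuration here is a local stand-in for the enum above.
from enum import Enum, unique
from typing import Union


@unique
class SplitDuration(Enum):
    DAY = "day"
    WEEK = "week"
    MONTH = "month"
    NONE = "none"


def resolve_split_duration(split_duration: Union[SplitDuration, str]) -> str:
    # Enum(value) accepts both a member and its string value, so one call
    # covers the Union[SplitDuration, str] parameter shown in submit_job.
    return SplitDuration(split_duration).value


assert resolve_split_duration("day") == "day"
assert resolve_split_duration(SplitDuration.WEEK) == "week"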
