Skip to content

Commit 0dfb00b

Browse files
Use smaller date range in the examples
1 parent 61934a3 commit 0dfb00b

File tree

4 files changed

+10
-7
lines changed

4 files changed

+10
-7
lines changed

examples/rapidpro/contacts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from api.rapidpro import pyRapid

# Example: fetch contacts for a one-day window and print the collected frame.
client = pyRapid()
contacts = client.contacts.get_contacts(
    start_datetime="2025-10-15 00:00:00",
    end_datetime="2025-10-16 00:00:00",
)
print(contacts.collect())

examples/rapidpro/flowstarts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from api.rapidpro import pyRapid

# Example: fetch flow starts for a one-day window and print the collected frame.
client = pyRapid()
flowstarts = client.flow_starts.get_flowstarts(
    start_datetime="2025-10-15 00:00:00",
    end_datetime="2025-10-16 00:00:00",
)
print(flowstarts.collect())

examples/rapidpro/runs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from api.rapidpro import pyRapid

# Example: fetch flow runs for a one-day window and print the collected frame.
client = pyRapid()
runs = client.runs.get_runs(
    start_datetime="2025-10-15 00:00:00",
    end_datetime="2025-10-16 00:00:00",
)
print(runs.collect())

rdw_ingestion_tools/api/__init__.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import os
22
from collections.abc import Iterator
33

4-
import more_itertools as it
4+
from more_itertools import chunked
55
from pandas import DataFrame
66
from pandas import json_normalize as pd_json_normalize
77
from polars import (
@@ -94,19 +94,22 @@ def get_polars_schema(
9494

9595
def concatenate_to_string_lazyframe(
9696
objs: list[dict] | dict[Never, Never] | list[Never] | Iterator,
97-
object_columns: list[str], batch_size: int = 20000
97+
object_columns: list[str],
98+
batch_size: int = 20000,
9899
) -> LazyFrame:
99100
"""
100101
Flattens JSON data. Returns a LazyFrame with columns of type `String`.
101102
"""
102103
lf = LazyFrame()
103104

104-
for data in it.batched(objs, batch_size):
105+
for data in chunked(objs, batch_size):
105106
schema = get_polars_schema(data=data, object_columns=object_columns)
106107
response_lf = (
107108
json_normalize(data, separator="_", schema=schema)
108109
.lazy()
109-
.with_columns(col(Object).map_elements(lambda x: str(x), return_dtype=String))
110+
.with_columns(
111+
col(Object).map_elements(lambda x: str(x), return_dtype=String)
112+
)
110113
)
111114
lf = concat([lf, response_lf], how="diagonal")
112115

0 commit comments

Comments
 (0)