Commit 3333e0f

fix: add type annotations to output lists in stream tests
1 parent 924ab59 commit 3333e0f

1 file changed (+6, -6 lines)

tests/lib/test_stream.py

Lines changed: 6 additions & 6 deletions

@@ -15,7 +15,7 @@
 
 def create_mock_prediction_json(stream_url: str | None = None) -> dict[str, Any]:
     """Helper to create a complete prediction JSON response"""
-    prediction = {
+    prediction: dict[str, Any] = {
         "id": "test-prediction-id",
         "created_at": "2023-01-01T00:00:00Z",
         "data_removed": False,
@@ -31,7 +31,7 @@ def create_mock_prediction_json(stream_url: str | None = None) -> dict[str, Any]
         },
     }
     if stream_url:
-        prediction["urls"]["stream"] = stream_url
+        prediction["urls"]["stream"] = stream_url  # type: ignore[index]
     return prediction
 
 
@@ -62,7 +62,7 @@ def stream_content() -> Iterator[bytes]:
     )
 
     # Stream the model
-    output = []
+    output: list[str] = []
     for chunk in client.stream(
         "meta/meta-llama-3-70b-instruct",
         input={"prompt": "Say hello"},
@@ -99,7 +99,7 @@ def stream_content() -> Iterator[bytes]:
     )
 
     # Stream the model
-    output = []
+    output: list[str] = []
     for chunk in client.stream(
         version_id,
         input={"prompt": "Test"},
@@ -154,7 +154,7 @@ async def stream_content():
     )
 
     # Stream the model
-    output = []
+    output: list[str] = []
     async for chunk in async_client.stream(
         "meta/meta-llama-3-70b-instruct",
         input={"prompt": "Say hello"},
@@ -213,7 +213,7 @@ def stream_content() -> Iterator[bytes]:
     )
 
     # Stream using module-level function
-    output = []
+    output: list[str] = []
     for chunk in replicate.stream(
         "meta/meta-llama-3-70b-instruct",
         input={"prompt": "Test"},

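For context on the annotations being added: mypy cannot infer an element type from an empty literal, so output = [] leaves the list's element type unknown, and a dict literal with mixed value types is inferred with a value type too narrow to allow a nested assignment such as prediction["urls"]["stream"] = stream_url. Annotating the variable at the assignment site supplies the missing type. The sketch below only illustrates the pattern in isolation; it is not code from this repository, and stream_tokens, the example keys, and the URLs are hypothetical.

from typing import Any, Iterator


def stream_tokens() -> Iterator[str]:
    # Hypothetical stand-in for a streaming client call that yields text chunks.
    yield from ("Hello", ", ", "world")


# Annotating the empty list tells mypy that every appended chunk must be a str.
output: list[str] = []
for chunk in stream_tokens():
    output.append(chunk)
print("".join(output))

# Without the explicit dict[str, Any] annotation, mypy infers the value type
# from the mixed literal (roughly dict[str, object]) and rejects indexing into
# prediction["urls"]; with Any, the nested assignment type-checks.
prediction: dict[str, Any] = {
    "id": "example-id",
    "data_removed": False,
    "urls": {"get": "https://example.com/get"},
}
prediction["urls"]["stream"] = "https://example.com/stream"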