Commit a15571c

Merge pull request #30 from replicate/release-please--branches--main--changes--next
release: 2.0.0-alpha.4
2 parents: f91af92 + 869c5e5

File tree: 12 files changed, +200 −133 lines

.github/workflows/ci.yml

Lines changed: 4 additions & 0 deletions
@@ -7,6 +7,10 @@ on:
       - 'integrated/**'
       - 'stl-preview-head/**'
       - 'stl-preview-base/**'
+  pull_request:
+    branches-ignore:
+      - 'stl-preview-head/**'
+      - 'stl-preview-base/**'
 
 jobs:
   lint:

.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 {
-  ".": "2.0.0-alpha.3"
+  ".": "2.0.0-alpha.4"
 }

.stats.yml

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
 configured_endpoints: 35
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-37cd8ea847eb57706035f766ca549d5b4e2111053af0656a2df9a8150421428e.yml
-openapi_spec_hash: a3e4d6fd9aff6de0e4b6d8ad28cbbe05
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/replicate%2Freplicate-client-12e7ef40109b6b34f1471a638d09b79f005c8dbf7e1a8aeca9db7e37a334e8eb.yml
+openapi_spec_hash: 10b0fc9094dac5d51f46bbdd5fe3de32
 config_hash: 12536d2bf978a995771d076a4647c17d

CHANGELOG.md

Lines changed: 20 additions & 0 deletions
@@ -1,5 +1,25 @@
 # Changelog
 
+## 2.0.0-alpha.4 (2025-06-18)
+
+Full Changelog: [v2.0.0-alpha.3...v2.0.0-alpha.4](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.3...v2.0.0-alpha.4)
+
+### Features
+
+* **api:** api update ([a9be2e0](https://github.com/replicate/replicate-python-stainless/commit/a9be2e087bd6f01301608322a50b321b0b01d4da))
+
+### Bug Fixes
+
+* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([3dfe4f7](https://github.com/replicate/replicate-python-stainless/commit/3dfe4f711c061b6197017a5b999f9db4e7f2836d))
+
+### Chores
+
+* **ci:** enable for pull requests ([67ffb34](https://github.com/replicate/replicate-python-stainless/commit/67ffb34adaaef43b4e4e469e5fff7ce3cdca3dcf))
+* **internal:** update conftest.py ([90da407](https://github.com/replicate/replicate-python-stainless/commit/90da407a4818b21bd5a33347a3c4566189c4377d))
+* **readme:** update badges ([4f54c7a](https://github.com/replicate/replicate-python-stainless/commit/4f54c7a76e5107b854e82f5266578e4f84aacc74))
+
 ## 2.0.0-alpha.3 (2025-06-17)
 
 Full Changelog: [v2.0.0-alpha.2...v2.0.0-alpha.3](https://github.com/replicate/replicate-python-stainless/compare/v2.0.0-alpha.2...v2.0.0-alpha.3)

README.md

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 # Replicate Python API library
 
-[![PyPI version](https://img.shields.io/pypi/v/replicate.svg)](https://pypi.org/project/replicate/)
+[![PyPI version](<https://img.shields.io/pypi/v/replicate.svg?label=pypi%20(stable)>)](https://pypi.org/project/replicate/)
 
 The Replicate Python library provides convenient access to the Replicate REST API from any Python 3.8+
 application. The library includes type definitions for all request params and response fields,

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "replicate"
-version = "2.0.0-alpha.3"
+version = "2.0.0-alpha.4"
 description = "The official Python library for the replicate API"
 dynamic = ["readme"]
 license = "Apache-2.0"

src/replicate/_version.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "replicate"
-__version__ = "2.0.0-alpha.3"  # x-release-please-version
+__version__ = "2.0.0-alpha.4"  # x-release-please-version

tests/api_resources/deployments/test_predictions.py

Lines changed: 52 additions & 16 deletions
@@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None:
         prediction = client.deployments.predictions.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> None:
         prediction = client.deployments.predictions.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
             stream=True,
-            webhook="webhook",
-            webhook_events_filter=["start"],
+            webhook="https://example.com/my-webhook-handler",
+            webhook_events_filter=["start", "completed"],
             prefer="wait=5",
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None:
         response = client.deployments.predictions.with_raw_response.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )

         assert response.is_closed is True

@@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None:
         with client.deployments.predictions.with_streaming_response.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"

@@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None:
             client.deployments.predictions.with_raw_response.create(
                 deployment_owner="",
                 deployment_name="deployment_name",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

         with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"):
             client.deployments.predictions.with_raw_response.create(
                 deployment_owner="deployment_owner",
                 deployment_name="",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

@@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None:
         prediction = await async_client.deployments.predictions.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate)
         prediction = await async_client.deployments.predictions.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
             stream=True,
-            webhook="webhook",
-            webhook_events_filter=["start"],
+            webhook="https://example.com/my-webhook-handler",
+            webhook_events_filter=["start", "completed"],
             prefer="wait=5",
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None:
         response = await async_client.deployments.predictions.with_raw_response.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )

         assert response.is_closed is True

@@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) ->
         async with async_client.deployments.predictions.with_streaming_response.create(
             deployment_owner="deployment_owner",
             deployment_name="deployment_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"

@@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None:
             await async_client.deployments.predictions.with_raw_response.create(
                 deployment_owner="",
                 deployment_name="deployment_name",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

         with pytest.raises(ValueError, match=r"Expected a non-empty value for `deployment_name` but received ''"):
             await async_client.deployments.predictions.with_raw_response.create(
                 deployment_owner="deployment_owner",
                 deployment_name="",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )
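
For context, the updated example parameters mirror how the endpoint is intended to be called. The sketch below is illustrative only and is not part of this commit: it assumes an API token is available in the environment, and the deployment owner, deployment name, and webhook URL are placeholders.

```python
# Illustrative sketch (not part of the diff): calling the deployments
# predictions endpoint with the same parameter shape the tests now exercise.
from replicate import Replicate

client = Replicate()  # assumes the API token is picked up from the environment

prediction = client.deployments.predictions.create(
    deployment_owner="acme",               # placeholder: a deployment you own
    deployment_name="my-llm-deployment",   # placeholder
    input={
        "prompt": "Tell me a joke",
        "system_prompt": "You are a helpful assistant",
    },
    webhook="https://example.com/my-webhook-handler",   # placeholder URL
    webhook_events_filter=["start", "completed"],
    prefer="wait=5",  # ask the API to hold the request up to ~5s for a result
)
print(prediction.id, prediction.status)
```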

tests/api_resources/models/test_predictions.py

Lines changed: 52 additions & 16 deletions
@@ -23,7 +23,10 @@ def test_method_create(self, client: Replicate) -> None:
         prediction = client.models.predictions.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -33,10 +36,13 @@ def test_method_create_with_all_params(self, client: Replicate) -> None:
         prediction = client.models.predictions.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
             stream=True,
-            webhook="webhook",
-            webhook_events_filter=["start"],
+            webhook="https://example.com/my-webhook-handler",
+            webhook_events_filter=["start", "completed"],
             prefer="wait=5",
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -47,7 +53,10 @@ def test_raw_response_create(self, client: Replicate) -> None:
         response = client.models.predictions.with_raw_response.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )

         assert response.is_closed is True

@@ -61,7 +70,10 @@ def test_streaming_response_create(self, client: Replicate) -> None:
         with client.models.predictions.with_streaming_response.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"

@@ -78,14 +90,20 @@ def test_path_params_create(self, client: Replicate) -> None:
             client.models.predictions.with_raw_response.create(
                 model_owner="",
                 model_name="model_name",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

         with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
             client.models.predictions.with_raw_response.create(
                 model_owner="model_owner",
                 model_name="",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

@@ -98,7 +116,10 @@ async def test_method_create(self, async_client: AsyncReplicate) -> None:
         prediction = await async_client.models.predictions.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -108,10 +129,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncReplicate)
         prediction = await async_client.models.predictions.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
             stream=True,
-            webhook="webhook",
-            webhook_events_filter=["start"],
+            webhook="https://example.com/my-webhook-handler",
+            webhook_events_filter=["start", "completed"],
             prefer="wait=5",
         )
         assert_matches_type(Prediction, prediction, path=["response"])

@@ -122,7 +146,10 @@ async def test_raw_response_create(self, async_client: AsyncReplicate) -> None:
         response = await async_client.models.predictions.with_raw_response.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         )

         assert response.is_closed is True

@@ -136,7 +163,10 @@ async def test_streaming_response_create(self, async_client: AsyncReplicate) ->
         async with async_client.models.predictions.with_streaming_response.create(
             model_owner="model_owner",
             model_name="model_name",
-            input={},
+            input={
+                "prompt": "Tell me a joke",
+                "system_prompt": "You are a helpful assistant",
+            },
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"

@@ -153,12 +183,18 @@ async def test_path_params_create(self, async_client: AsyncReplicate) -> None:
             await async_client.models.predictions.with_raw_response.create(
                 model_owner="",
                 model_name="model_name",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )

         with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"):
             await async_client.models.predictions.with_raw_response.create(
                 model_owner="model_owner",
                 model_name="",
-                input={},
+                input={
+                    "prompt": "Tell me a joke",
+                    "system_prompt": "You are a helpful assistant",
+                },
             )
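
The async tests above mirror the same call shape through `AsyncReplicate`. The sketch below is illustrative only and is not part of this commit: the model owner, model name, and webhook URL are placeholders, and an API token is assumed to be available in the environment.

```python
# Illustrative sketch (not part of the diff): the async counterpart for the
# models predictions endpoint, using the same parameters the tests exercise.
import asyncio

from replicate import AsyncReplicate


async def main() -> None:
    client = AsyncReplicate()  # assumes the API token comes from the environment

    prediction = await client.models.predictions.create(
        model_owner="meta",                        # placeholder model owner
        model_name="meta-llama-3-8b-instruct",     # placeholder model name
        input={
            "prompt": "Tell me a joke",
            "system_prompt": "You are a helpful assistant",
        },
        webhook="https://example.com/my-webhook-handler",   # placeholder URL
        webhook_events_filter=["start", "completed"],
        prefer="wait=5",  # ask the API to wait up to ~5s before responding
    )
    print(prediction.id, prediction.status)


asyncio.run(main())
```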
