Skip to content

Commit 4a35670

Browse files
committed
docs: update examples to use anthropic/claude-4-sonnet and google/nano-banana
Replace all model references in examples:

- stability-ai/sdxl → google/nano-banana
- meta/llama-2-70b-chat → anthropic/claude-4-sonnet
- black-forest-labs/flux-schnell → google/nano-banana
- stability-ai/stable-diffusion → google/nano-banana

This provides more whimsical and memorable example model names while maintaining the same API patterns and functionality demonstrations.
1 parent 0d8b065 commit 4a35670

File tree

1 file changed: +33 / -33 lines

api.md

Lines changed: 33 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -15,15 +15,15 @@ from replicate import Replicate
1515
replicate = Replicate()
1616

1717
# Create a model function
18-
flux = replicate.use("black-forest-labs/flux-schnell")
18+
banana = replicate.use("google/nano-banana")
1919

2020
# Call it like any Python function
21-
output = flux(prompt="astronaut on a horse")
21+
output = banana(prompt="astronaut on a horse")
2222
print(output)
2323

2424
# Or use run() for one-off predictions
2525
output = replicate.run(
26-
"black-forest-labs/flux-schnell",
26+
"google/nano-banana",
2727
input={"prompt": "astronaut on a horse"}
2828
)
2929
```
@@ -62,7 +62,7 @@ import asyncio
6262
async def main():
6363
replicate = AsyncReplicate(bearer_token="your_api_token")
6464
output = await replicate.run(
65-
"stability-ai/stable-diffusion",
65+
"google/nano-banana",
6666
input={"prompt": "a watercolor painting"}
6767
)
6868
print(output)
@@ -78,26 +78,26 @@ The most Pythonic way to interact with models. Creates a callable function for a
7878

7979
```python
8080
# Create a model function
81-
sdxl = replicate.use("stability-ai/sdxl")
81+
banana = replicate.use("google/nano-banana")
8282

8383
# Call it like a regular function
84-
image = sdxl(prompt="a 19th century portrait of a wombat gentleman")
84+
image = banana(prompt="a 19th century portrait of a wombat gentleman")
8585

8686
# Use it multiple times with different inputs
87-
image1 = sdxl(prompt="a cat in a hat", negative_prompt="blurry, low quality")
88-
image2 = sdxl(prompt="a dog in sunglasses", num_outputs=4)
87+
image1 = banana(prompt="a cat in a hat", negative_prompt="blurry, low quality")
88+
image2 = banana(prompt="a dog in sunglasses", num_outputs=4)
8989

9090
# Works great with language models too
91-
llama = replicate.use("meta/llama-2-70b-chat")
92-
response = llama(
91+
claude = replicate.use("anthropic/claude-4-sonnet")
92+
response = claude(
9393
prompt="Write a haiku about Python programming",
9494
temperature=0.7,
9595
max_new_tokens=100
9696
)
9797

9898
# Enable streaming for models that support it
99-
llama_stream = replicate.use("meta/llama-2-70b-chat", streaming=True)
100-
for chunk in llama_stream(prompt="Explain quantum computing"):
99+
claude_stream = replicate.use("anthropic/claude-4-sonnet", streaming=True)
100+
for chunk in claude_stream(prompt="Explain quantum computing"):
101101
print(chunk, end="")
102102

103103
# Can accept model references in various formats
@@ -113,13 +113,13 @@ Direct method to run a model and get output. Good for one-off predictions.
113113
```python
114114
# Basic usage - returns output when complete
115115
output = replicate.run(
116-
"stability-ai/sdxl:39ed52f2a78e934b3ba6e2a89f5b1c712de7dfea535525255b1aa35c5565e08b",
116+
"google/nano-banana:39ed52f2a78e934b3ba6e2a89f5b1c712de7dfea535525255b1aa35c5565e08b",
117117
input={"prompt": "a 19th century portrait of a wombat gentleman"}
118118
)
119119

120120
# With options
121121
output = replicate.run(
122-
"meta/llama-2-70b-chat",
122+
"anthropic/claude-4-sonnet",
123123
input={
124124
"prompt": "Write a poem about machine learning",
125125
"max_new_tokens": 500,
@@ -143,7 +143,7 @@ For models that support streaming (like language models). Returns an iterator of
143143
```python
144144
# Stream text output
145145
for event in replicate.stream(
146-
"meta/llama-2-70b-chat",
146+
"anthropic/claude-4-sonnet",
147147
input={
148148
"prompt": "Tell me a story about a robot",
149149
"max_new_tokens": 1000
@@ -152,7 +152,7 @@ for event in replicate.stream(
152152
print(str(event), end="")
153153

154154
# Async streaming
155-
async for event in async_replicate.stream("meta/llama-2-70b-chat", input={"prompt": "Hello"}):
155+
async for event in async_replicate.stream("anthropic/claude-4-sonnet", input={"prompt": "Hello"}):
156156
print(str(event), end="")
157157
```
158158

@@ -210,7 +210,7 @@ Interact with models and their versions.
210210

211211
```python
212212
# Get a specific model
213-
model = replicate.models.get(model_owner="stability-ai", model_name="stable-diffusion")
213+
model = replicate.models.get(model_owner="google", model_name="nano-banana")
214214
print(f"Model: {model.owner}/{model.name}")
215215
print(f"Description: {model.description}")
216216
print(f"Latest version: {model.latest_version.id}")
@@ -242,15 +242,15 @@ replicate.models.delete(model_owner="your-username", model_name="my-model")
242242
```python
243243
# List model versions
244244
for version in replicate.models.versions.list(
245-
model_owner="stability-ai",
246-
model_name="stable-diffusion"
245+
model_owner="google",
246+
model_name="nano-banana"
247247
):
248248
print(f"Version {version.id}: created at {version.created_at}")
249249

250250
# Get a specific version
251251
version = replicate.models.versions.get(
252-
model_owner="stability-ai",
253-
model_name="stable-diffusion",
252+
model_owner="google",
253+
model_name="nano-banana",
254254
version_id="db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf"
255255
)
256256

@@ -269,8 +269,8 @@ Run predictions directly through a model.
269269
```python
270270
# Create a prediction for a specific model
271271
prediction = replicate.models.predictions.create(
272-
model_owner="stability-ai",
273-
model_name="stable-diffusion",
272+
model_owner="google",
273+
model_name="nano-banana",
274274
input={"prompt": "a beautiful landscape"}
275275
)
276276
```
@@ -280,8 +280,8 @@ prediction = replicate.models.predictions.create(
280280
```python
281281
# Get example predictions for a model
282282
for example in replicate.models.examples.list(
283-
model_owner="stability-ai",
284-
model_name="stable-diffusion"
283+
model_owner="google",
284+
model_name="nano-banana"
285285
):
286286
print(f"Example input: {example.input}")
287287
print(f"Example output: {example.output}")
@@ -373,7 +373,7 @@ for collection in replicate.collections.list():
373373
print(f"{collection.name}: {collection.description}")
374374

375375
# Get a specific collection
376-
collection = replicate.collections.get(collection_slug="awesome-sdxl-models")
376+
collection = replicate.collections.get(collection_slug="awesome-banana-models")
377377
for model in collection.models:
378378
print(f"- {model.owner}/{model.name}")
379379
```
@@ -626,13 +626,13 @@ async def main():
626626

627627
# Run a model
628628
output = await replicate.run(
629-
"stability-ai/stable-diffusion",
629+
"google/nano-banana",
630630
input={"prompt": "a futuristic city"}
631631
)
632632

633633
# Stream output
634634
async for event in replicate.stream(
635-
"meta/llama-2-70b-chat",
635+
"anthropic/claude-4-sonnet",
636636
input={"prompt": "Tell me a joke"}
637637
):
638638
print(event, end="")
@@ -770,12 +770,12 @@ import replicate
770770

771771
# Run a model
772772
output = replicate.run(
773-
"stability-ai/stable-diffusion:version",
773+
"google/nano-banana:version",
774774
input={"prompt": "a cat"}
775775
)
776776

777777
# Get a model
778-
model = replicate.models.get("stability-ai/stable-diffusion")
778+
model = replicate.models.get("google/nano-banana")
779779
```
780780

781781
**New (v1.0+):**
@@ -786,14 +786,14 @@ replicate = Replicate()
786786

787787
# Run a model
788788
output = replicate.run(
789-
"stability-ai/stable-diffusion:version",
789+
"google/nano-banana:version",
790790
input={"prompt": "a cat"}
791791
)
792792

793793
# Get a model
794794
model = replicate.models.get(
795-
model_owner="stability-ai",
796-
model_name="stable-diffusion"
795+
model_owner="google",
796+
model_name="nano-banana"
797797
)
798798
```
799799

0 commit comments

Comments (0)