
Commit 2b4a658

updated images-and-vision example
1 parent 9c37df6 commit 2b4a658

3 files changed: +130 −0 lines changed

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
[package]
name = "responses-images-and-vision"
version = "0.1.0"
edition = "2021"
publish = false

[dependencies]
async-openai = { path = "../../async-openai" }
tokio = { version = "1.0", features = ["full"] }
futures = "0.3"
base64 = "0.22.1"
serde_json = "1.0"
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
## Overview

This example exercises the image-related capabilities of the Responses API: analyzing an image from a URL and generating an image with the image generation tool.

Image Credit: https://unsplash.com/photos/pride-of-lion-on-field-L4-BDd01wmM
Lines changed: 113 additions & 0 deletions
@@ -0,0 +1,113 @@
use std::error::Error;

use async_openai::{
    config::OpenAIConfig,
    types::{
        chat::ImageDetail,
        responses::{
            CreateResponseArgs, ImageGenTool, InputContent, InputImageContent, InputMessage,
            InputRole, OutputItem, OutputMessageContent,
        },
    },
    Client,
};

use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _};
use std::fs::OpenOptions;
use std::io::Write;

async fn analyze_image_url(client: &Client<OpenAIConfig>) -> Result<(), Box<dyn Error>> {
    let image_url =
        "https://images.unsplash.com/photo-1554990772-0bea55d510d5?q=80&w=512&auto=format";
    let request = CreateResponseArgs::default()
        .model("gpt-4.1-mini")
        .input(InputMessage {
            content: vec![
                "what is in this image? Along with count of objects in the image?".into(),
                InputContent::InputImage(InputImageContent {
                    detail: ImageDetail::Auto,
                    image_url: Some(image_url.to_string()),
                    file_id: None,
                }),
            ],
            role: InputRole::User,
            status: None,
        })
        .build()?;

    println!(
        "analyze_image_url request:\n{}",
        serde_json::to_string(&request)?
    );

    let response = client.responses().create(request).await?;

    for output in response.output {
        match output {
            OutputItem::Message(message) => {
                for content in message.content {
                    match content {
                        OutputMessageContent::OutputText(text) => {
                            println!("Text: {:?}", text.text);
                        }
                        OutputMessageContent::Refusal(refusal) => {
                            println!("Refusal: {:?}", refusal.refusal);
                        }
                    }
                }
            }
            _ => println!("Other output: {:?}", output),
        }
    }

    Ok(())
}

async fn generate_image(client: &Client<OpenAIConfig>) -> Result<(), Box<dyn Error>> {
    let request = CreateResponseArgs::default()
        .model("gpt-4.1-mini")
        .input("Generate an image of gray tabby cat hugging an otter with an orange scarf")
        .tools(ImageGenTool::default())
        .build()?;

    println!(
        "generate_image request:\n{}",
        serde_json::to_string(&request)?
    );

    let response = client.responses().create(request).await?;

    let mut file = OpenOptions::new()
        .create(true)
        .append(true)
        .open("./data/image.png")?;

    for output in response.output {
        match output {
            OutputItem::ImageGenerationCall(image_gen_call) => {
                if let Some(result) = image_gen_call.result {
                    println!("Image generation call has result");
                    let decoded = BASE64_STANDARD.decode(&result)?;
                    file.write_all(&decoded)?;
                } else {
                    println!("Image generation call has no result");
                }
            }
            _ => println!("Other output: {:?}", output),
        }
    }

    Ok(())
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let client = Client::new();

    std::fs::create_dir_all("./data")?;

    generate_image(&client).await?;
    analyze_image_url(&client).await?;

    Ok(())
}
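
Since the manifest already pulls in the base64 crate, the same request types could also be pointed at a local file by embedding it as a data URL instead of a remote link. The sketch below is not part of the commit: it assumes the Responses API accepts base64 data URLs in image_url, and analyze_local_image is a hypothetical helper that reuses the ./data/image.png file written by generate_image above.

// Sketch only (not part of this commit): analyze a local image by sending it
// as a base64 data URL. Assumes the Responses API accepts data URLs in
// `image_url`; `analyze_local_image` is a hypothetical helper.
use async_openai::{
    config::OpenAIConfig,
    types::{
        chat::ImageDetail,
        responses::{CreateResponseArgs, InputContent, InputImageContent, InputMessage, InputRole},
    },
    Client,
};
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _};
use std::error::Error;

async fn analyze_local_image(client: &Client<OpenAIConfig>) -> Result<(), Box<dyn Error>> {
    // Read the PNG produced by generate_image and embed it as a data URL.
    let bytes = std::fs::read("./data/image.png")?;
    let data_url = format!("data:image/png;base64,{}", BASE64_STANDARD.encode(&bytes));

    let request = CreateResponseArgs::default()
        .model("gpt-4.1-mini")
        .input(InputMessage {
            content: vec![
                "Describe this image.".into(),
                InputContent::InputImage(InputImageContent {
                    detail: ImageDetail::Auto,
                    image_url: Some(data_url),
                    file_id: None,
                }),
            ],
            role: InputRole::User,
            status: None,
        })
        .build()?;

    let response = client.responses().create(request).await?;
    println!("analyze_local_image output: {:?}", response.output);
    Ok(())
}

One design note on the example above: the output file is opened with append(true), so a second run appends new image bytes onto the existing ones and leaves an invalid PNG; truncating or removing the file before writing avoids that if repeated runs are expected.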
