Skip to content

Commit ad0184c

Browse files
samuel100 and Copilot committed
Fix Rust tutorial samples to match updated SDK API
- Use `::from()` instead of `::new()` for message construction
- Use `|progress: &str|` instead of `|progress: f32|` for download callbacks
- Use `serde_json::from_value` for assistant message construction
- Use `ChatCompletionMessageToolCalls::Function` enum pattern for tool calls
- Use `foundry_local_sdk::openai::ChatClient` for type annotations
- Add `is_cached()` check before download
- Add `serde_json` dependency to tutorial-chat-assistant

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 301f5f7 commit ad0184c

File tree

5 files changed

+167
-106
lines changed

5 files changed

+167
-106
lines changed

samples/rust/tutorial-chat-assistant/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,4 @@ foundry-local-sdk = { path = "../../../sdk/rust" }
88
tokio = { version = "1", features = ["full"] }
99
tokio-stream = "0.1"
1010
anyhow = "1"
11+
serde_json = "1"

samples/rust/tutorial-chat-assistant/src/main.rs

Lines changed: 27 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
// <complete_code>
22
// <imports>
33
use foundry_local_sdk::{
4-
ChatCompletionRequestAssistantMessage, ChatCompletionRequestMessage,
4+
ChatCompletionRequestMessage,
55
ChatCompletionRequestSystemMessage, ChatCompletionRequestUserMessage,
66
FoundryLocalConfig, FoundryLocalManager,
77
};
8-
use std::io::{BufRead, Write};
8+
use std::io::{self, BufRead, Write};
99
use tokio_stream::StreamExt;
1010
// </imports>
1111

@@ -18,13 +18,16 @@ async fn main() -> anyhow::Result<()> {
1818
// Select and load a model from the catalog
1919
let model = manager.catalog().get_model("phi-3.5-mini").await?;
2020

21-
model
22-
.download(Some(|progress: f32| {
23-
print!("\rDownloading model: {:.2}%", progress);
24-
std::io::stdout().flush().unwrap();
25-
}))
26-
.await?;
27-
println!();
21+
if !model.is_cached().await? {
22+
println!("Downloading model...");
23+
model
24+
.download(Some(|progress: &str| {
25+
print!("\r {progress}");
26+
io::stdout().flush().ok();
27+
}))
28+
.await?;
29+
println!();
30+
}
2831

2932
model.load().await?;
3033
println!("Model loaded and ready.");
@@ -36,7 +39,7 @@ async fn main() -> anyhow::Result<()> {
3639
// <system_prompt>
3740
// Start the conversation with a system prompt
3841
let mut messages: Vec<ChatCompletionRequestMessage> = vec![
39-
ChatCompletionRequestSystemMessage::new(
42+
ChatCompletionRequestSystemMessage::from(
4043
"You are a helpful, friendly assistant. Keep your responses \
4144
concise and conversational. If you don't know something, say so.",
4245
)
@@ -46,11 +49,11 @@ async fn main() -> anyhow::Result<()> {
4649

4750
println!("\nChat assistant ready! Type 'quit' to exit.\n");
4851

49-
let stdin = std::io::stdin();
52+
let stdin = io::stdin();
5053
// <conversation_loop>
5154
loop {
5255
print!("You: ");
53-
std::io::stdout().flush()?;
56+
io::stdout().flush()?;
5457

5558
let mut input = String::new();
5659
stdin.lock().read_line(&mut input)?;
@@ -61,27 +64,32 @@ async fn main() -> anyhow::Result<()> {
6164
}
6265

6366
// Add the user's message to conversation history
64-
messages.push(ChatCompletionRequestUserMessage::new(input).into());
67+
messages.push(ChatCompletionRequestUserMessage::from(input).into());
6568

6669
// <streaming>
6770
// Stream the response token by token
6871
print!("Assistant: ");
69-
std::io::stdout().flush()?;
72+
io::stdout().flush()?;
7073
let mut full_response = String::new();
7174
let mut stream = client.complete_streaming_chat(&messages, None).await?;
7275
while let Some(chunk) = stream.next().await {
7376
let chunk = chunk?;
74-
if let Some(content) = &chunk.choices[0].message.content {
75-
print!("{}", content);
76-
std::io::stdout().flush()?;
77-
full_response.push_str(content);
77+
if let Some(choice) = chunk.choices.first() {
78+
if let Some(ref content) = choice.delta.content {
79+
print!("{content}");
80+
io::stdout().flush()?;
81+
full_response.push_str(content);
82+
}
7883
}
7984
}
8085
println!("\n");
8186
// </streaming>
8287

8388
// Add the complete response to conversation history
84-
messages.push(ChatCompletionRequestAssistantMessage::new(full_response).into());
89+
let assistant_msg: ChatCompletionRequestMessage = serde_json::from_value(
90+
serde_json::json!({"role": "assistant", "content": full_response}),
91+
)?;
92+
messages.push(assistant_msg);
8593
}
8694
// </conversation_loop>
8795

samples/rust/tutorial-document-summarizer/src/main.rs

Lines changed: 61 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,31 @@
11
// <complete_code>
22
// <imports>
33
use foundry_local_sdk::{
4-
ChatCompletionRequestMessage, ChatCompletionRequestSystemMessage,
5-
ChatCompletionRequestUserMessage, FoundryLocalConfig, FoundryLocalManager,
4+
ChatCompletionRequestMessage,
5+
ChatCompletionRequestSystemMessage,
6+
ChatCompletionRequestUserMessage, FoundryLocalConfig,
7+
FoundryLocalManager,
68
};
7-
use std::io::Write;
9+
use std::io::{self, Write};
810
use std::path::Path;
911
use std::{env, fs};
1012
// </imports>
1113

1214
async fn summarize_file(
13-
client: &foundry_local_sdk::ChatClient,
15+
client: &foundry_local_sdk::openai::ChatClient,
1416
file_path: &Path,
1517
system_prompt: &str,
1618
) -> anyhow::Result<()> {
1719
let content = fs::read_to_string(file_path)?;
1820
let messages: Vec<ChatCompletionRequestMessage> = vec![
19-
ChatCompletionRequestSystemMessage::new(system_prompt).into(),
20-
ChatCompletionRequestUserMessage::new(&content).into(),
21+
ChatCompletionRequestSystemMessage::from(system_prompt)
22+
.into(),
23+
ChatCompletionRequestUserMessage::from(content.as_str())
24+
.into(),
2125
];
2226

23-
let response = client.complete_chat(&messages, None).await?;
27+
let response =
28+
client.complete_chat(&messages, None).await?;
2429
let summary = response.choices[0]
2530
.message
2631
.content
@@ -31,14 +36,16 @@ async fn summarize_file(
3136
}
3237

3338
async fn summarize_directory(
34-
client: &foundry_local_sdk::ChatClient,
39+
client: &foundry_local_sdk::openai::ChatClient,
3540
directory: &Path,
3641
system_prompt: &str,
3742
) -> anyhow::Result<()> {
3843
let mut txt_files: Vec<_> = fs::read_dir(directory)?
3944
.filter_map(|entry| entry.ok())
4045
.filter(|entry| {
41-
entry.path().extension()
46+
entry
47+
.path()
48+
.extension()
4249
.map(|ext| ext == "txt")
4350
.unwrap_or(false)
4451
})
@@ -47,14 +54,25 @@ async fn summarize_directory(
4754
txt_files.sort_by_key(|e| e.path());
4855

4956
if txt_files.is_empty() {
50-
println!("No .txt files found in {}", directory.display());
57+
println!(
58+
"No .txt files found in {}",
59+
directory.display()
60+
);
5161
return Ok(());
5262
}
5363

5464
for entry in &txt_files {
5565
let file_name = entry.file_name();
56-
println!("--- {} ---", file_name.to_string_lossy());
57-
summarize_file(client, &entry.path(), system_prompt).await?;
66+
println!(
67+
"--- {} ---",
68+
file_name.to_string_lossy()
69+
);
70+
summarize_file(
71+
client,
72+
&entry.path(),
73+
system_prompt,
74+
)
75+
.await?;
5876
println!();
5977
}
6078

@@ -70,15 +88,21 @@ async fn main() -> anyhow::Result<()> {
7088
)?;
7189

7290
// Select and load a model from the catalog
73-
let model = manager.catalog().get_model("phi-3.5-mini").await?;
74-
75-
model
76-
.download(Some(|progress: f32| {
77-
print!("\rDownloading model: {:.2}%", progress);
78-
std::io::stdout().flush().unwrap();
79-
}))
91+
let model = manager
92+
.catalog()
93+
.get_model("phi-3.5-mini")
8094
.await?;
81-
println!();
95+
96+
if !model.is_cached().await? {
97+
println!("Downloading model...");
98+
model
99+
.download(Some(|progress: &str| {
100+
print!("\r {progress}");
101+
io::stdout().flush().ok();
102+
}))
103+
.await?;
104+
println!();
105+
}
82106

83107
model.load().await?;
84108
println!("Model loaded and ready.\n");
@@ -91,25 +115,36 @@ async fn main() -> anyhow::Result<()> {
91115
// </init>
92116

93117
// <summarization>
94-
let system_prompt =
95-
"Summarize the following document into concise bullet points. \
96-
Focus on the key points and main ideas.";
118+
let system_prompt = "Summarize the following document \
119+
into concise bullet points. Focus on the key \
120+
points and main ideas.";
97121

98122
// <file_reading>
99-
let target = env::args().nth(1)
123+
let target = env::args()
124+
.nth(1)
100125
.unwrap_or_else(|| "document.txt".to_string());
101126
let target_path = Path::new(&target);
102127
// </file_reading>
103128

104129
if target_path.is_dir() {
105-
summarize_directory(&client, target_path, system_prompt).await?;
130+
summarize_directory(
131+
&client,
132+
target_path,
133+
system_prompt,
134+
)
135+
.await?;
106136
} else {
107137
let file_name = target_path
108138
.file_name()
109139
.map(|n| n.to_string_lossy().to_string())
110140
.unwrap_or_else(|| target.clone());
111141
println!("--- {} ---", file_name);
112-
summarize_file(&client, target_path, system_prompt).await?;
142+
summarize_file(
143+
&client,
144+
target_path,
145+
system_prompt,
146+
)
147+
.await?;
113148
}
114149
// </summarization>
115150

0 commit comments

Comments (0)