Skip to content

Commit aa8dd08

Browse files
committed
drop chat-session and make tools and provider-options optional in config
1 parent 5f59315 commit aa8dd08

File tree

24 files changed

+87
-486
lines changed

24 files changed

+87
-486
lines changed

llm/anthropic/src/conversions.rs

Lines changed: 21 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -12,22 +12,28 @@ use std::collections::HashMap;
1212
pub fn events_to_request(events: Vec<Event>, config: Config) -> Result<MessagesRequest, Error> {
1313
let options = config
1414
.provider_options
15-
.into_iter()
16-
.map(|kv| (kv.key, kv.value))
17-
.collect::<HashMap<_, _>>();
15+
.map(|options| {
16+
options
17+
.into_iter()
18+
.map(|kv| (kv.key, kv.value))
19+
.collect::<HashMap<_, _>>()
20+
})
21+
.unwrap_or_default();
1822

1923
let (user_messages, system_messages) = events_to_messages_and_system_messages(events);
2024

2125
let tool_choice = config.tool_choice.map(convert_tool_choice);
22-
let tools = if config.tools.is_empty() {
23-
None
24-
} else {
25-
let mut tools = Vec::new();
26-
for tool in &config.tools {
27-
tools.push(tool_definition_to_tool(tool)?)
28-
}
29-
Some(tools)
30-
};
26+
let tools = config
27+
.tools
28+
.and_then(|tools| {
29+
(!tools.is_empty()).then(|| {
30+
tools
31+
.into_iter()
32+
.map(tool_definition_to_tool)
33+
.collect::<Result<Vec<_>, _>>()
34+
})
35+
})
36+
.transpose()?;
3137

3238
Ok(MessagesRequest {
3339
max_tokens: config.max_tokens.unwrap_or(4096),
@@ -280,13 +286,13 @@ fn content_parts_to_content(content_parts: Vec<ContentPart>) -> Vec<Content> {
280286
result
281287
}
282288

283-
fn tool_definition_to_tool(tool: &ToolDefinition) -> Result<Tool, Error> {
289+
fn tool_definition_to_tool(tool: ToolDefinition) -> Result<Tool, Error> {
284290
match serde_json::from_str(&tool.parameters_schema) {
285291
Ok(value) => Ok(Tool::CustomTool {
286292
input_schema: value,
287-
name: tool.name.clone(),
293+
name: tool.name,
288294
cache_control: None,
289-
description: tool.description.clone(),
295+
description: tool.description,
290296
}),
291297
Err(error) => Err(Error {
292298
code: ErrorCode::InternalError,

llm/anthropic/src/lib.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ use crate::client::{
77
use crate::conversions::{
88
convert_usage, events_to_request, process_response, stop_reason_to_finish_reason,
99
};
10-
use golem_llm::chat_session::ChatSession;
1110
use golem_llm::chat_stream::{LlmChatStream, LlmChatStreamState};
1211
use golem_llm::config::{get_config_key, with_config_key};
1312
use golem_llm::durability::{DurableLLM, ExtendedGuest};
@@ -277,7 +276,6 @@ impl AnthropicComponent {
277276

278277
impl Guest for AnthropicComponent {
279278
type ChatStream = LlmChatStream<AnthropicChatStream>;
280-
type ChatSession = ChatSession<DurableAnthropicComponent>;
281279

282280
fn send(events: Vec<Event>, config: Config) -> Result<Response, Error> {
283281
let anthropic_api_key = get_config_key(Self::ENV_VAR_NAME)?;

llm/anthropic/wit/deps/golem-llm/golem-llm.wit

Lines changed: 2 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -173,11 +173,11 @@ interface llm {
173173
/// A sequence where the model stops generating tokens
174174
stop-sequences: option<list<string>>,
175175
/// List of available tools
176-
tools: list<tool-definition>,
176+
tools: option<list<tool-definition>>,
177177
/// Tool choice policy
178178
tool-choice: option<string>,
179179
/// Additional LLM provider specific key-value pairs
180-
provider-options: list<kv>,
180+
provider-options: option<list<kv>>,
181181
}
182182

183183
// --- Usage / Metadata ---
@@ -286,33 +286,6 @@ interface llm {
286286
events: list<event>,
287287
config: config,
288288
) -> chat-stream;
289-
290-
// --- Chat session ---
291-
292-
/// Chat session is a simple wrapper on top of events to help with maintaining chat history
293-
resource chat-session {
294-
/// Create new session with the provided config
295-
constructor(config: config);
296-
297-
/// Add a single user message to the chat events
298-
add-message: func(message: message);
299-
/// Add multiple user messages to the chat events
300-
add-messages: func(messages: list<message>);
301-
/// Add a single tool result to the chat events
302-
add-tool-result: func(tool-result: tool-result);
303-
/// Add multiple tool results to the chat events
304-
add-tool-results: func(tool-results: list<tool-result>);
305-
306-
/// Observe all events in the session
307-
get-chat-events: func() -> list<event>;
308-
// Replace all events in the session, which allows e.g. compacting them
309-
set-chat-events: func(events: list<event>);
310-
311-
/// Send the full accumulated chat events, responses are automatically added to the session chat events
312-
send: func() -> result<response, error>;
313-
/// Like Send, but streams responses
314-
%stream: func() -> chat-stream;
315-
}
316289
}
317290

318291
world llm-library {

llm/bedrock/src/conversions.rs

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,13 @@ impl BedrockInput {
3030

3131
let options = config
3232
.provider_options
33-
.iter()
34-
.map(|kv| (kv.key.clone(), Document::String(kv.value.clone())))
35-
.collect::<HashMap<_, _>>();
33+
.map(|options| {
34+
options
35+
.into_iter()
36+
.map(|kv| (kv.key, Document::String(kv.value)))
37+
.collect::<HashMap<_, _>>()
38+
})
39+
.unwrap_or_default();
3640

3741
Ok(BedrockInput {
3842
model_id: config.model.clone(),
@@ -47,7 +51,7 @@ impl BedrockInput {
4751
.build(),
4852
messages: user_messages,
4953
system_instructions,
50-
tools: tool_defs_to_bedrock_tool_config(config.tools.clone())?,
54+
tools: tool_defs_to_bedrock_tool_config(config.tools.unwrap_or_default())?,
5155
additional_fields: Document::Object(options),
5256
})
5357
}

llm/bedrock/src/lib.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
use async_utils::get_async_runtime;
22
use client::Bedrock;
3-
use golem_llm::chat_session::ChatSession;
43
use golem_llm::durability::{DurableLLM, ExtendedGuest};
54
use golem_llm::golem::llm::llm::{
65
self, ChatStream, Config, Error, Event, Guest, Message, Response,
@@ -19,7 +18,6 @@ struct BedrockComponent;
1918

2019
impl Guest for BedrockComponent {
2120
type ChatStream = BedrockChatStream;
22-
type ChatSession = ChatSession<DurableBedrockComponent>;
2321

2422
fn send(events: Vec<Event>, config: Config) -> Result<Response, Error> {
2523
let runtime = get_async_runtime();

llm/bedrock/wit/deps/golem-llm/golem-llm.wit

Lines changed: 2 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -173,11 +173,11 @@ interface llm {
173173
/// A sequence where the model stops generating tokens
174174
stop-sequences: option<list<string>>,
175175
/// List of available tools
176-
tools: list<tool-definition>,
176+
tools: option<list<tool-definition>>,
177177
/// Tool choice policy
178178
tool-choice: option<string>,
179179
/// Additional LLM provider specific key-value pairs
180-
provider-options: list<kv>,
180+
provider-options: option<list<kv>>,
181181
}
182182

183183
// --- Usage / Metadata ---
@@ -286,33 +286,6 @@ interface llm {
286286
events: list<event>,
287287
config: config,
288288
) -> chat-stream;
289-
290-
// --- Chat session ---
291-
292-
/// Chat session is a simple wrapper on top of events to help with maintaining chat history
293-
resource chat-session {
294-
/// Create new session with the provided config
295-
constructor(config: config);
296-
297-
/// Add a single user message to the chat events
298-
add-message: func(message: message);
299-
/// Add multiple user messages to the chat events
300-
add-messages: func(messages: list<message>);
301-
/// Add a single tool result to the chat events
302-
add-tool-result: func(tool-result: tool-result);
303-
/// Add multiple tool results to the chat events
304-
add-tool-results: func(tool-results: list<tool-result>);
305-
306-
/// Observe all events in the session
307-
get-chat-events: func() -> list<event>;
308-
// Replace all events in the session, which allows e.g. compacting them
309-
set-chat-events: func(events: list<event>);
310-
311-
/// Send the full accumulated chat events, responses are automatically added to the session chat events
312-
send: func() -> result<response, error>;
313-
/// Like Send, but streams responses
314-
%stream: func() -> chat-stream;
315-
}
316289
}
317290

318291
world llm-library {

llm/grok/src/conversions.rs

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,13 @@ use std::collections::HashMap;
99
pub fn events_to_request(events: Vec<Event>, config: Config) -> Result<CompletionsRequest, Error> {
1010
let options = config
1111
.provider_options
12-
.into_iter()
13-
.map(|kv| (kv.key, kv.value))
14-
.collect::<HashMap<_, _>>();
12+
.map(|options| {
13+
options
14+
.into_iter()
15+
.map(|kv| (kv.key, kv.value))
16+
.collect::<HashMap<_, _>>()
17+
})
18+
.unwrap_or_default();
1519

1620
let mut completion_messages = Vec::new();
1721
for event in events {
@@ -61,7 +65,7 @@ pub fn events_to_request(events: Vec<Event>, config: Config) -> Result<Completio
6165
}
6266

6367
let mut tools = Vec::new();
64-
for tool in config.tools {
68+
for tool in config.tools.unwrap_or_default() {
6569
tools.push(tool_definition_to_tool(tool)?)
6670
}
6771

llm/grok/src/lib.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ use crate::conversions::{
66
convert_client_tool_call_to_tool_call, convert_finish_reason, convert_usage, events_to_request,
77
process_response,
88
};
9-
use golem_llm::chat_session::ChatSession;
109
use golem_llm::chat_stream::{LlmChatStream, LlmChatStreamState};
1110
use golem_llm::config::{get_config_key, with_config_key};
1211
use golem_llm::durability::{DurableLLM, ExtendedGuest};
@@ -155,7 +154,6 @@ impl GrokComponent {
155154

156155
impl Guest for GrokComponent {
157156
type ChatStream = LlmChatStream<GrokChatStream>;
158-
type ChatSession = ChatSession<DurableGrokComponent>;
159157

160158
fn send(events: Vec<Event>, config: Config) -> Result<Response, Error> {
161159
let xai_api_key = get_config_key(Self::ENV_VAR_NAME)?;

llm/grok/wit/deps/golem-llm/golem-llm.wit

Lines changed: 2 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -173,11 +173,11 @@ interface llm {
173173
/// A sequence where the model stops generating tokens
174174
stop-sequences: option<list<string>>,
175175
/// List of available tools
176-
tools: list<tool-definition>,
176+
tools: option<list<tool-definition>>,
177177
/// Tool choice policy
178178
tool-choice: option<string>,
179179
/// Additional LLM provider specific key-value pairs
180-
provider-options: list<kv>,
180+
provider-options: option<list<kv>>,
181181
}
182182

183183
// --- Usage / Metadata ---
@@ -286,33 +286,6 @@ interface llm {
286286
events: list<event>,
287287
config: config,
288288
) -> chat-stream;
289-
290-
// --- Chat session ---
291-
292-
/// Chat session is a simple wrapper on top of events to help with maintaining chat history
293-
resource chat-session {
294-
/// Create new session with the provided config
295-
constructor(config: config);
296-
297-
/// Add a single user message to the chat events
298-
add-message: func(message: message);
299-
/// Add multiple user messages to the chat events
300-
add-messages: func(messages: list<message>);
301-
/// Add a single tool result to the chat events
302-
add-tool-result: func(tool-result: tool-result);
303-
/// Add multiple tool results to the chat events
304-
add-tool-results: func(tool-results: list<tool-result>);
305-
306-
/// Observe all events in the session
307-
get-chat-events: func() -> list<event>;
308-
// Replace all events in the session, which allows e.g. compacting them
309-
set-chat-events: func(events: list<event>);
310-
311-
/// Send the full accumulated chat events, responses are automatically added to the session chat events
312-
send: func() -> result<response, error>;
313-
/// Like Send, but streams responses
314-
%stream: func() -> chat-stream;
315-
}
316289
}
317290

318291
world llm-library {

0 commit comments

Comments
 (0)