 using Anthropic.SDK.Common;
 using BotSharp.Abstraction.Conversations;
+using BotSharp.Abstraction.Files;
+using BotSharp.Abstraction.Files.Models;
+using BotSharp.Abstraction.Files.Utilities;
 using BotSharp.Abstraction.Hooks;
-using BotSharp.Abstraction.MLTasks.Settings;
 using System.Text.Json.Nodes;
 using System.Text.Json.Serialization;

@@ -39,10 +41,10 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
         }

         var settingsService = _services.GetRequiredService<ILlmProviderService>();
-        var settings = settingsService.GetSetting(Provider, _model ?? agent.LlmConfig?.Model ?? "claude-3-haiku");
+        var settings = settingsService.GetSetting(Provider, _model ?? agent.LlmConfig?.Model ?? "claude-haiku-4-5-20251001");

         var client = new AnthropicClient(new APIAuthentication(settings.ApiKey));
-        var (prompt, parameters) = PrepareOptions(agent, conversations, settings);
+        var (prompt, parameters) = PrepareOptions(agent, conversations);

         var response = await client.Messages.GetClaudeMessageAsync(parameters);

@@ -74,6 +76,8 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
             };
         }

+        var tokenUsage = response.Usage;
+
         // After chat completion hook
         foreach (var hook in contentHooks)
         {
@@ -82,8 +86,8 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
                 Prompt = prompt,
                 Provider = Provider,
                 Model = _model,
-                TextInputTokens = response.Usage?.InputTokens ?? 0,
-                TextOutputTokens = response.Usage?.OutputTokens ?? 0
+                TextInputTokens = tokenUsage?.InputTokens ?? 0,
+                TextOutputTokens = tokenUsage?.OutputTokens ?? 0
             });
         }

@@ -101,10 +105,13 @@ public Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent, List<
         throw new NotImplementedException();
     }

-    private (string, MessageParameters) PrepareOptions(Agent agent, List<RoleDialogModel> conversations,
-        LlmModelSetting settings)
+    private (string, MessageParameters) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
     {
         var agentService = _services.GetRequiredService<IAgentService>();
+        var state = _services.GetRequiredService<IConversationStateService>();
+        var settingsService = _services.GetRequiredService<ILlmProviderService>();
+        var settings = settingsService.GetSetting(Provider, _model);
+        var allowMultiModal = settings != null && settings.MultiModal;
         renderedInstructions = [];

         // Prepare instruction and functions
@@ -140,7 +147,17 @@ public Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent, List<
         {
             if (message.Role == AgentRole.User)
             {
-                messages.Add(new Message(RoleType.User, message.LlmContent));
+                var contentParts = new List<ContentBase>();
+                if (allowMultiModal && !message.Files.IsNullOrEmpty())
+                {
+                    CollectMessageContentParts(contentParts, message.Files);
+                }
+                contentParts.Add(new TextContent() { Text = message.LlmContent });
+                messages.Add(new Message
+                {
+                    Role = RoleType.User,
+                    Content = contentParts
+                });
             }
             else if (message.Role == AgentRole.Assistant)
             {
@@ -177,7 +194,6 @@ public Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent, List<
             }
         }

-        var state = _services.GetRequiredService<IConversationStateService>();
         var temperature = decimal.Parse(state.GetState("temperature", "0.0"));
         var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
             ? tokens
@@ -201,8 +217,6 @@ public Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent, List<
         };
     }

-    ;
-
     JsonSerializerOptions? jsonSerializationOptions = new()
     {
         DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
@@ -299,4 +313,53 @@ public void SetModelName(string model)
     {
         _model = model;
     }
+
+    private void CollectMessageContentParts(List<ContentBase> contentParts, List<BotSharpFile> files)
+    {
+        foreach (var file in files)
+        {
+            if (!string.IsNullOrEmpty(file.FileData))
+            {
+                var (contentType, binary) = FileUtility.GetFileInfoFromData(file.FileData);
+                var contentPart = new ImageContent
+                {
+                    Source = new ImageSource
+                    {
+                        MediaType = contentType,
+                        Data = Convert.ToBase64String(binary.ToArray())
+                    }
+                };
+                contentParts.Add(contentPart);
+            }
+            else if (!string.IsNullOrEmpty(file.FileStorageUrl))
+            {
+                var fileStorage = _services.GetRequiredService<IFileStorageService>();
+                var binary = fileStorage.GetFileBytes(file.FileStorageUrl);
+                var contentType = FileUtility.GetFileContentType(file.FileStorageUrl);
+                var contentPart = new ImageContent
+                {
+                    Source = new ImageSource
+                    {
+                        MediaType = contentType,
+                        Data = Convert.ToBase64String(binary)
+                    }
+                };
+                contentParts.Add(contentPart);
+            }
+            else if (!string.IsNullOrEmpty(file.FileUrl))
+            {
+                var contentType = FileUtility.GetFileContentType(file.FileUrl);
+
+                var contentPart = new ImageContent
+                {
+                    Source = new ImageSource
+                    {
+                        MediaType = contentType,
+                        Url = file.FileUrl
+                    }
+                };
+                contentParts.Add(contentPart);
+            }
+        }
+    }
 }