 using BotSharp.Abstraction.Files.Utilities;
 using OpenAI.Chat;
+using System.ClientModel;
 
 namespace BotSharp.Plugin.AzureOpenAI.Providers.Chat;
 
@@ -37,43 +38,67 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
         var chatClient = client.GetChatClient(_model);
         var (prompt, messages, options) = PrepareOptions(agent, conversations);
 
-        var response = chatClient.CompleteChat(messages, options);
-        var value = response.Value;
-        var reason = value.FinishReason;
-        var content = value.Content;
-        var text = content.FirstOrDefault()?.Text ?? string.Empty;
-
+        ChatCompletion value = default;
         RoleDialogModel responseMessage;
-        if (reason == ChatFinishReason.FunctionCall)
+
+        try
         {
-            responseMessage = new RoleDialogModel(AgentRole.Function, text)
-            {
-                CurrentAgentId = agent.Id,
-                MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
-                FunctionName = value.FunctionCall.FunctionName,
-                FunctionArgs = value.FunctionCall.FunctionArguments
-            };
+            var response = chatClient.CompleteChat(messages, options);
+            value = response.Value;
 
-            // Sometimes the LLM will generate a function name with the agent name.
-            if (!string.IsNullOrEmpty(responseMessage.FunctionName))
+            var reason = value.FinishReason;
+            var content = value.Content;
+            var text = content.FirstOrDefault()?.Text ?? string.Empty;
+
+            if (reason == ChatFinishReason.FunctionCall)
             {
-                responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last();
+                responseMessage = new RoleDialogModel(AgentRole.Function, text)
+                {
+                    CurrentAgentId = agent.Id,
+                    MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
+                    FunctionName = value.FunctionCall.FunctionName,
+                    FunctionArgs = value.FunctionCall.FunctionArguments
+                };
+
+                // Sometimes the LLM will generate a function name with the agent name.
+                if (!string.IsNullOrEmpty(responseMessage.FunctionName))
+                {
+                    responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last();
+                }
+            }
+            else if (reason == ChatFinishReason.ToolCalls)
+            {
+                var toolCall = value.ToolCalls.FirstOrDefault();
+                responseMessage = new RoleDialogModel(AgentRole.Function, text)
+                {
+                    CurrentAgentId = agent.Id,
+                    MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
+                    FunctionName = toolCall?.FunctionName,
+                    FunctionArgs = toolCall?.FunctionArguments
+                };
+            }
+            else
+            {
+                responseMessage = new RoleDialogModel(AgentRole.Assistant, text)
+                {
+                    CurrentAgentId = agent.Id,
+                    MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
+                };
             }
         }
-        else if (reason == ChatFinishReason.ToolCalls)
+        catch (ClientResultException ex)
         {
-            var toolCall = value.ToolCalls.FirstOrDefault();
-            responseMessage = new RoleDialogModel(AgentRole.Function, text)
+            _logger.LogError(ex, ex.Message);
+            responseMessage = new RoleDialogModel(AgentRole.Assistant, "The response was filtered due to the prompt triggering our content management policy. Please modify your prompt and retry.")
             {
                 CurrentAgentId = agent.Id,
                 MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
-                FunctionName = toolCall?.FunctionName,
-                FunctionArgs = toolCall?.FunctionArguments
             };
         }
-        else
+        catch (Exception ex)
         {
-            responseMessage = new RoleDialogModel(AgentRole.Assistant, text)
+            _logger.LogError(ex, ex.Message);
+            responseMessage = new RoleDialogModel(AgentRole.Assistant, ex.Message)
             {
                 CurrentAgentId = agent.Id,
                 MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty,
@@ -88,8 +113,8 @@ public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDial
                 Prompt = prompt,
                 Provider = Provider,
                 Model = _model,
-                PromptCount = response.Value.Usage.InputTokens,
-                CompletionCount = response.Value.Usage.OutputTokens
+                PromptCount = value?.Usage?.InputTokens ?? 0,
+                CompletionCount = value?.Usage?.OutputTokens ?? 0
             });
         }
 
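For reference, below is a minimal standalone sketch of the pattern this commit introduces: wrap the synchronous CompleteChat call in a try/catch so that a ClientResultException (which Azure OpenAI raises, for example, when the content filter rejects a request) degrades into a user-facing message instead of an unhandled failure. This is an illustration only, assuming the Azure.AI.OpenAI / OpenAI 2.x SDKs; the endpoint, API key, and deployment name are placeholders, and ContentFilterFallbackDemo is a hypothetical class, not part of BotSharp.

using System;
using System.ClientModel;
using System.Linq;
using Azure.AI.OpenAI;
using OpenAI.Chat;

public class ContentFilterFallbackDemo
{
    public static void Main()
    {
        // Placeholders: point these at your own Azure OpenAI resource and deployment.
        var client = new AzureOpenAIClient(
            new Uri("https://<your-resource>.openai.azure.com/"),
            new ApiKeyCredential("<api-key>"));
        var chatClient = client.GetChatClient("<deployment-name>");

        string reply;
        try
        {
            // Same call the provider makes; Value holds the ChatCompletion on success.
            ChatCompletion value = chatClient
                .CompleteChat(new ChatMessage[] { ChatMessage.CreateUserMessage("Hello!") })
                .Value;
            reply = value.Content.FirstOrDefault()?.Text ?? string.Empty;
        }
        catch (ClientResultException ex)
        {
            // Content-filter rejections (and other non-success responses) surface here,
            // so the caller gets a friendly message rather than an unhandled exception.
            Console.Error.WriteLine($"Request failed ({ex.Status}): {ex.Message}");
            reply = "The response was filtered due to the prompt triggering our content management policy. Please modify your prompt and retry.";
        }

        Console.WriteLine(reply);
    }
}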