
Commit 8d2761a

experiment failed.
1 parent af0a819 commit 8d2761a

5 files changed: +113 -10 lines

Lines changed: 21 additions & 0 deletions

@@ -0,0 +1,21 @@
+using System.Text.Json;
+
+namespace BotSharp.Abstraction.Functions.Models;
+
+/// <summary>
+/// This class defines the LLM response output if function call needed
+/// </summary>
+public class FunctionCallingResponse
+{
+    [JsonPropertyName("role")]
+    public string Role { get; set; } = AgentRole.Assistant;
+
+    [JsonPropertyName("content")]
+    public string? Content { get; set; }
+
+    [JsonPropertyName("function_name")]
+    public string? FunctionName { get; set; }
+
+    [JsonPropertyName("args")]
+    public JsonDocument? Args { get; set; }
+}
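
Note: a minimal sketch (not part of this commit) of how a raw model reply could be deserialized into the new FunctionCallingResponse type with System.Text.Json. The sample JSON, the get_weather name, and the args values are invented for illustration.

// Hypothetical illustration: deserialize a model reply into FunctionCallingResponse.
using System;
using System.Text.Json;
using BotSharp.Abstraction.Functions.Models;

// A reply shaped like the "response_with_function" template asks for (sample values).
var raw = "{\"role\": \"function\", \"function_name\": \"get_weather\", \"args\": {\"city\": \"Seattle\"}}";

var parsed = JsonSerializer.Deserialize<FunctionCallingResponse>(raw);

if (parsed?.FunctionName != null)
{
    // The model requested a function call; Args stays a JsonDocument for later dispatch.
    Console.WriteLine($"call {parsed.FunctionName} with {JsonSerializer.Serialize(parsed.Args)}");
}
else
{
    // Plain assistant text, no function call.
    Console.WriteLine(parsed?.Content);
}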

src/Infrastructure/BotSharp.Abstraction/Functions/Models/FunctionDef.cs

Lines changed: 3 additions & 0 deletions

@@ -4,7 +4,10 @@ public class FunctionDef
 {
     public string Name { get; set; }
     public string Description { get; set; }
+
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? Impact { get; set; }
+
     public FunctionParametersDef Parameters { get; set; } = new FunctionParametersDef();

     public override string ToString()
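
Note: a brief, hypothetical illustration of what the new JsonIgnore condition changes: when Impact is null, the serialized function definition simply omits the field. The property values below are made up.

// Hypothetical illustration: a null Impact is dropped from the JSON output
// because of JsonIgnoreCondition.WhenWritingNull.
using System;
using System.Text.Json;
using BotSharp.Abstraction.Functions.Models;

var def = new FunctionDef
{
    Name = "get_weather",            // sample value
    Description = "Get the weather"  // sample value
    // Impact intentionally left null
};

var json = JsonSerializer.Serialize(def, new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    WriteIndented = true
});

// Prints name, description and parameters, but no "impact" property.
Console.WriteLine(json);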

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/ChatCompletionProvider.cs

Lines changed: 79 additions & 9 deletions

@@ -1,9 +1,13 @@
 using BotSharp.Abstraction.Agents;
 using BotSharp.Abstraction.Agents.Enums;
 using BotSharp.Abstraction.Conversations;
+using BotSharp.Abstraction.Functions.Models;
+using BotSharp.Abstraction.Routing;
 using BotSharp.Plugin.GoogleAI.Settings;
 using LLMSharp.Google.Palm;
 using Microsoft.Extensions.Logging;
+using System.Diagnostics.Metrics;
+using static System.Net.Mime.MediaTypeNames;

 namespace BotSharp.Plugin.GoogleAI.Providers;

@@ -33,18 +37,42 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
             hook.BeforeGenerating(agent, conversations)).ToArray());

         var client = new GooglePalmClient(apiKey: _settings.PaLM.ApiKey);
-        var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
-            .ToList();

-        var agentService = _services.GetRequiredService<IAgentService>();
-        var instruction = agentService.RenderedInstruction(agent);
-        var response = client.ChatAsync(messages, instruction, null).Result;
+        var (prompt, messages) = PrepareOptions(agent, conversations);
+
+        RoleDialogModel msg;
+
+        if (messages == null)
+        {
+            // use text completion
+            var response = client.GenerateTextAsync(prompt, null).Result;
+
+            var message = response.Candidates.First();

-        var message = response.Candidates.First();
-        var msg = new RoleDialogModel(AgentRole.Assistant, message.Content)
+            // check if returns function calling
+            var llmResponse = message.Output.JsonContent<FunctionCallingResponse>();
+
+            msg = new RoleDialogModel(llmResponse.Role, llmResponse.Content)
+            {
+                CurrentAgentId = agent.Id,
+                FunctionName = llmResponse.FunctionName,
+                FunctionArgs = JsonSerializer.Serialize(llmResponse.Args)
+            };
+        }
+        else
         {
-            CurrentAgentId = agent.Id
-        };
+            var response = client.ChatAsync(messages, context: prompt, examples: null, options: null).Result;
+
+            var message = response.Candidates.First();
+
+            // check if returns function calling
+            var llmResponse = message.Content.JsonContent<FunctionCallingResponse>();
+
+            msg = new RoleDialogModel(llmResponse.Role, llmResponse.Content ?? message.Content)
+            {
+                CurrentAgentId = agent.Id
+            };
+        }

         // After chat completion hook
         Task.WaitAll(hooks.Select(hook =>

@@ -56,6 +84,48 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
         return msg;
     }

+    private (string, List<PalmChatMessage>) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
+    {
+        var prompt = "";
+
+        var agentService = _services.GetRequiredService<IAgentService>();
+
+        if (!string.IsNullOrEmpty(agent.Instruction))
+        {
+            prompt += agentService.RenderedInstruction(agent);
+        }
+
+        var routing = _services.GetRequiredService<IRoutingService>();
+        var router = routing.Router;
+
+        if (agent.Functions != null && agent.Functions.Count > 0)
+        {
+            prompt += "\r\n\r\n[Functions] defined in JSON Schema:\r\n";
+            prompt += JsonSerializer.Serialize(agent.Functions, new JsonSerializerOptions
+            {
+                PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+                WriteIndented = true
+            });
+
+            prompt += "\r\n\r\n[Conversations]\r\n";
+            foreach (var dialog in conversations)
+            {
+                prompt += dialog.Role == AgentRole.Function ?
+                    $"{dialog.Role}: {dialog.FunctionName} => {dialog.Content}\r\n" :
+                    $"{dialog.Role}: {dialog.Content}\r\n";
+            }
+
+            prompt += "\r\n\r\n" + router.Templates.FirstOrDefault(x => x.Name == "response_with_function").Content;
+
+            return (prompt, null);
+        }
+
+        var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
+            .ToList();
+
+        return (prompt, messages);
+    }
+
     public Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived, Func<RoleDialogModel, Task> onFunctionExecuting)
     {
         throw new NotImplementedException();
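
Note: one fragile spot in the flow above is the strict parse of the model output: message.Output.JsonContent<FunctionCallingResponse>() assumes the reply is pure JSON. Below is a minimal, hypothetical sketch of a more tolerant parse; LlmOutputParser and TryParseFunctionCall are invented names, not BotSharp APIs.

// Hypothetical helper (not in the commit): tolerate non-JSON replies by falling
// back to a plain assistant message instead of throwing.
using System;
using System.Text.Json;
using BotSharp.Abstraction.Agents.Enums;
using BotSharp.Abstraction.Functions.Models;

// Example: a reply that is not JSON becomes a plain assistant message.
Console.WriteLine(LlmOutputParser.TryParseFunctionCall("Sure, here you go!").Content);

public static class LlmOutputParser
{
    public static FunctionCallingResponse TryParseFunctionCall(string output)
    {
        try
        {
            // Grab the outermost JSON object, in case the model wrapped it in extra text.
            var start = output.IndexOf('{');
            var end = output.LastIndexOf('}');
            if (start >= 0 && end > start)
            {
                var json = output.Substring(start, end - start + 1);
                var parsed = JsonSerializer.Deserialize<FunctionCallingResponse>(json);
                if (parsed != null) return parsed;
            }
        }
        catch (JsonException)
        {
            // Fall through to the plain-text fallback below.
        }

        // Treat anything unparsable as a normal assistant reply.
        return new FunctionCallingResponse
        {
            Role = AgentRole.Assistant,
            Content = output
        };
    }
}
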
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
 What is the next step based on the CONVERSATION?
-Response must be in appropriate JSON format.
+Response must be in required JSON format without any other contents.
 Route to the Agent that last handled the conversation if necessary.
 If user wants to speak to customer service, use function human_intervention_needed.
Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+1. Read the [Functions] definition, you can utilize the function to retrieve data or execute actions.
+2. Think step by step, check if specific function will provider data to help complete user request based on the [Conversation].
+3. If you need to call a function to decide how to response user,
+response in format: {"role": "function", "reason":"why choose this function", "function_name": "", "args": {}},
+otherwise response in format: {"role": "assistant", "reason":"why response to user", "content":""}.
+4. If the [Conversation] already contains the function execution result, don't need to call it again.
+5. If user mentioned some specific requirment, don't ask this again.
+
+Make your decision for the next step, output your response in JSON:
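
Note: a hypothetical "assistant"-style reply that follows this template also deserializes into the new FunctionCallingResponse type; the extra "reason" field has no matching property and is ignored by System.Text.Json's default settings. The reply text is invented for illustration.

// Hypothetical illustration: the assistant-shaped reply maps onto FunctionCallingResponse.
using System;
using System.Text.Json;
using BotSharp.Abstraction.Functions.Models;

var reply = "{\"role\": \"assistant\", \"reason\": \"no function needed\", \"content\": \"Sure, I can help with that.\"}";

var parsed = JsonSerializer.Deserialize<FunctionCallingResponse>(reply);

Console.WriteLine(parsed?.Role);     // assistant
Console.WriteLine(parsed?.Content);  // Sure, I can help with that.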
