23 changes: 23 additions & 0 deletions src/Infrastructure/BotSharp.Abstraction/Memory/AIContext.cs
@@ -0,0 +1,23 @@
namespace BotSharp.Abstraction.Memory;

/// <summary>
/// AI Context that can be injected into the AI model request.
/// Contains additional context information like memory, knowledge, etc.
/// </summary>
public class AIContext
{
/// <summary>
/// Context messages to be injected into the conversation
/// </summary>
public List<RoleDialogModel> ContextMessages { get; set; } = new();

/// <summary>
/// System instructions to be added
/// </summary>
public string? SystemInstruction { get; set; }

/// <summary>
/// Additional metadata
/// </summary>
public Dictionary<string, object> Metadata { get; set; } = new();
}
@@ -0,0 +1,35 @@
using System.Threading;

namespace BotSharp.Abstraction.Memory;

/// <summary>
/// Base implementation of IAIContextProvider with default behavior.
/// </summary>
public abstract class AIContextProviderBase : IAIContextProvider
{
public virtual string Name => GetType().Name;

/// <summary>
/// Priority for execution order. Lower values execute first.
/// Default is 0 (medium priority).
/// </summary>
public virtual int Priority => 0;

/// <summary>
/// Invoked before the AI model is called to provide additional context.
/// Override this method to provide custom context.
/// </summary>
public virtual ValueTask<AIContext?> InvokingAsync(InvokingContext context, CancellationToken ct)
{
return default;
}

/// <summary>
/// Invoked after the AI model has been called to process the response and update memory.
/// Override this method to save memory or process the response.
/// </summary>
public virtual ValueTask InvokedAsync(InvokedContext context, CancellationToken ct)
{
return default;
}
}
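
For illustration only (not part of this change): a custom provider built on this base class might look roughly like the sketch below. The UserPreferenceProvider name and the injected preference text are hypothetical, and the sketch assumes the usual BotSharp RoleDialogModel(role, content) constructor and AgentRole constants.

// Hypothetical example, not included in this PR: a provider that injects a
// static instruction and an extra context message before the model call.
public class UserPreferenceProvider : AIContextProviderBase
{
    // Run after default-priority providers (lower values execute first).
    public override int Priority => 10;

    public override ValueTask<AIContext?> InvokingAsync(InvokingContext context, CancellationToken ct)
    {
        var aiContext = new AIContext
        {
            SystemInstruction = "The user prefers concise answers.",
            ContextMessages =
            {
                new RoleDialogModel(AgentRole.System, "Known preference: concise answers.")
            }
        };
        return new ValueTask<AIContext?>(aiContext);
    }

    public override ValueTask InvokedAsync(InvokedContext context, CancellationToken ct)
    {
        // A real provider could persist context.Response here as long-term memory.
        return default;
    }
}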
@@ -0,0 +1,15 @@
using System.Threading;

namespace BotSharp.Abstraction.Memory;

/// <summary>
/// Orchestrates all registered IAIContextProvider instances around an AI model call.
/// </summary>
public interface IAIContextOrchestrator
{
/// <summary>
/// Runs every provider before the model call and aggregates the returned context.
/// </summary>
ValueTask<AIContext> OnInvokingAsync(InvokingContext invokingContext, CancellationToken ct = default);

/// <summary>
/// Runs every provider after the model call so each one can process the response.
/// </summary>
ValueTask OnInvokedAsync(InvokedContext invokedContext, CancellationToken ct = default);
}
@@ -0,0 +1,33 @@
using System.Threading;

namespace BotSharp.Abstraction.Memory;

/// <summary>
/// AI Context Provider interface for managing context before and after AI model invocation.
/// </summary>
public interface IAIContextProvider
{
/// <summary>
/// Priority for execution order. Lower values execute first.
/// </summary>
int Priority { get; }

/// <summary>
/// Name of the AI Context Provider.
/// </summary>
string Name { get; }

/// <summary>
/// Invoked before the AI model is called to provide additional context.
/// </summary>
/// <param name="context">The invoking context containing agent and dialog information</param>
/// <returns>AI context to be injected into the model request</returns>
ValueTask<AIContext?> InvokingAsync(InvokingContext context, CancellationToken ct = default);

/// <summary>
/// Invoked after the AI model has been called to process the response and update memory.
/// </summary>
/// <param name="context">The invoked context containing request and response information</param>
/// <returns>Completion task</returns>
ValueTask InvokedAsync(InvokedContext context, CancellationToken ct = default);
}
32 changes: 32 additions & 0 deletions src/Infrastructure/BotSharp.Abstraction/Memory/InvokedContext.cs
@@ -0,0 +1,32 @@
namespace BotSharp.Abstraction.Memory;

/// <summary>
/// Context information available after the AI model has been invoked (after call).
/// </summary>
public class InvokedContext
{
/// <summary>
/// The agent that was invoked
/// </summary>
public Agent Agent { get; set; } = null!;

/// <summary>
/// The request dialogs sent to the AI model
/// </summary>
public List<RoleDialogModel> RequestDialogs { get; set; } = new();

/// <summary>
/// The response from the AI model
/// </summary>
public RoleDialogModel Response { get; set; } = null!;

/// <summary>
/// The conversation ID
/// </summary>
public string ConversationId { get; set; } = string.Empty;

/// <summary>
/// Additional metadata
/// </summary>
public Dictionary<string, object> Metadata { get; set; } = new();
}
27 changes: 27 additions & 0 deletions src/Infrastructure/BotSharp.Abstraction/Memory/InvokingContext.cs
@@ -0,0 +1,27 @@
namespace BotSharp.Abstraction.Memory;

/// <summary>
/// Context information available when invoking the AI model (before call).
/// </summary>
public class InvokingContext
{
/// <summary>
/// The agent being invoked
/// </summary>
public Agent Agent { get; set; } = null!;

/// <summary>
/// The conversation dialogs
/// </summary>
public List<RoleDialogModel> Dialogs { get; set; } = new();

/// <summary>
/// The conversation ID
/// </summary>
public string ConversationId { get; set; } = string.Empty;

/// <summary>
/// Additional metadata
/// </summary>
public Dictionary<string, object> Metadata { get; set; } = new();
}
66 changes: 66 additions & 0 deletions src/Infrastructure/BotSharp.Core/Memory/AIContextOrchestrator.cs
@@ -0,0 +1,66 @@
using BotSharp.Abstraction.Memory;

namespace BotSharp.Core.Memory
{
public class AIContextOrchestrator : IAIContextOrchestrator
{
private readonly IEnumerable<IAIContextProvider> _providers;
private readonly ILogger _logger;

public AIContextOrchestrator(IEnumerable<IAIContextProvider> providers, ILogger<AIContextOrchestrator> logger)
{
_providers = providers.OrderBy(p => p.Priority).ToArray();
_logger = logger;
}

public async ValueTask<AIContext> OnInvokingAsync(InvokingContext invokingContext, CancellationToken ct = default)
{
// Copy the conversation dialogs so providers append context without mutating the caller's list.
var aggregated = new AIContext { ContextMessages = new List<RoleDialogModel>(invokingContext.Dialogs) };
foreach (var provider in _providers)
{
try
{
var ctx = await provider.InvokingAsync(invokingContext, ct);
if (ctx == null)
continue;

if (ctx.ContextMessages?.Count > 0)
{
aggregated.ContextMessages.AddRange(ctx.ContextMessages);
}

if (ctx.Metadata?.Count > 0)
{
foreach (var kv in ctx.Metadata)
aggregated.Metadata[kv.Key] = kv.Value;
}

if (!string.IsNullOrEmpty(ctx.SystemInstruction))
{
aggregated.SystemInstruction += ctx.SystemInstruction + "\n";
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "AIContextProvider {Name} InvokingAsync failed.", provider.Name);
}
}
return aggregated;
}

public async ValueTask OnInvokedAsync(InvokedContext invokedContext, CancellationToken ct = default)
{
// Unwind in reverse priority order, similar to middleware post-processing.
foreach (var p in _providers.Reverse())
{
try
{
await p.InvokedAsync(invokedContext, ct);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "AIContextProvider {Name} InvokedAsync failed.", p.Name);
}
}
}
}
}
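
Note that this excerpt does not show where IAIContextOrchestrator itself gets registered with the container. A minimal registration sketch is given below; the AddAIContext extension method name and the scoped lifetimes are assumptions, not code from this PR.

// Sketch only: the actual registration point inside BotSharp.Core is not shown in this diff.
using BotSharp.Abstraction.Memory;
using BotSharp.Core.Memory;
using Microsoft.Extensions.DependencyInjection;

public static class AIContextServiceCollectionExtensions
{
    public static IServiceCollection AddAIContext(this IServiceCollection services)
    {
        // One orchestrator per scope, aggregating all registered IAIContextProvider instances.
        services.AddScoped<IAIContextOrchestrator, AIContextOrchestrator>();

        // Providers are contributed separately, e.g. by AIMemoryPlugin.RegisterDI:
        // services.AddScoped<IAIContextProvider, ConversationMemoryProvider>();
        return services;
    }
}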
@@ -1,3 +1,4 @@
using BotSharp.Abstraction.Memory;
using BotSharp.Abstraction.Routing.Models;
using BotSharp.Abstraction.Templating;

@@ -35,17 +36,41 @@ public async Task<bool> InvokeAgent(
provider: provider,
model: model);

// Get conversation ID
var conversationId = _conversationService.ConversationId;

var invokingContext = new InvokingContext
{
Agent = agent,
Dialogs = dialogs,
ConversationId = conversationId
};

RoleDialogModel response;
var message = dialogs.Last();
var aiContext = await _aIContextOrchestrator.OnInvokingAsync(invokingContext);

if (options?.UseStream == true)
{
response = await chatCompletion.GetChatCompletionsStreamingAsync(agent, dialogs);
response = await chatCompletion.GetChatCompletionsStreamingAsync(agent, aiContext.ContextMessages);
}
else
{
response = await chatCompletion.GetChatCompletions(agent, dialogs);
response = await chatCompletion.GetChatCompletions(agent, aiContext.ContextMessages);
}

// Call AI Context Providers after the model has been invoked (InvokedAsync)
var invokedContext = new InvokedContext
{
Agent = agent,
RequestDialogs = aiContext.ContextMessages, // the full message list sent to the model, including injected context
Response = response,
ConversationId = conversationId
};

await _aIContextOrchestrator.OnInvokedAsync(invokedContext);

if (response.Role == AgentRole.Function)
{
message = RoleDialogModel.From(message, role: AgentRole.Function);
7 changes: 7 additions & 0 deletions src/Infrastructure/BotSharp.Core/Routing/RoutingService.cs
@@ -1,3 +1,4 @@
using BotSharp.Abstraction.Memory;
using BotSharp.Abstraction.Routing.Models;
using BotSharp.Abstraction.Routing.Settings;

@@ -8,6 +9,8 @@ public partial class RoutingService : IRoutingService
private readonly IServiceProvider _services;
private readonly RoutingSettings _settings;
private readonly IRoutingContext _context;
private readonly IAIContextOrchestrator _aIContextOrchestrator;
private readonly IConversationService _conversationService;
private readonly ILogger _logger;
private Agent _router;

@@ -18,11 +21,15 @@ public RoutingService(
IServiceProvider services,
RoutingSettings settings,
IRoutingContext context,
IAIContextOrchestrator aIContextOrchestrator,
IConversationService conversationService,
ILogger<RoutingService> logger)
{
_services = services;
_settings = settings;
_context = context;
_aIContextOrchestrator = aIContextOrchestrator;
_conversationService = conversationService;
_logger = logger;
}

25 changes: 25 additions & 0 deletions src/Plugins/BotSharp.Plugin.AIMemory/AIMemoryPlugin.cs
@@ -0,0 +1,25 @@
using BotSharp.Abstraction.Memory;
using BotSharp.Abstraction.Plugins;
using BotSharp.Plugin.AIMemory.Providers;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

namespace BotSharp.Plugin.AIMemory;

/// <summary>
/// Plugin for AI Memory management.
/// Provides context providers for managing conversation memory and context.
/// </summary>
public class AIMemoryPlugin : IBotSharpPlugin
{
public string Id => "8c6f9e42-5a3b-4d1e-9f2a-7b8c9d0e1f2a";
public string Name => "AI Memory Plugin";
public string Description => "Provides AI memory management similar to Microsoft Agent Framework's AIContextProvider pattern.";
public string IconUrl => "https://cdn-icons-png.flaticon.com/512/2103/2103633.png";

public void RegisterDI(IServiceCollection services, IConfiguration config)
{
// Register the conversation memory context provider
services.AddScoped<IAIContextProvider, ConversationMemoryProvider>();
}
}
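
ConversationMemoryProvider, which the plugin registers above, is not included in this excerpt. Purely to illustrate the shape such a provider could take, here is a sketch; IMemoryStore and its RecallAsync/SaveAsync methods are invented placeholders rather than BotSharp APIs.

// Rough sketch only -- the real ConversationMemoryProvider is not shown in this diff.
public interface IMemoryStore // hypothetical storage abstraction
{
    Task<IReadOnlyList<string>> RecallAsync(string conversationId, CancellationToken ct);
    Task SaveAsync(string conversationId, string content, CancellationToken ct);
}

public class ConversationMemoryProvider : AIContextProviderBase
{
    private readonly IMemoryStore _store;

    public ConversationMemoryProvider(IMemoryStore store)
    {
        _store = store;
    }

    public override async ValueTask<AIContext?> InvokingAsync(InvokingContext context, CancellationToken ct)
    {
        // Recall earlier facts for this conversation and surface them as a system instruction.
        var memories = await _store.RecallAsync(context.ConversationId, ct);
        if (memories.Count == 0) return null;

        return new AIContext
        {
            SystemInstruction = "Relevant memory from earlier turns:\n" + string.Join("\n", memories)
        };
    }

    public override async ValueTask InvokedAsync(InvokedContext context, CancellationToken ct)
    {
        // Persist the latest model response so it can be recalled in later turns.
        await _store.SaveAsync(context.ConversationId, context.Response.Content, ct);
    }
}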
@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>$(TargetFramework)</TargetFramework>
<Nullable>enable</Nullable>
<LangVersion>$(LangVersion)</LangVersion>
<VersionPrefix>$(BotSharpVersion)</VersionPrefix>
<GeneratePackageOnBuild>$(GeneratePackageOnBuild)</GeneratePackageOnBuild>
<GenerateDocumentationFile>$(GenerateDocumentationFile)</GenerateDocumentationFile>
<OutputPath>$(SolutionDir)packages</OutputPath>
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\..\Infrastructure\BotSharp.Core\BotSharp.Core.csproj" />
</ItemGroup>

</Project>