From 2b6758277d30164caa973fdc1d9fe10269105926 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Wed, 5 Mar 2025 20:37:52 +0200 Subject: [PATCH 01/13] OllamaAgent can now handle and switch between multiple models seamlessly, listing available models dynamically via an API * Modified the agent to include a model selection mechanism that allows it to iterate through predefined models. * Introduced a `model` command to manage switching between models based on the available models. The SystemPrompt can now be configured alongside the selected model in OllamaAgent, providing users with a toolkit for task-oriented conversations. * Modified the agent to accommodate Ollama's SystemPrompt functionality. * Introduced a `system-prompt` command to manage the setting. OllamaAgent now supports the use of predefined model configurations, allowing users to easily switch between different models and system prompts based on specific requirements. * Introduced a `ModelConfig` record to encapsulate the data for each configuration. * Introduced a `config` command to manage switching between configurations based on the available predefined sets. 
--- shell/agents/AIShell.Ollama.Agent/Command.cs | 280 ++++++++++++++++++ .../AIShell.Ollama.Agent/OllamaAgent.cs | 43 ++- shell/agents/AIShell.Ollama.Agent/Settings.cs | 207 ++++++++++++- 3 files changed, 509 insertions(+), 21 deletions(-) create mode 100644 shell/agents/AIShell.Ollama.Agent/Command.cs diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs new file mode 100644 index 00000000..86636fe5 --- /dev/null +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -0,0 +1,280 @@ +using System.CommandLine; +using System.CommandLine.Completions; +using System.Threading.Tasks; +using AIShell.Abstraction; + +namespace AIShell.Ollama.Agent; + +internal sealed class ConfigCommand : CommandBase +{ + private readonly OllamaAgent _agnet; + public ConfigCommand(OllamaAgent agent) + : base("config", "Command for config management within the 'ollama' agent.") + { + _agnet = agent; + + var use = new Command("use", "Specify a config to use."); + var useConfig = new Argument( + name: "Config", + getDefaultValue: () => null, + description: "Name of a configuration.").AddCompletions(ConfigNameCompleter); + use.AddArgument(useConfig); + use.SetHandler(UseConfigAction, useConfig); + + var list = new Command("list", "List a specific config, or all available configs."); + var listConfig = new Argument( + name: "Config", + getDefaultValue: () => null, + description: "Name of a configuration.").AddCompletions(ConfigNameCompleter); + list.AddArgument(listConfig); + list.SetHandler(ListConfigAction, listConfig); + + AddCommand(list); + AddCommand(use); + } + + private void ListConfigAction(string name) + { + IHost host = Shell.Host; + + // Reload the setting file if needed. 
+ _agnet.ReloadSettings(); + + Settings settings = _agnet.Settings; + + if (settings is null) + { + host.WriteErrorLine("Invalid configuration."); + return; + } + + if (string.IsNullOrEmpty(name)) + { + settings.ListAllConfigs(host); + return; + } + + try + { + settings.ShowOneConfig(host, name); + } + catch (InvalidOperationException ex) + { + string availableConfigNames = ConfigNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available cofiguration(s): {availableConfigNames}."); + } + } + + private async Task UseConfigAction(string name) + { + // Reload the setting file if needed. + _agnet.ReloadSettings(); + + var setting = _agnet.Settings; + var host = Shell.Host; + + if (setting is null || setting.Configs.Count is 0) + { + host.WriteErrorLine("No configs configured."); + return; + } + + try + { + ModelConfig chosenConfig = (string.IsNullOrEmpty(name) + ? host.PromptForSelectionAsync( + title: "[orange1]Please select a [Blue]Configuration[/] to use[/]:", + choices: setting.Configs, + converter: ConfigName, + CancellationToken.None).GetAwaiter().GetResult() + : setting.Configs.FirstOrDefault(c => c.Name == name)) ?? throw new InvalidOperationException($"The configuration '{name}' doesn't exist."); + await setting.UseConfg(host, chosenConfig); + host.MarkupLine($"Using the config [green]{chosenConfig.Name}[/]:"); + } + catch (InvalidOperationException ex) + { + string availableConfigNames = ConfigNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available configurations: {availableConfigNames}."); + } + } + + private static string ConfigName(ModelConfig config) => config.Name.Any(Char.IsWhiteSpace) ? $"\"{config.Name}\"" : config.Name; + private IEnumerable ConfigNameCompleter(CompletionContext context) => _agnet.Settings?.Configs?.Select(ConfigName) ?? 
[]; + private string ConfigNamesAsString() => string.Join(", ", ConfigNameCompleter(null)); +} + +internal sealed class SystemPromptCommand : CommandBase +{ + private readonly OllamaAgent _agnet; + + public SystemPromptCommand(OllamaAgent agent) + : base("system-prompt", "Command for system prompt management within the 'ollama' agent.") + { + _agnet = agent; + + var show = new Command("show", "Show the current system prompt."); + show.SetHandler(ShowSystemPromptAction); + + var set = new Command("set", "Sets the system prompt."); + var systemPromptModel = new Argument( + name: "System-Prompt", + getDefaultValue: () => null, + description: "The system prompt"); + set.AddArgument(systemPromptModel); + set.SetHandler(SetSystemPromptAction, systemPromptModel); + + AddCommand(show); + AddCommand(set); + } + + private void ShowSystemPromptAction() + { + IHost host = Shell.Host; + + // Reload the setting file if needed. + _agnet.ReloadSettings(); + + Settings settings = _agnet.Settings; + + if (settings is null) + { + host.WriteErrorLine("Invalid configuration."); + return; + } + + try + { + settings.ShowSystemPrompt(host); + } + catch (InvalidOperationException ex) + { + host.WriteErrorLine($"{ex.Message}"); + } + } + + private void SetSystemPromptAction(string prompt) + { + IHost host = Shell.Host; + + // Reload the setting file if needed. 
+ _agnet.ReloadSettings(); + _agnet.ResetContext(); + + Settings settings = _agnet.Settings; + + if (settings is null) + { + host.WriteErrorLine("Invalid configuration."); + return; + } + + try + { + settings.SetSystemPrompt(host, prompt); + } + catch (InvalidOperationException ex) + { + host.WriteErrorLine($"{ex.Message}."); + } + } +} + +internal sealed class ModelCommand : CommandBase +{ + private readonly OllamaAgent _agnet; + + public ModelCommand(OllamaAgent agent) + : base("model", "Command for model management within the 'ollama' agent.") + { + _agnet = agent; + + var use = new Command("use", "Specify a model to use, or choose one from the available models."); + var useModel = new Argument( + name: "Model", + getDefaultValue: () => null, + description: "Name of a model.").AddCompletions(ModelNameCompleter); + use.AddArgument(useModel); + use.SetHandler(UseModelAction, useModel); + + var list = new Command("list", "List a specific model, or all available models."); + var listModel = new Argument( + name: "Model", + getDefaultValue: () => null, + description: "Name of a model.").AddCompletions(ModelNameCompleter); + list.AddArgument(listModel); + list.SetHandler(ListModelAction, listModel); + + AddCommand(list); + AddCommand(use); + } + + private void ListModelAction(string name) + { + IHost host = Shell.Host; + + // Reload the setting file if needed. 
+ _agnet.ReloadSettings(); + + Settings settings = _agnet.Settings; + + if (settings is null) + { + host.WriteErrorLine("Invalid configuration."); + return; + } + + if (string.IsNullOrEmpty(name)) + { + settings.ListAllModels(host).GetAwaiter().GetResult(); + return; + } + + try + { + settings.ShowOneModel(host, name).GetAwaiter().GetResult(); + } + catch (InvalidOperationException ex) + { + string availableModelNames = ModelNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available Models(s): {availableModelNames}."); + } + } + + private void UseModelAction(string name) + { + // Reload the setting file if needed. + _agnet.ReloadSettings(); + + var setting = _agnet.Settings; + var host = Shell.Host; + + if (setting is null || setting.GetAllModels().GetAwaiter().GetResult().Count is 0) + { + host.WriteErrorLine("No models configured."); + return; + } + + try + { + OllamaModel chosenModel = string.IsNullOrEmpty(name) + ? host.PromptForSelectionAsync( + title: "[orange1]Please select a [Blue]Model[/] to use[/]:", + choices: setting.GetAllModels().GetAwaiter().GetResult(), + converter: ModelName, + CancellationToken.None).GetAwaiter().GetResult() + : setting.GetModelByName(name).GetAwaiter().GetResult(); + + setting.UseModel(chosenModel); + host.MarkupLine($"Using the model [green]{chosenModel.Name}[/]:"); + } + catch (InvalidOperationException ex) + { + string availableModelNames = ModelNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available Modless: {availableModelNames}."); + } + } + + private static string ModelName(OllamaModel model) => model.Name; + private IEnumerable ModelNameCompleter(CompletionContext context) => _agnet.Settings?.GetAllModels().GetAwaiter().GetResult().Select(ModelName) ?? 
[]; + private string ModelNamesAsString() => string.Join(", ", ModelNameCompleter(null)); +} diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 480cf383..37dd9ef7 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -110,7 +110,7 @@ public void Initialize(AgentConfig config) /// /// Get commands that an agent can register to the shell when being loaded. /// - public IEnumerable GetCommands() => null; + public IEnumerable GetCommands() => [new ConfigCommand(this), new ModelCommand(this), new SystemPromptCommand(this)]; /// /// Gets the path to the setting file of the agent. @@ -148,6 +148,11 @@ public Task RefreshChatAsync(IShell shell, bool force) return Task.CompletedTask; } + public void ResetContext() + { + _request.Context = null; + } + /// /// Main chat function that takes the users input and passes it to the LLM and renders it. /// @@ -171,11 +176,18 @@ public async Task ChatAsync(string input, IShell shell) return false; } + var activeModel = await _settings.GetActiveModel().ConfigureAwait(false); + // Prepare request _request.Prompt = input; - _request.Model = _settings.Model; + _request.Model = activeModel.Name; _request.Stream = _settings.Stream; + if (!string.IsNullOrWhiteSpace(_settings.RunningConfig.SystemPrompt)) + { + _request.System = _settings.RunningConfig.SystemPrompt; + } + try { if (_request.Stream) @@ -238,15 +250,23 @@ public async Task ChatAsync(string input, IShell shell) catch (HttpRequestException e) { host.WriteErrorLine($"{e.Message}"); - host.WriteErrorLine($"Ollama model: \"{_settings.Model}\""); + host.WriteErrorLine($"Ollama active model: \"{activeModel.Name}\""); host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); } + finally + { + if (_settings.RunningConfig.ResetContext) + { + // Reset the request context + ResetContext(); 
+ } + } return true; } - private void ReloadSettings() + internal void ReloadSettings() { if (_reloadSettings) { @@ -308,13 +328,20 @@ private void NewExampleSettingFile() // 1. Install Ollama: `winget install Ollama.Ollama` // 2. Start Ollama API server: `ollama serve` // 3. Install Ollama model: `ollama pull phi3` - - // Declare Ollama model - "Model": "phi3", + "Configs": [ + { + "Name": "PowerShell Expert", + "Description": "A ollama agent with expertise in PowerShell scripting and command line utilities.", + "ModelName": "phi3", + "SystemPrompt": "1. You are a helpful and friendly assistant with expertise in PowerShell scripting and command line.\n2. Assume user is using the operating system `Windows 11` unless otherwise specified.\n3. Use the `code block` syntax in markdown to encapsulate any part in responses that is code, YAML, JSON or XML, but not table.\n4. When encapsulating command line code, use '```powershell' if it's PowerShell command; use '```sh' if it's non-PowerShell CLI command.\n5. When generating CLI commands, never ever break a command into multiple lines. Instead, always list all parameters and arguments of the command on the same line.\n6. Please keep the response concise but to the point. Do not overexplain." 
+ } + ], // Declare Ollama endpoint "Endpoint": "http://localhost:11434", // Enable Ollama streaming - "Stream": false + "Stream": false, + // Specify the default model to use + "DefaultConfig": "PowerShell Expert", } """; File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8); diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index 11ebd8de..eb649d2a 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -1,38 +1,219 @@ -using System.Text.Json; +using System.Collections.Generic; +using System.Text.Json; using System.Text.Json.Serialization; +using System.Linq; +using AIShell.Abstraction; +using OllamaSharp; +using OllamaSharp.Models; namespace AIShell.Ollama.Agent; internal class Settings { - public string Model { get; } + private OllamaModel _activeModel; + private ICollection _models = []; + private bool _initialized = false; + + public ICollection Configs { get; } public string Endpoint { get; } public bool Stream { get; } + public RunningConfig RunningConfig { get; private set; } public Settings(ConfigData configData) { - // Validate Model and Endpoint for null or empty values - if (string.IsNullOrWhiteSpace(configData.Model)) - { - throw new ArgumentException("\"Model\" key is missing."); - } - if (string.IsNullOrWhiteSpace(configData.Endpoint)) { throw new ArgumentException("\"Endpoint\" key is missing."); } - Model = configData.Model; + Configs = configData.Configs; Endpoint = configData.Endpoint; Stream = configData.Stream; + + RunningConfig = new RunningConfig(); + + if (Configs is not null) + { + var modelConfig = Configs.FirstOrDefault(c => c.Name == configData.DefaultConfig); + if (modelConfig is not null) + { + RunningConfig = modelConfig.ToRunnigConfig(); + } + } } + + private async Task EnsureModelsInitialized(CancellationToken cancellationToken = default) + { + if (!_initialized) + { + OllamaApiClient _client = null; + try + { + 
_client = new OllamaApiClient(this.Endpoint); + var models = await _client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); + this._models = models.Select(m => new OllamaModel(Name: m.Name)).ToList().AsReadOnly(); + + if (this._models.Count == 0) + { + throw new InvalidOperationException("No model is available."); + } + + if (string.IsNullOrEmpty(this.RunningConfig.ModelName)) + { + // Active GPT not specified, but there is only one GPT defined, then use it by default. + _activeModel = _models.First(); + } + else + { + _activeModel = _models.FirstOrDefault(m => m.Name == this.RunningConfig.ModelName); + if (_activeModel == null) + { + string message = $"The Model '{this.RunningConfig.ModelName}' specified as \"Default\" in the configuration doesn't exist."; + throw new InvalidOperationException(message); + } + } + _initialized = true; + } + finally + { + if (_client is IDisposable disposable) + { + disposable.Dispose(); + } + } + } + } + + internal async Task> GetAllModels(CancellationToken cancellationToken = default) + { + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + return this._models; + } + + internal async Task GetModelByName(string name, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(name); + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + var model = _models.FirstOrDefault(m => m.Name == name); + + if (model is not null) + { + return model; + } + + throw new InvalidOperationException($"A model with the name '{name}' doesn't exist."); + } + + + internal async Task UseModel(string name, CancellationToken cancellationToken = default) + { + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + _activeModel = await GetModelByName(name, cancellationToken); + } + + internal void UseModel(OllamaModel model) + { + _activeModel = model; + } + + internal void ShowSystemPrompt(IHost host) + { + host.RenderList( + 
RunningConfig.SystemPrompt, + [ + new CustomElement(label: "System prompt is", str => str) + ]); + } + + internal void SetSystemPrompt(IHost host, string prompt) + { + this.RunningConfig = this.RunningConfig with { SystemPrompt = prompt ?? string.Empty }; + host.RenderList( + this.RunningConfig.SystemPrompt, + [ + new CustomElement(label: "New system prompt is", str => str) + ]); + } + + internal async Task ListAllModels(IHost host, CancellationToken cancellationToken = default) + { + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + host.RenderTable( + [.. _models], + [ + new PropertyElement(nameof(OllamaModel.Name)), + new CustomElement(label: "Active", m => m.Name == _activeModel?.Name ? "true" : string.Empty) + ]); + } + + internal async Task ShowOneModel(IHost host, string name, CancellationToken cancellationToken = default) + { + var model = await GetModelByName(name, cancellationToken).ConfigureAwait(false); + host.RenderList( + model, + [ + new PropertyElement(nameof(OllamaModel.Name)), + new CustomElement(label: "Active", m => m.Name == _activeModel?.Name ? "true" : string.Empty) + ]); + } + + internal void ShowOneConfig(IHost host, string name, CancellationToken cancellationToken = default) + { + var config = this.Configs.FirstOrDefault(c => c.Name == name); + host.RenderList( + config, + [ + new PropertyElement(nameof(ModelConfig.Name)), + new PropertyElement(nameof(ModelConfig.Description)), + new PropertyElement(nameof(ModelConfig.ModelName)), + new PropertyElement(nameof(ModelConfig.SystemPrompt)), + new CustomElement(label: "Active", m => m.ToRunnigConfig() == this.RunningConfig ? 
"true" : string.Empty), + ]); + } + + internal async Task UseConfg(IHost host, ModelConfig config, CancellationToken cancellationToken = default) + { + this.RunningConfig = config.ToRunnigConfig(); + await UseModel(this.RunningConfig.ModelName, cancellationToken).ConfigureAwait(false); + } + + internal void ListAllConfigs(IHost host) + { + host.RenderTable( + [.. this.Configs], + [ + new PropertyElement(nameof(ModelConfig.Name)), + new CustomElement(label: "Active", m => m.ToRunnigConfig() == this.RunningConfig ? "true" : string.Empty) + ]); + } + + internal async Task GetActiveModel(CancellationToken cancellationToken = default) + { + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + return _activeModel; + } } -internal class ConfigData +internal record OllamaModel(string Name); + +internal record ModelConfig(string Name, string SystemPrompt, string ModelName, string Description, bool ResetContext); + +internal record ConfigData(List Configs, string Endpoint, bool Stream, string DefaultConfig); + +internal record RunningConfig(string Name = "", string ModelName = "", string SystemPrompt = "", bool ResetContext = false); + +static class ModelConfigExtensions { - public string Model { get; set; } - public string Endpoint { get; set; } - public bool Stream { get; set; } + public static RunningConfig ToRunnigConfig(this ModelConfig config) + { + return new RunningConfig() + { + Name = config.Name, + ModelName = config.ModelName, + SystemPrompt = config.SystemPrompt, + ResetContext = config.ResetContext + }; + } } /// From 2d96206aafccd735e849b7518cbb6914088b4251 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Sun, 9 Mar 2025 12:39:55 +0200 Subject: [PATCH 02/13] Fix indentation in the error message when handling HttpRequestException --- shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs 
b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 37dd9ef7..8c76d701 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -250,9 +250,9 @@ public async Task ChatAsync(string input, IShell shell) catch (HttpRequestException e) { host.WriteErrorLine($"{e.Message}"); - host.WriteErrorLine($"Ollama active model: \"{activeModel.Name}\""); - host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); - host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); + host.WriteErrorLine($"Ollama active model: \"{activeModel.Name}\""); + host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); + host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); } finally { From a3b4a2200e363fa01bb4db682295382230a7b555 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Wed, 12 Mar 2025 18:14:17 +0200 Subject: [PATCH 03/13] Refactor model handling in OllamaAgent to simplify model retrieval and improve error logging --- shell/agents/AIShell.Ollama.Agent/Command.cs | 10 +- .../AIShell.Ollama.Agent/OllamaAgent.cs | 8 +- shell/agents/AIShell.Ollama.Agent/Settings.cs | 240 ++++++++---------- 3 files changed, 114 insertions(+), 144 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index 86636fe5..b01ff129 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -256,16 +256,15 @@ private void UseModelAction(string name) try { - OllamaModel chosenModel = string.IsNullOrEmpty(name) + string chosenModel = string.IsNullOrEmpty(name) ? 
host.PromptForSelectionAsync( title: "[orange1]Please select a [Blue]Model[/] to use[/]:", choices: setting.GetAllModels().GetAwaiter().GetResult(), - converter: ModelName, CancellationToken.None).GetAwaiter().GetResult() : setting.GetModelByName(name).GetAwaiter().GetResult(); - setting.UseModel(chosenModel); - host.MarkupLine($"Using the model [green]{chosenModel.Name}[/]:"); + setting.UseModel(chosenModel).GetAwaiter().GetResult(); + host.MarkupLine($"Using the model [green]{chosenModel}[/]:"); } catch (InvalidOperationException ex) { @@ -274,7 +273,6 @@ private void UseModelAction(string name) } } - private static string ModelName(OllamaModel model) => model.Name; - private IEnumerable ModelNameCompleter(CompletionContext context) => _agnet.Settings?.GetAllModels().GetAwaiter().GetResult().Select(ModelName) ?? []; + private IEnumerable ModelNameCompleter(CompletionContext context) => _agnet.Settings?.GetAllModels().GetAwaiter().GetResult() ?? []; private string ModelNamesAsString() => string.Join(", ", ModelNameCompleter(null)); } diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 8c76d701..0251a1fb 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -176,11 +176,11 @@ public async Task ChatAsync(string input, IShell shell) return false; } - var activeModel = await _settings.GetActiveModel().ConfigureAwait(false); + var activeModel = await _settings.GetActiveModel(host).ConfigureAwait(false); // Prepare request _request.Prompt = input; - _request.Model = activeModel.Name; + _request.Model = activeModel; _request.Stream = _settings.Stream; if (!string.IsNullOrWhiteSpace(_settings.RunningConfig.SystemPrompt)) @@ -250,7 +250,7 @@ public async Task ChatAsync(string input, IShell shell) catch (HttpRequestException e) { host.WriteErrorLine($"{e.Message}"); - host.WriteErrorLine($"Ollama active model: \"{activeModel.Name}\""); + 
host.WriteErrorLine($"Ollama active model: \"{activeModel}\""); host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); } @@ -341,7 +341,7 @@ private void NewExampleSettingFile() // Enable Ollama streaming "Stream": false, // Specify the default model to use - "DefaultConfig": "PowerShell Expert", + "DefaultConfig": "PowerShell Expert" } """; File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8); diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index eb649d2a..eed96df7 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -1,165 +1,141 @@ -using System.Collections.Generic; -using System.Text.Json; +using System.Text.Json; using System.Text.Json.Serialization; -using System.Linq; using AIShell.Abstraction; using OllamaSharp; -using OllamaSharp.Models; namespace AIShell.Ollama.Agent; internal class Settings { - private OllamaModel _activeModel; - private ICollection _models = []; private bool _initialized = false; - - public ICollection Configs { get; } + private List AvailableModels { get; set; } = []; + public List Configs { get; } public string Endpoint { get; } public bool Stream { get; } - public RunningConfig RunningConfig { get; private set; } + public ModelConfig RunningConfig { get; private set; } public Settings(ConfigData configData) { if (string.IsNullOrWhiteSpace(configData.Endpoint)) { - throw new ArgumentException("\"Endpoint\" key is missing."); + throw new InvalidOperationException("'Endpoint' key is missing in configuration."); } - Configs = configData.Configs; + Configs = configData.Configs ?? []; Endpoint = configData.Endpoint; Stream = configData.Stream; - RunningConfig = new RunningConfig(); - - if (Configs is not null) + // Ensure the default configuration is available in the list of configurations. 
+ if (!string.IsNullOrEmpty(configData.DefaultConfig) && + !Configs.Any(c => c.Name == configData.DefaultConfig)) { - var modelConfig = Configs.FirstOrDefault(c => c.Name == configData.DefaultConfig); - if (modelConfig is not null) - { - RunningConfig = modelConfig.ToRunnigConfig(); - } + throw new InvalidOperationException($"The selected default configuration '{configData.DefaultConfig}' doesn't exist."); } + + RunningConfig = (Configs, configData.DefaultConfig) switch + { + (not [], "") => Configs.First() with { }, /* No default config - use the first one defined in Configs */ + (not [], _) => Configs.First(c => c.Name == configData.DefaultConfig) with { }, /* Use the default config */ + _ => new ModelConfig(nameof(RunningConfig), "") /* No config available - use empty */ + }; } private async Task EnsureModelsInitialized(CancellationToken cancellationToken = default) { - if (!_initialized) + if (_initialized) + { + return; + } + + OllamaApiClient _client = null; + try { - OllamaApiClient _client = null; - try + _client = new OllamaApiClient(Endpoint); + var models = await _client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); + AvailableModels = [.. models.Select(m => m.Name)]; + + if (AvailableModels.Count == 0) { - _client = new OllamaApiClient(this.Endpoint); - var models = await _client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); - this._models = models.Select(m => new OllamaModel(Name: m.Name)).ToList().AsReadOnly(); - - if (this._models.Count == 0) - { - throw new InvalidOperationException("No model is available."); - } - - if (string.IsNullOrEmpty(this.RunningConfig.ModelName)) - { - // Active GPT not specified, but there is only one GPT defined, then use it by default. 
- _activeModel = _models.First(); - } - else - { - _activeModel = _models.FirstOrDefault(m => m.Name == this.RunningConfig.ModelName); - if (_activeModel == null) - { - string message = $"The Model '{this.RunningConfig.ModelName}' specified as \"Default\" in the configuration doesn't exist."; - throw new InvalidOperationException(message); - } - } - _initialized = true; + throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); } - finally + _initialized = true; + } + finally + { + if (_client is IDisposable disposable) { - if (_client is IDisposable disposable) - { - disposable.Dispose(); - } + disposable.Dispose(); } } } - internal async Task> GetAllModels(CancellationToken cancellationToken = default) + internal async Task> GetAllModels(CancellationToken cancellationToken = default) { await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - return this._models; + return AvailableModels; } - internal async Task GetModelByName(string name, CancellationToken cancellationToken = default) + internal async Task GetModelByName(string name, CancellationToken cancellationToken = default) { ArgumentException.ThrowIfNullOrEmpty(name); await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - var model = _models.FirstOrDefault(m => m.Name == name); - - if (model is not null) + if (!AvailableModels.Contains(name)) { - return model; + throw new InvalidOperationException($"A model with the name '{name}' doesn't exist in the list of available models."); } - throw new InvalidOperationException($"A model with the name '{name}' doesn't exist."); - } - - - internal async Task UseModel(string name, CancellationToken cancellationToken = default) - { - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - _activeModel = await GetModelByName(name, cancellationToken); + return name; } - internal void UseModel(OllamaModel model) - { - _activeModel = model; - } + private static List> 
GetSystemPromptRenderElements() => [ new CustomElement(label: "System prompt", s => s) ]; - internal void ShowSystemPrompt(IHost host) - { - host.RenderList( - RunningConfig.SystemPrompt, - [ - new CustomElement(label: "System prompt is", str => str) - ]); - } + internal void ShowSystemPrompt(IHost host) => host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); internal void SetSystemPrompt(IHost host, string prompt) { - this.RunningConfig = this.RunningConfig with { SystemPrompt = prompt ?? string.Empty }; - host.RenderList( - this.RunningConfig.SystemPrompt, - [ - new CustomElement(label: "New system prompt is", str => str) - ]); + RunningConfig = RunningConfig with { SystemPrompt = prompt ?? string.Empty }; + host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); } + + private static List> GetRenderModelElements(string currentModelName) => [ + new CustomElement(label: "Model Name", m => m), + new CustomElement(label: "Active", m => m == currentModelName ? "true" : string.Empty) + ]; + + internal async Task UseModel(string name, CancellationToken cancellationToken = default) => + RunningConfig = RunningConfig with { ModelName = await GetModelByName(name, cancellationToken).ConfigureAwait(false) }; internal async Task ListAllModels(IHost host, CancellationToken cancellationToken = default) { await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - host.RenderTable( - [.. _models], - [ - new PropertyElement(nameof(OllamaModel.Name)), - new CustomElement(label: "Active", m => m.Name == _activeModel?.Name ? 
"true" : string.Empty) - ]); + host.RenderTable(AvailableModels, GetRenderModelElements(RunningConfig.ModelName)); } internal async Task ShowOneModel(IHost host, string name, CancellationToken cancellationToken = default) { var model = await GetModelByName(name, cancellationToken).ConfigureAwait(false); - host.RenderList( - model, + host.RenderList(model, GetRenderModelElements(RunningConfig.ModelName)); + } + + internal async Task UseConfg(ModelConfig config, CancellationToken cancellationToken = default) + { + RunningConfig = config with { }; + await UseModel(RunningConfig.ModelName, cancellationToken).ConfigureAwait(false); + } + + internal void ListAllConfigs(IHost host) + { + host.RenderTable( + Configs, [ - new PropertyElement(nameof(OllamaModel.Name)), - new CustomElement(label: "Active", m => m.Name == _activeModel?.Name ? "true" : string.Empty) + new PropertyElement(nameof(ModelConfig.Name)), + new CustomElement(label: "Active", m => m == RunningConfig ? "true" : string.Empty) ]); } - internal void ShowOneConfig(IHost host, string name, CancellationToken cancellationToken = default) + internal void ShowOneConfig(IHost host, string name) { - var config = this.Configs.FirstOrDefault(c => c.Name == name); + var config = Configs.FirstOrDefault(c => c.Name == name); host.RenderList( config, [ @@ -167,54 +143,50 @@ internal void ShowOneConfig(IHost host, string name, CancellationToken cancellat new PropertyElement(nameof(ModelConfig.Description)), new PropertyElement(nameof(ModelConfig.ModelName)), new PropertyElement(nameof(ModelConfig.SystemPrompt)), - new CustomElement(label: "Active", m => m.ToRunnigConfig() == this.RunningConfig ? "true" : string.Empty), + new CustomElement(label: "Active", m => m == RunningConfig ? 
"true" : string.Empty), ]); } - internal async Task UseConfg(IHost host, ModelConfig config, CancellationToken cancellationToken = default) + internal async Task GetActiveModel(IHost host, CancellationToken cancellationToken = default) { - this.RunningConfig = config.ToRunnigConfig(); - await UseModel(this.RunningConfig.ModelName, cancellationToken).ConfigureAwait(false); - } + await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); + if (string.IsNullOrEmpty(RunningConfig.ModelName)) + { + // There is no model set, so use the first one available. + RunningConfig = RunningConfig with { ModelName = AvailableModels.First() }; + host.MarkupLine($"No Ollama model is configured. Using the first available model [green]'{RunningConfig.ModelName}'."); + } + else + { + if (!AvailableModels.Contains(RunningConfig.ModelName)) + { + throw new InvalidOperationException($"The configured Ollama model '{RunningConfig.ModelName}' doesn't exist in the list of available models."); + } + } - internal void ListAllConfigs(IHost host) - { - host.RenderTable( - [.. this.Configs], - [ - new PropertyElement(nameof(ModelConfig.Name)), - new CustomElement(label: "Active", m => m.ToRunnigConfig() == this.RunningConfig ? "true" : string.Empty) - ]); + return RunningConfig.ModelName; } - - internal async Task GetActiveModel(CancellationToken cancellationToken = default) - { - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - return _activeModel; - } } -internal record OllamaModel(string Name); - -internal record ModelConfig(string Name, string SystemPrompt, string ModelName, string Description, bool ResetContext); - -internal record ConfigData(List Configs, string Endpoint, bool Stream, string DefaultConfig); +/// +/// Represents a configuration for an Ollama model. +/// +/// Required. The unique identifier name for this configuration. +/// Required. The name of the Ollama model to be used. +/// Optional. 
The system prompt to be used with this model configuration. +/// Optional. A human-readable description of this configuration. +/// Optional. Indicates whether the context should be reset when switching to this configuration. Defaults to false. -internal record RunningConfig(string Name = "", string ModelName = "", string SystemPrompt = "", bool ResetContext = false); +internal record ModelConfig(string Name, string ModelName, string SystemPrompt = "", string Description = "", bool ResetContext = false); -static class ModelConfigExtensions -{ - public static RunningConfig ToRunnigConfig(this ModelConfig config) - { - return new RunningConfig() - { - Name = config.Name, - ModelName = config.ModelName, - SystemPrompt = config.SystemPrompt, - ResetContext = config.ResetContext - }; - } -} +/// +/// Represents the configuration data for the AI Shell Ollama Agent. +/// +/// Optional. A list of predefined model configurations. +/// Required. The endpoint URL for the agent. +/// Optional. Indicates whether streaming is enabled. Defaults to false. +/// Optional. Specifies the default configuration name. If not provided, the first available config will be used. +internal record ConfigData(List Configs, string Endpoint, bool Stream = false, string DefaultConfig = ""); /// /// Use source generation to serialize and deserialize the setting file. 
From c18b1873f9f4597ae2880b7d36688d205a7107cc Mon Sep 17 00:00:00 2001 From: Ivan P Date: Sat, 29 Mar 2025 15:42:53 +0200 Subject: [PATCH 04/13] Remove redundant parameter from UseConfg method in ConfigCommand --- shell/agents/AIShell.Ollama.Agent/Command.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index b01ff129..3a5498a0 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -88,7 +88,7 @@ private async Task UseConfigAction(string name) converter: ConfigName, CancellationToken.None).GetAwaiter().GetResult() : setting.Configs.FirstOrDefault(c => c.Name == name)) ?? throw new InvalidOperationException($"The configuration '{name}' doesn't exist."); - await setting.UseConfg(host, chosenConfig); + await setting.UseConfg(chosenConfig); host.MarkupLine($"Using the config [green]{chosenConfig.Name}[/]:"); } catch (InvalidOperationException ex) From 19902320c7f64a5d75f6c4f852da08efb13d733f Mon Sep 17 00:00:00 2001 From: cnupy <38653063+cnupy@users.noreply.github.com> Date: Tue, 1 Apr 2025 08:46:19 +0300 Subject: [PATCH 05/13] Update shell/agents/AIShell.Ollama.Agent/Settings.cs Co-authored-by: Dongbo Wang --- shell/agents/AIShell.Ollama.Agent/Settings.cs | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index eed96df7..839a4e97 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -25,19 +25,20 @@ public Settings(ConfigData configData) Endpoint = configData.Endpoint; Stream = configData.Stream; - // Ensure the default configuration is available in the list of configurations. 
- if (!string.IsNullOrEmpty(configData.DefaultConfig) && - !Configs.Any(c => c.Name == configData.DefaultConfig)) + if (string.IsNullOrEmpty(configData.DefaultConfig)) { - throw new InvalidOperationException($"The selected default configuration '{configData.DefaultConfig}' doesn't exist."); + RunningConfig = Configs.Count > 0 + ? Configs[0] with { } /* No default config - use the first one defined in Configs */ + : new ModelConfig(nameof(RunningConfig), ModelName: ""); /* No config available - use empty */ } - - RunningConfig = (Configs, configData.DefaultConfig) switch + else { - (not [], "") => Configs.First() with { }, /* No default config - use the first one defined in Configs */ - (not [], _) => Configs.First(c => c.Name == configData.DefaultConfig) with { }, /* Use the default config */ - _ => new ModelConfig(nameof(RunningConfig), "") /* No config available - use empty */ - }; + // Ensure the default configuration is available in the list of configurations. + var first = Configs.FirstOrDefault(c => c.Name == configData.DefaultConfig) + ?? 
throw new InvalidOperationException($"The selected default configuration '{configData.DefaultConfig}' doesn't exist."); + // Use the default config + RunningConfig = first with { }; + } } private async Task EnsureModelsInitialized(CancellationToken cancellationToken = default) From 153970ad2fee4eb83047f32a3ea3b11da6c6ad61 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Tue, 1 Apr 2025 13:10:15 +0300 Subject: [PATCH 06/13] Refactor model handling in ConfigCommand and Settings classes for improved clarity and consistency --- shell/agents/AIShell.Ollama.Agent/Command.cs | 31 ++++----- shell/agents/AIShell.Ollama.Agent/Settings.cs | 65 ++++++++----------- 2 files changed, 43 insertions(+), 53 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index 3a5498a0..bbe07e7e 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -8,7 +8,8 @@ namespace AIShell.Ollama.Agent; internal sealed class ConfigCommand : CommandBase { private readonly OllamaAgent _agnet; - public ConfigCommand(OllamaAgent agent) + + public ConfigCommand(OllamaAgent agent) : base("config", "Command for config management within the 'ollama' agent.") { _agnet = agent; @@ -235,8 +236,7 @@ private void ListModelAction(string name) } catch (InvalidOperationException ex) { - string availableModelNames = ModelNamesAsString(); - host.WriteErrorLine($"{ex.Message} Available Models(s): {availableModelNames}."); + host.WriteErrorLine(ex.Message); } } @@ -245,10 +245,10 @@ private void UseModelAction(string name) // Reload the setting file if needed. 
_agnet.ReloadSettings(); - var setting = _agnet.Settings; + var settings = _agnet.Settings; var host = Shell.Host; - if (setting is null || setting.GetAllModels().GetAwaiter().GetResult().Count is 0) + if (settings is null || settings.GetAllModels().GetAwaiter().GetResult().Count is 0) { host.WriteErrorLine("No models configured."); return; @@ -256,23 +256,24 @@ private void UseModelAction(string name) try { - string chosenModel = string.IsNullOrEmpty(name) - ? host.PromptForSelectionAsync( + if (string.IsNullOrEmpty(name)) + { + name = host.PromptForSelectionAsync( title: "[orange1]Please select a [Blue]Model[/] to use[/]:", - choices: setting.GetAllModels().GetAwaiter().GetResult(), - CancellationToken.None).GetAwaiter().GetResult() - : setting.GetModelByName(name).GetAwaiter().GetResult(); + choices: settings.GetAllModels().GetAwaiter().GetResult(), + CancellationToken.None).GetAwaiter().GetResult(); + } + + settings.EnsureModelNameIsValid(name).GetAwaiter().GetResult(); - setting.UseModel(chosenModel).GetAwaiter().GetResult(); - host.MarkupLine($"Using the model [green]{chosenModel}[/]:"); + settings.UseModel(name).GetAwaiter().GetResult(); + host.MarkupLine($"Using the model [green]{name}[/]:"); } catch (InvalidOperationException ex) { - string availableModelNames = ModelNamesAsString(); - host.WriteErrorLine($"{ex.Message} Available Modless: {availableModelNames}."); + host.WriteErrorLine(ex.Message); } } private IEnumerable ModelNameCompleter(CompletionContext context) => _agnet.Settings?.GetAllModels().GetAwaiter().GetResult() ?? 
[]; - private string ModelNamesAsString() => string.Join(", ", ModelNameCompleter(null)); } diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index 839a4e97..0a7b8b66 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -8,7 +8,7 @@ namespace AIShell.Ollama.Agent; internal class Settings { private bool _initialized = false; - private List AvailableModels { get; set; } = []; + private List _availableModels = []; public List Configs { get; } public string Endpoint { get; } public bool Stream { get; } @@ -48,44 +48,31 @@ private async Task EnsureModelsInitialized(CancellationToken cancellationToken = return; } - OllamaApiClient _client = null; - try - { - _client = new OllamaApiClient(Endpoint); - var models = await _client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); - AvailableModels = [.. models.Select(m => m.Name)]; + using OllamaApiClient client = new (Endpoint); + var models = await client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); + _availableModels = [.. 
models.Select(m => m.Name)]; - if (AvailableModels.Count == 0) - { - throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); - } - _initialized = true; - } - finally + if (_availableModels.Count == 0) { - if (_client is IDisposable disposable) - { - disposable.Dispose(); - } + throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); } + _initialized = true; } internal async Task> GetAllModels(CancellationToken cancellationToken = default) { await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - return AvailableModels; + return _availableModels; } - internal async Task GetModelByName(string name, CancellationToken cancellationToken = default) + internal async Task EnsureModelNameIsValid(string name, CancellationToken cancellationToken = default) { ArgumentException.ThrowIfNullOrEmpty(name); await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - if (!AvailableModels.Contains(name)) + if (!_availableModels.Contains(name)) { - throw new InvalidOperationException($"A model with the name '{name}' doesn't exist in the list of available models."); + throw new InvalidOperationException($"A model with the name '{name}' doesn't exist. The available models are: [{string.Join(", ", _availableModels)}]."); } - - return name; } private static List> GetSystemPromptRenderElements() => [ new CustomElement(label: "System prompt", s => s) ]; @@ -93,35 +80,38 @@ internal async Task GetModelByName(string name, CancellationToken cancel internal void ShowSystemPrompt(IHost host) => host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); internal void SetSystemPrompt(IHost host, string prompt) - { + { RunningConfig = RunningConfig with { SystemPrompt = prompt ?? 
string.Empty }; host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); } - private static List> GetRenderModelElements(string currentModelName) => [ + private static List> GetRenderModelElements(Func isActive) => [ new CustomElement(label: "Model Name", m => m), - new CustomElement(label: "Active", m => m == currentModelName ? "true" : string.Empty) + new CustomElement(label: "Active", m => isActive(m) ? "true" : string.Empty) ]; - internal async Task UseModel(string name, CancellationToken cancellationToken = default) => - RunningConfig = RunningConfig with { ModelName = await GetModelByName(name, cancellationToken).ConfigureAwait(false) }; + internal async Task UseModel(string name, CancellationToken cancellationToken = default) + { + await EnsureModelNameIsValid(name, cancellationToken).ConfigureAwait(false); + RunningConfig = RunningConfig with { ModelName = name }; + } internal async Task ListAllModels(IHost host, CancellationToken cancellationToken = default) { await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - host.RenderTable(AvailableModels, GetRenderModelElements(RunningConfig.ModelName)); + host.RenderTable(_availableModels, GetRenderModelElements(m => m == RunningConfig.ModelName)); } internal async Task ShowOneModel(IHost host, string name, CancellationToken cancellationToken = default) { - var model = await GetModelByName(name, cancellationToken).ConfigureAwait(false); - host.RenderList(model, GetRenderModelElements(RunningConfig.ModelName)); + await EnsureModelNameIsValid(name, cancellationToken).ConfigureAwait(false); + host.RenderList(name, GetRenderModelElements(m => m == RunningConfig.ModelName)); } internal async Task UseConfg(ModelConfig config, CancellationToken cancellationToken = default) { + await EnsureModelNameIsValid(config.ModelName, cancellationToken).ConfigureAwait(false); RunningConfig = config with { }; - await UseModel(RunningConfig.ModelName, cancellationToken).ConfigureAwait(false); } 
internal void ListAllConfigs(IHost host) @@ -154,12 +144,12 @@ internal async Task GetActiveModel(IHost host, CancellationToken cancell if (string.IsNullOrEmpty(RunningConfig.ModelName)) { // There is no model set, so use the first one available. - RunningConfig = RunningConfig with { ModelName = AvailableModels.First() }; - host.MarkupLine($"No Ollama model is configured. Using the first available model [green]'{RunningConfig.ModelName}'."); + RunningConfig = RunningConfig with { ModelName = _availableModels.First() }; + host.MarkupLine($"No Ollama model is configured. Using the first available model [green]'{RunningConfig.ModelName}'[/]."); } else { - if (!AvailableModels.Contains(RunningConfig.ModelName)) + if (!_availableModels.Contains(RunningConfig.ModelName)) { throw new InvalidOperationException($"The configured Ollama model '{RunningConfig.ModelName}' doesn't exist in the list of available models."); } @@ -176,8 +166,7 @@ internal async Task GetActiveModel(IHost host, CancellationToken cancell /// Required. The name of the Ollama model to be used. /// Optional. The system prompt to be used with this model configuration. /// Optional. A human-readable description of this configuration. -/// Optional. Indicates whether the context should be reset when switching to this configuration. Defaults to false. - +/// Optional. Indicates whether the context should be reset after each interaction. Defaults to false. 
internal record ModelConfig(string Name, string ModelName, string SystemPrompt = "", string Description = "", bool ResetContext = false); /// From 9d1998f2d6cd9aabbe7e22396caac0d2c9145f41 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Sat, 5 Apr 2025 19:47:54 +0300 Subject: [PATCH 07/13] Rename ConfigCommand to PresetCommand and update related methods for preset management --- shell/agents/AIShell.Ollama.Agent/Command.cs | 130 ++++++----- .../AIShell.Ollama.Agent/OllamaAgent.cs | 20 +- shell/agents/AIShell.Ollama.Agent/Settings.cs | 220 +++++++++++++----- 3 files changed, 233 insertions(+), 137 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index bbe07e7e..1fc1040c 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -5,36 +5,36 @@ namespace AIShell.Ollama.Agent; -internal sealed class ConfigCommand : CommandBase +internal sealed class PresetCommand : CommandBase { private readonly OllamaAgent _agnet; - public ConfigCommand(OllamaAgent agent) - : base("config", "Command for config management within the 'ollama' agent.") + public PresetCommand(OllamaAgent agent) + : base("preset", "Command for preset management within the 'ollama' agent.") { _agnet = agent; - var use = new Command("use", "Specify a config to use."); - var useConfig = new Argument( - name: "Config", + var use = new Command("use", "Specify a preset to use."); + var usePreset = new Argument( + name: "Preset", getDefaultValue: () => null, - description: "Name of a configuration.").AddCompletions(ConfigNameCompleter); - use.AddArgument(useConfig); - use.SetHandler(UseConfigAction, useConfig); + description: "Name of a preset.").AddCompletions(PresetNameCompleter); + use.AddArgument(usePreset); + use.SetHandler(UsePresetAction, usePreset); - var list = new Command("list", "List a specific config, or all available configs."); - var listConfig = new Argument( - name: "Config", + var 
list = new Command("list", "List a specific preset, or all configured presets."); + var listPreset = new Argument( + name: "Preset", getDefaultValue: () => null, - description: "Name of a configuration.").AddCompletions(ConfigNameCompleter); - list.AddArgument(listConfig); - list.SetHandler(ListConfigAction, listConfig); + description: "Name of a preset.").AddCompletions(PresetNameCompleter); + list.AddArgument(listPreset); + list.SetHandler(ListPresetAction, listPreset); AddCommand(list); AddCommand(use); } - private void ListConfigAction(string name) + private void ListPresetAction(string name) { IHost host = Shell.Host; @@ -45,28 +45,28 @@ private void ListConfigAction(string name) if (settings is null) { - host.WriteErrorLine("Invalid configuration."); - return; - } - - if (string.IsNullOrEmpty(name)) - { - settings.ListAllConfigs(host); + host.WriteErrorLine("Error loading the configuration."); return; } try { - settings.ShowOneConfig(host, name); + if (string.IsNullOrEmpty(name)) + { + settings.ListAllPresets(host); + return; + } + + settings.ShowOnePreset(host, name); } catch (InvalidOperationException ex) { - string availableConfigNames = ConfigNamesAsString(); - host.WriteErrorLine($"{ex.Message} Available cofiguration(s): {availableConfigNames}."); + string availablePresetNames = PresetNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available preset(s): {availablePresetNames}."); } } - private async Task UseConfigAction(string name) + private async Task UsePresetAction(string name) { // Reload the setting file if needed. 
_agnet.ReloadSettings(); @@ -74,34 +74,40 @@ private async Task UseConfigAction(string name) var setting = _agnet.Settings; var host = Shell.Host; - if (setting is null || setting.Configs.Count is 0) + if (setting is null) { - host.WriteErrorLine("No configs configured."); + host.WriteErrorLine("Error loading the configuration."); + return; + } + + if (setting.Presets.Count is 0) + { + host.WriteErrorLine("There are no presets configured."); return; } try { - ModelConfig chosenConfig = (string.IsNullOrEmpty(name) + ModelConfig chosenPreset = (string.IsNullOrEmpty(name) ? host.PromptForSelectionAsync( - title: "[orange1]Please select a [Blue]Configuration[/] to use[/]:", - choices: setting.Configs, - converter: ConfigName, + title: "[orange1]Please select a [Blue]Preset[/] to use[/]:", + choices: setting.Presets, + converter: PresetName, CancellationToken.None).GetAwaiter().GetResult() - : setting.Configs.FirstOrDefault(c => c.Name == name)) ?? throw new InvalidOperationException($"The configuration '{name}' doesn't exist."); - await setting.UseConfg(chosenConfig); - host.MarkupLine($"Using the config [green]{chosenConfig.Name}[/]:"); + : setting.Presets.FirstOrDefault(c => c.Name == name)) ?? throw new InvalidOperationException($"The preset '{name}' doesn't exist."); + await setting.UsePreset(host, chosenPreset); + host.MarkupLine($"Using the preset [green]{chosenPreset.Name}[/]:"); } catch (InvalidOperationException ex) { - string availableConfigNames = ConfigNamesAsString(); - host.WriteErrorLine($"{ex.Message} Available configurations: {availableConfigNames}."); + string availablePresetNames = PresetNamesAsString(); + host.WriteErrorLine($"{ex.Message} Available presets: {availablePresetNames}."); } } - private static string ConfigName(ModelConfig config) => config.Name.Any(Char.IsWhiteSpace) ? $"\"{config.Name}\"" : config.Name; - private IEnumerable ConfigNameCompleter(CompletionContext context) => _agnet.Settings?.Configs?.Select(ConfigName) ?? 
[]; - private string ConfigNamesAsString() => string.Join(", ", ConfigNameCompleter(null)); + private static string PresetName(ModelConfig preset) => preset.Name.Any(Char.IsWhiteSpace) ? $"\"{preset.Name}\"" : preset.Name; + private IEnumerable PresetNameCompleter(CompletionContext context) => _agnet.Settings?.Presets?.Select(PresetName) ?? []; + private string PresetNamesAsString() => string.Join(", ", PresetNameCompleter(null)); } internal sealed class SystemPromptCommand : CommandBase @@ -139,7 +145,7 @@ private void ShowSystemPromptAction() if (settings is null) { - host.WriteErrorLine("Invalid configuration."); + host.WriteErrorLine("Error loading the configuration."); return; } @@ -165,7 +171,7 @@ private void SetSystemPromptAction(string prompt) if (settings is null) { - host.WriteErrorLine("Invalid configuration."); + host.WriteErrorLine("Error loading the configuration."); return; } @@ -220,18 +226,17 @@ private void ListModelAction(string name) if (settings is null) { - host.WriteErrorLine("Invalid configuration."); + host.WriteErrorLine("Error loading the configuration."); return; } - - if (string.IsNullOrEmpty(name)) - { - settings.ListAllModels(host).GetAwaiter().GetResult(); - return; - } - try { + if (string.IsNullOrEmpty(name)) + { + settings.ListAllModels(host).GetAwaiter().GetResult(); + return; + } + settings.ShowOneModel(host, name).GetAwaiter().GetResult(); } catch (InvalidOperationException ex) @@ -248,26 +253,35 @@ private void UseModelAction(string name) var settings = _agnet.Settings; var host = Shell.Host; - if (settings is null || settings.GetAllModels().GetAwaiter().GetResult().Count is 0) + if (settings is null) { - host.WriteErrorLine("No models configured."); + host.WriteErrorLine("Error loading the configuration."); return; } try { + if (!settings.PerformSelfcheck(host)) + { + return; + } + + if (settings.GetAllModels().GetAwaiter().GetResult().Count is 0) + { + host.WriteErrorLine("No models found."); + return; + } + if 
(string.IsNullOrEmpty(name)) { name = host.PromptForSelectionAsync( title: "[orange1]Please select a [Blue]Model[/] to use[/]:", - choices: settings.GetAllModels().GetAwaiter().GetResult(), + choices: settings.GetAllModels(host).GetAwaiter().GetResult(), CancellationToken.None).GetAwaiter().GetResult(); } - settings.EnsureModelNameIsValid(name).GetAwaiter().GetResult(); - - settings.UseModel(name).GetAwaiter().GetResult(); - host.MarkupLine($"Using the model [green]{name}[/]:"); + settings.UseModel(host, name).GetAwaiter().GetResult(); + host.MarkupLine($"Using the model [green]{name}[/]"); } catch (InvalidOperationException ex) { diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 0251a1fb..2082cc55 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -110,7 +110,7 @@ public void Initialize(AgentConfig config) /// /// Get commands that an agent can register to the shell when being loaded. /// - public IEnumerable GetCommands() => [new ConfigCommand(this), new ModelCommand(this), new SystemPromptCommand(this)]; + public IEnumerable GetCommands() => [new PresetCommand(this), new ModelCommand(this), new SystemPromptCommand(this)]; /// /// Gets the path to the setting file of the agent. @@ -170,9 +170,8 @@ public async Task ChatAsync(string input, IShell shell) // Reload the setting file if needed. ReloadSettings(); - if (IsLocalHost().IsMatch(_client.Uri.Host) && Process.GetProcessesByName("ollama").Length is 0) + if (!_settings.PerformSelfcheck(host)) { - host.WriteErrorLine("Please be sure the Ollama is installed and server is running. 
Check all the prerequisites in the README of this agent are met."); return false; } @@ -254,14 +253,6 @@ public async Task ChatAsync(string input, IShell shell) host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); } - finally - { - if (_settings.RunningConfig.ResetContext) - { - // Reset the request context - ResetContext(); - } - } return true; } @@ -346,11 +337,4 @@ private void NewExampleSettingFile() """; File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8); } - - /// - /// Defines a generated regular expression to match localhost addresses - /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity. - /// - [GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)] - internal partial Regex IsLocalHost(); } diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index 0a7b8b66..60f980e9 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -1,15 +1,19 @@ -using System.Text.Json; +using System.Diagnostics; +using System.Text.Json; using System.Text.Json.Serialization; +using System.Text.RegularExpressions; using AIShell.Abstraction; using OllamaSharp; namespace AIShell.Ollama.Agent; -internal class Settings +internal partial class Settings { private bool _initialized = false; + private bool _runningConfigChecked = false; + private bool? _isRunningLocalHost = null; private List _availableModels = []; - public List Configs { get; } + public List Presets { get; } public string Endpoint { get; } public bool Stream { get; } public ModelConfig RunningConfig { get; private set; } @@ -21,61 +25,65 @@ public Settings(ConfigData configData) throw new InvalidOperationException("'Endpoint' key is missing in configuration."); } - Configs = configData.Configs ?? []; + Presets = configData.Presets ?? 
[]; Endpoint = configData.Endpoint; Stream = configData.Stream; - if (string.IsNullOrEmpty(configData.DefaultConfig)) + if (string.IsNullOrEmpty(configData.DefaultPreset)) { - RunningConfig = Configs.Count > 0 - ? Configs[0] with { } /* No default config - use the first one defined in Configs */ - : new ModelConfig(nameof(RunningConfig), ModelName: ""); /* No config available - use empty */ + RunningConfig = Presets.Count > 0 + ? Presets[0] with { } /* No default preset - use the first one defined in Presets */ + : new ModelConfig(name: nameof(RunningConfig), modelName: ""); /* No presets are defined - use empty */ } else { // Ensure the default configuration is available in the list of configurations. - var first = Configs.FirstOrDefault(c => c.Name == configData.DefaultConfig) - ?? throw new InvalidOperationException($"The selected default configuration '{configData.DefaultConfig}' doesn't exist."); + var first = Presets.FirstOrDefault(c => c.Name == configData.DefaultPreset) + ?? throw new InvalidOperationException($"The selected default preset '{configData.DefaultPreset}' doesn't exist."); // Use the default config RunningConfig = first with { }; } } - private async Task EnsureModelsInitialized(CancellationToken cancellationToken = default) + private async Task EnsureModelsInitialized(IHost host, CancellationToken cancellationToken = default) { if (_initialized) - { - return; + { + return true; } - using OllamaApiClient client = new (Endpoint); - var models = await client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); - _availableModels = [.. models.Select(m => m.Name)]; - - if (_availableModels.Count == 0) + if (!PerformSelfcheck(host)) { - throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); + return false; } + + using OllamaApiClient client = new(Endpoint); + var models = await client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false); + _availableModels = [.. 
models.Select(m => m.Name)]; _initialized = true; + return true; } - internal async Task> GetAllModels(CancellationToken cancellationToken = default) + internal async Task> GetAllModels(IHost host = null, CancellationToken cancellationToken = default) { - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - return _availableModels; + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + return _availableModels; + } + + return []; } - internal async Task EnsureModelNameIsValid(string name, CancellationToken cancellationToken = default) + internal void EnsureModelNameIsValid(string name) { ArgumentException.ThrowIfNullOrEmpty(name); - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - if (!_availableModels.Contains(name)) + if (!_availableModels.Contains(name.AddLatestTagIfNecessery())) { throw new InvalidOperationException($"A model with the name '{name}' doesn't exist. The available models are: [{string.Join(", ", _availableModels)}]."); } } - private static List> GetSystemPromptRenderElements() => [ new CustomElement(label: "System prompt", s => s) ]; + private static List> GetSystemPromptRenderElements() => [new CustomElement(label: "System prompt", s => s)]; internal void ShowSystemPrompt(IHost host) => host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); @@ -84,51 +92,70 @@ internal void SetSystemPrompt(IHost host, string prompt) RunningConfig = RunningConfig with { SystemPrompt = prompt ?? string.Empty }; host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements()); } - + private static List> GetRenderModelElements(Func isActive) => [ new CustomElement(label: "Model Name", m => m), new CustomElement(label: "Active", m => isActive(m) ? 
"true" : string.Empty) ]; - internal async Task UseModel(string name, CancellationToken cancellationToken = default) + internal async Task UseModel(IHost host, string name, CancellationToken cancellationToken = default) { - await EnsureModelNameIsValid(name, cancellationToken).ConfigureAwait(false); - RunningConfig = RunningConfig with { ModelName = name }; + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + EnsureModelNameIsValid(name); + RunningConfig = RunningConfig with { ModelName = name }; + _runningConfigChecked = true; + } } internal async Task ListAllModels(IHost host, CancellationToken cancellationToken = default) { - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - host.RenderTable(_availableModels, GetRenderModelElements(m => m == RunningConfig.ModelName)); + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + host.RenderTable(_availableModels, GetRenderModelElements(m => m == RunningConfig.ModelName.AddLatestTagIfNecessery())); + } } internal async Task ShowOneModel(IHost host, string name, CancellationToken cancellationToken = default) { - await EnsureModelNameIsValid(name, cancellationToken).ConfigureAwait(false); - host.RenderList(name, GetRenderModelElements(m => m == RunningConfig.ModelName)); + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + EnsureModelNameIsValid(name); + host.RenderList(name, GetRenderModelElements(m => m == RunningConfig.ModelName.AddLatestTagIfNecessery())); + } } - internal async Task UseConfg(ModelConfig config, CancellationToken cancellationToken = default) + internal async Task UsePreset(IHost host, ModelConfig preset, CancellationToken cancellationToken = default) { - await EnsureModelNameIsValid(config.ModelName, cancellationToken).ConfigureAwait(false); - RunningConfig = config with { }; + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + 
EnsureModelNameIsValid(preset.ModelName); + RunningConfig = preset with { }; + _runningConfigChecked = true; + } } - internal void ListAllConfigs(IHost host) + internal void ListAllPresets(IHost host) { host.RenderTable( - Configs, + Presets, [ new PropertyElement(nameof(ModelConfig.Name)), new CustomElement(label: "Active", m => m == RunningConfig ? "true" : string.Empty) ]); } - internal void ShowOneConfig(IHost host, string name) + internal void ShowOnePreset(IHost host, string name) { - var config = Configs.FirstOrDefault(c => c.Name == name); + var preset = Presets.FirstOrDefault(c => c.Name == name); + if (preset is null) + { + host.WriteErrorLine($"The preset '{name}' doesn't exist."); + return; + } + host.RenderList( - config, + preset, [ new PropertyElement(nameof(ModelConfig.Name)), new PropertyElement(nameof(ModelConfig.Description)), @@ -140,43 +167,109 @@ internal void ShowOneConfig(IHost host, string name) internal async Task GetActiveModel(IHost host, CancellationToken cancellationToken = default) { - await EnsureModelsInitialized(cancellationToken).ConfigureAwait(false); - if (string.IsNullOrEmpty(RunningConfig.ModelName)) + if (_runningConfigChecked is false) { - // There is no model set, so use the first one available. - RunningConfig = RunningConfig with { ModelName = _availableModels.First() }; - host.MarkupLine($"No Ollama model is configured. Using the first available model [green]'{RunningConfig.ModelName}'[/]."); + if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) + { + if (string.IsNullOrEmpty(RunningConfig.ModelName)) + { + // There is no model set, so use the first one available. + if (_availableModels.Count == 0) + { + throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); + } + + RunningConfig = RunningConfig with { ModelName = _availableModels.First() }; + host.MarkupLine($"No Ollama model is configured. 
Using the first available model [green]'{RunningConfig.ModelName}'[/]."); + } + else + { + EnsureModelNameIsValid(RunningConfig.ModelName); + } + + _runningConfigChecked = true; + } + else + { + throw new InvalidOperationException($"Error initializing models from '{Endpoint}'."); + } } - else + + return RunningConfig.ModelName; + } + + internal bool PerformSelfcheck(IHost host) + { + if (_isRunningLocalHost is null) { - if (!_availableModels.Contains(RunningConfig.ModelName)) + var endpointUri = new Uri(Endpoint); + _isRunningLocalHost = IsLocalHost().IsMatch(endpointUri.Host); + } + + if (_isRunningLocalHost is true && Process.GetProcessesByName("ollama").Length is 0) + { + var message = "Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met."; + if (host is null) + { + throw new InvalidOperationException(message); + } + else { - throw new InvalidOperationException($"The configured Ollama model '{RunningConfig.ModelName}' doesn't exist in the list of available models."); + host.WriteErrorLine(message); + return false; } } - return RunningConfig.ModelName; + return true; } + + /// + /// Defines a generated regular expression to match localhost addresses + /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity. + /// + [GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)] + internal partial Regex IsLocalHost(); } /// /// Represents a configuration for an Ollama model. /// -/// Required. The unique identifier name for this configuration. -/// Required. The name of the Ollama model to be used. -/// Optional. The system prompt to be used with this model configuration. -/// Optional. A human-readable description of this configuration. -/// Optional. Indicates whether the context should be reset after each interaction. Defaults to false. 
-internal record ModelConfig(string Name, string ModelName, string SystemPrompt = "", string Description = "", bool ResetContext = false); +internal record ModelConfig +{ + [JsonRequired] + public string Name { get; init; } + + [JsonRequired] + public string ModelName { get; init; } + + public string SystemPrompt { get; init; } = string.Empty; + + public string Description { get; init; } = string.Empty; + + /// + /// Initializes a new instance of the class with the specified parameters. + /// + /// The name of the model configuration. + /// The name of the model to be used. + /// An optional system prompt to guide the model's behavior. Defaults to an empty string. + /// An optional description of the model configuration. Defaults to an empty string. + public ModelConfig(string name, string modelName, string systemPrompt = "", string description = "") + { + Name = name; + ModelName = modelName; + SystemPrompt = systemPrompt; + Description = description; + } +} /// /// Represents the configuration data for the AI Shell Ollama Agent. /// -/// Optional. A list of predefined model configurations. -/// Required. The endpoint URL for the agent. +/// Optional. A list of predefined model configurations. +/// Optional. The endpoint URL for the agent. Defaults to "http://localhost:11434" /// Optional. Indicates whether streaming is enabled. Defaults to false. -/// Optional. Specifies the default configuration name. If not provided, the first available config will be used. -internal record ConfigData(List Configs, string Endpoint, bool Stream = false, string DefaultConfig = ""); +/// Optional. Specifies the default preset name. If not provided, the first available preset will be used. +internal record ConfigData(List Presets, string Endpoint = "http://localhost:11434", bool Stream = false, string DefaultPreset = ""); /// /// Use source generation to serialize and deserialize the setting file. 
@@ -190,3 +283,8 @@ internal record ConfigData(List Configs, string Endpoint, bool Stre UseStringEnumConverter = true)] [JsonSerializable(typeof(ConfigData))] internal partial class SourceGenerationContext : JsonSerializerContext { } + +static class TagExtensions +{ + public static string AddLatestTagIfNecessery(this string model) => model.IndexOf(":") == -1 ? string.Concat(model, ":latest") : model; +} From 6d66786147c92c369b44df18bf639b31a212f875 Mon Sep 17 00:00:00 2001 From: Ivan P Date: Sat, 5 Apr 2025 20:33:50 +0300 Subject: [PATCH 08/13] Rename "Configs" to "Presets" and update related comments in OllamaAgent configuration. --- shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 2082cc55..21de91e4 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -319,7 +319,9 @@ private void NewExampleSettingFile() // 1. Install Ollama: `winget install Ollama.Ollama` // 2. Start Ollama API server: `ollama serve` // 3. 
Install Ollama model: `ollama pull phi3` - "Configs": [ + + // Declare predefined model configurations + "Presets": [ { "Name": "PowerShell Expert", "Description": "A ollama agent with expertise in PowerShell scripting and command line utilities.", @@ -331,8 +333,8 @@ private void NewExampleSettingFile() "Endpoint": "http://localhost:11434", // Enable Ollama streaming "Stream": false, - // Specify the default model to use - "DefaultConfig": "PowerShell Expert" + // Specify the default preset to use + "DefaultPreset": "PowerShell Expert" } """; File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8); From 51a611ed394102bdd61b86ab3127a8d0cea6ac43 Mon Sep 17 00:00:00 2001 From: Dongbo Wang Date: Wed, 23 Apr 2025 14:10:22 -0700 Subject: [PATCH 09/13] Some refactoring and updates --- shell/agents/AIShell.Ollama.Agent/Command.cs | 32 +++---- .../AIShell.Ollama.Agent/OllamaAgent.cs | 13 +-- shell/agents/AIShell.Ollama.Agent/Settings.cs | 87 +++++++++---------- 3 files changed, 64 insertions(+), 68 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index 1fc1040c..33496698 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -89,11 +89,11 @@ private async Task UsePresetAction(string name) try { ModelConfig chosenPreset = (string.IsNullOrEmpty(name) - ? host.PromptForSelectionAsync( + ? await host.PromptForSelectionAsync( title: "[orange1]Please select a [Blue]Preset[/] to use[/]:", choices: setting.Presets, converter: PresetName, - CancellationToken.None).GetAwaiter().GetResult() + CancellationToken.None) : setting.Presets.FirstOrDefault(c => c.Name == name)) ?? 
throw new InvalidOperationException($"The preset '{name}' doesn't exist."); await setting.UsePreset(host, chosenPreset); host.MarkupLine($"Using the preset [green]{chosenPreset.Name}[/]:"); @@ -155,7 +155,7 @@ private void ShowSystemPromptAction() } catch (InvalidOperationException ex) { - host.WriteErrorLine($"{ex.Message}"); + host.WriteErrorLine(ex.Message); } } @@ -181,7 +181,7 @@ private void SetSystemPromptAction(string prompt) } catch (InvalidOperationException ex) { - host.WriteErrorLine($"{ex.Message}."); + host.WriteErrorLine(ex.Message); } } } @@ -215,7 +215,7 @@ public ModelCommand(OllamaAgent agent) AddCommand(use); } - private void ListModelAction(string name) + private async Task ListModelAction(string name) { IHost host = Shell.Host; @@ -233,11 +233,11 @@ private void ListModelAction(string name) { if (string.IsNullOrEmpty(name)) { - settings.ListAllModels(host).GetAwaiter().GetResult(); + await settings.ListAllModels(host); return; } - settings.ShowOneModel(host, name).GetAwaiter().GetResult(); + await settings.ShowOneModel(host, name); } catch (InvalidOperationException ex) { @@ -245,7 +245,7 @@ private void ListModelAction(string name) } } - private void UseModelAction(string name) + private async Task UseModelAction(string name) { // Reload the setting file if needed. 
_agnet.ReloadSettings(); @@ -261,26 +261,28 @@ private void UseModelAction(string name) try { - if (!settings.PerformSelfcheck(host)) + bool success = await settings.PerformSelfcheck(host, checkEndpointOnly: true); + if (!success) { return; } - if (settings.GetAllModels().GetAwaiter().GetResult().Count is 0) + var allModels = await settings.GetAllModels(); + if (allModels.Count is 0) { - host.WriteErrorLine("No models found."); + host.WriteErrorLine($"No models found from '{settings.Endpoint}'."); return; } if (string.IsNullOrEmpty(name)) { - name = host.PromptForSelectionAsync( + name = await host.PromptForSelectionAsync( title: "[orange1]Please select a [Blue]Model[/] to use[/]:", - choices: settings.GetAllModels(host).GetAwaiter().GetResult(), - CancellationToken.None).GetAwaiter().GetResult(); + choices: allModels, + CancellationToken.None); } - settings.UseModel(host, name).GetAwaiter().GetResult(); + await settings.UseModel(host: null, name); host.MarkupLine($"Using the model [green]{name}[/]"); } catch (InvalidOperationException ex) diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 21de91e4..0b02d51e 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -170,21 +170,22 @@ public async Task ChatAsync(string input, IShell shell) // Reload the setting file if needed. 
ReloadSettings(); - if (!_settings.PerformSelfcheck(host)) + bool success = await _settings.PerformSelfcheck(host); + if (!success) { return false; } - var activeModel = await _settings.GetActiveModel(host).ConfigureAwait(false); + ModelConfig config = _settings.RunningConfig; // Prepare request _request.Prompt = input; - _request.Model = activeModel; + _request.Model = config.ModelName; _request.Stream = _settings.Stream; - if (!string.IsNullOrWhiteSpace(_settings.RunningConfig.SystemPrompt)) + if (!string.IsNullOrWhiteSpace(config.SystemPrompt)) { - _request.System = _settings.RunningConfig.SystemPrompt; + _request.System = config.SystemPrompt; } try @@ -249,7 +250,7 @@ public async Task ChatAsync(string input, IShell shell) catch (HttpRequestException e) { host.WriteErrorLine($"{e.Message}"); - host.WriteErrorLine($"Ollama active model: \"{activeModel}\""); + host.WriteErrorLine($"Ollama active model: \"{config.ModelName}\""); host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\""); host.WriteErrorLine($"Ollama settings: \"{SettingFile}\""); } diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index 60f980e9..a92ad225 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -52,9 +52,14 @@ private async Task EnsureModelsInitialized(IHost host, CancellationToken c return true; } - if (!PerformSelfcheck(host)) + // Skip the self check when host is null. 
+ if (host is not null) { - return false; + bool success = await PerformSelfcheck(host, checkEndpointOnly: true); + if (!success) + { + return false; + } } using OllamaApiClient client = new(Endpoint); @@ -77,6 +82,7 @@ internal async Task> GetAllModels(IHost host = null, Cancell internal void EnsureModelNameIsValid(string name) { ArgumentException.ThrowIfNullOrEmpty(name); + if (!_availableModels.Contains(name.AddLatestTagIfNecessery())) { throw new InvalidOperationException($"A model with the name '{name}' doesn't exist. The available models are: [{string.Join(", ", _availableModels)}]."); @@ -94,7 +100,7 @@ internal void SetSystemPrompt(IHost host, string prompt) } private static List> GetRenderModelElements(Func isActive) => [ - new CustomElement(label: "Model Name", m => m), + new CustomElement(label: "Name", m => m), new CustomElement(label: "Active", m => isActive(m) ? "true" : string.Empty) ]; @@ -165,64 +171,50 @@ internal void ShowOnePreset(IHost host, string name) ]); } - internal async Task GetActiveModel(IHost host, CancellationToken cancellationToken = default) + internal async Task PerformSelfcheck(IHost host, bool checkEndpointOnly = false) { - if (_runningConfigChecked is false) - { - if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false)) - { - if (string.IsNullOrEmpty(RunningConfig.ModelName)) - { - // There is no model set, so use the first one available. - if (_availableModels.Count == 0) - { - throw new InvalidOperationException($"No models are available to use from '{Endpoint}'."); - } - - RunningConfig = RunningConfig with { ModelName = _availableModels.First() }; - host.MarkupLine($"No Ollama model is configured. 
Using the first available model [green]'{RunningConfig.ModelName}'[/]."); - } - else - { - EnsureModelNameIsValid(RunningConfig.ModelName); - } - - _runningConfigChecked = true; - } - else - { - throw new InvalidOperationException($"Error initializing models from '{Endpoint}'."); - } - } - - return RunningConfig.ModelName; - } + _isRunningLocalHost ??= IsLocalHost().IsMatch(new Uri(Endpoint).Host); - internal bool PerformSelfcheck(IHost host) - { - if (_isRunningLocalHost is null) + if (_isRunningLocalHost is true && Process.GetProcessesByName("ollama").Length is 0) { - var endpointUri = new Uri(Endpoint); - _isRunningLocalHost = IsLocalHost().IsMatch(endpointUri.Host); + host.WriteErrorLine("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met."); + return false; } - if (_isRunningLocalHost is true && Process.GetProcessesByName("ollama").Length is 0) + if (!checkEndpointOnly && !_runningConfigChecked) { - var message = "Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met."; - if (host is null) + await EnsureModelsInitialized(host: null).ConfigureAwait(false); + if (string.IsNullOrEmpty(RunningConfig.ModelName)) { - throw new InvalidOperationException(message); + // There is no model set, so use the first one available. + if (_availableModels.Count is 0) + { + host.WriteErrorLine($"No models are available to use from '{Endpoint}'."); + return false; + } + + RunningConfig = RunningConfig with { ModelName = _availableModels.First() }; + host.MarkupLine($"No Ollama model is configured. 
Using the first available model [green]'{RunningConfig.ModelName}'[/]."); } else { - host.WriteErrorLine(message); - return false; + try + { + EnsureModelNameIsValid(RunningConfig.ModelName); + } + catch (InvalidOperationException e) + { + host.WriteErrorLine(e.Message); + return false; + } } + + _runningConfigChecked = true; } return true; } - + /// /// Defines a generated regular expression to match localhost addresses /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity. @@ -286,5 +278,6 @@ internal partial class SourceGenerationContext : JsonSerializerContext { } static class TagExtensions { - public static string AddLatestTagIfNecessery(this string model) => model.IndexOf(":") == -1 ? string.Concat(model, ":latest") : model; + public static string AddLatestTagIfNecessery(this string model) => + model.Contains(':') ? model : string.Concat(model, ":latest"); } From c9f549ec3017012891539d1ab479bd17d8743bd0 Mon Sep 17 00:00:00 2001 From: Dongbo Wang Date: Wed, 23 Apr 2025 14:14:39 -0700 Subject: [PATCH 10/13] Update the readme doc --- shell/agents/AIShell.Ollama.Agent/README.md | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/README.md b/shell/agents/AIShell.Ollama.Agent/README.md index bd8e91e1..6e06f120 100644 --- a/shell/agents/AIShell.Ollama.Agent/README.md +++ b/shell/agents/AIShell.Ollama.Agent/README.md @@ -20,11 +20,21 @@ To configure the agent, run `/agent config ollama` to open up the setting file i // 2. Start Ollama API server: `ollama serve` // 3. Install Ollama model: `ollama pull phi3` - // Declare Ollama model - "Model": "phi3", + // Declare predefined model configurations + "Presets": [ + { + "Name": "PowerShell Expert", + "Description": "A ollama agent with expertise in PowerShell scripting and command line utilities.", + "ModelName": "phi3", + "SystemPrompt": "You are a helpful and friendly assistant with expertise in PowerShell scripting and command line." 
+ } + ], + // Declare Ollama endpoint "Endpoint": "http://localhost:11434", // Enable Ollama streaming - "Stream": false + "Stream": false, + // Specify the default preset to use + "DefaultPreset": "PowerShell Expert" } ``` From db22595bfb618ab43b97b1709dbd3520e28db23c Mon Sep 17 00:00:00 2001 From: Dongbo Wang Date: Wed, 23 Apr 2025 14:15:54 -0700 Subject: [PATCH 11/13] A little cleanup --- shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs index 0b02d51e..60da8520 100644 --- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs +++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs @@ -1,14 +1,12 @@ -using System.Diagnostics; using System.Text; using System.Text.Json; -using System.Text.RegularExpressions; using AIShell.Abstraction; using OllamaSharp; using OllamaSharp.Models; namespace AIShell.Ollama.Agent; -public sealed partial class OllamaAgent : ILLMAgent +public sealed class OllamaAgent : ILLMAgent { private bool _reloadSettings; private bool _isDisposed; From ed5fb4412066b3a7905bfec67330f7fbbf29cf60 Mon Sep 17 00:00:00 2001 From: Dongbo Wang Date: Thu, 24 Apr 2025 10:31:40 -0700 Subject: [PATCH 12/13] Add more comments and make some additional updates --- shell/agents/AIShell.Ollama.Agent/Command.cs | 30 ++++++++++++++----- shell/agents/AIShell.Ollama.Agent/Settings.cs | 12 +++++++- 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Command.cs b/shell/agents/AIShell.Ollama.Agent/Command.cs index 33496698..dbc66422 100644 --- a/shell/agents/AIShell.Ollama.Agent/Command.cs +++ b/shell/agents/AIShell.Ollama.Agent/Command.cs @@ -59,7 +59,7 @@ private void ListPresetAction(string name) settings.ShowOnePreset(host, name); } - catch (InvalidOperationException ex) + catch (Exception ex) { string availablePresetNames = PresetNamesAsString(); 
host.WriteErrorLine($"{ex.Message} Available preset(s): {availablePresetNames}."); @@ -98,7 +98,7 @@ private async Task UsePresetAction(string name) await setting.UsePreset(host, chosenPreset); host.MarkupLine($"Using the preset [green]{chosenPreset.Name}[/]:"); } - catch (InvalidOperationException ex) + catch (Exception ex) { string availablePresetNames = PresetNamesAsString(); host.WriteErrorLine($"{ex.Message} Available presets: {availablePresetNames}."); @@ -153,7 +153,7 @@ private void ShowSystemPromptAction() { settings.ShowSystemPrompt(host); } - catch (InvalidOperationException ex) + catch (Exception ex) { host.WriteErrorLine(ex.Message); } @@ -179,7 +179,7 @@ private void SetSystemPromptAction(string prompt) { settings.SetSystemPrompt(host, prompt); } - catch (InvalidOperationException ex) + catch (Exception ex) { host.WriteErrorLine(ex.Message); } @@ -239,7 +239,7 @@ private async Task ListModelAction(string name) await settings.ShowOneModel(host, name); } - catch (InvalidOperationException ex) + catch (Exception ex) { host.WriteErrorLine(ex.Message); } @@ -282,14 +282,28 @@ private async Task UseModelAction(string name) CancellationToken.None); } - await settings.UseModel(host: null, name); + await settings.UseModel(host, name); host.MarkupLine($"Using the model [green]{name}[/]"); } - catch (InvalidOperationException ex) + catch (Exception ex) { host.WriteErrorLine(ex.Message); } } - private IEnumerable ModelNameCompleter(CompletionContext context) => _agnet.Settings?.GetAllModels().GetAwaiter().GetResult() ?? []; + private IEnumerable ModelNameCompleter(CompletionContext context) + { + try + { + // Model retrieval may throw. 
+ var results = _agnet.Settings?.GetAllModels().Result; + if (results is not null) + { + return results; + } + } + catch (Exception) { } + + return []; + } } diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index a92ad225..54e16688 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -45,6 +45,12 @@ public Settings(ConfigData configData) } } + /// + /// Retrieve available models from the Ollama endpoint. + /// + /// Used for writing error to host when it's a local endpoint but the Ollama server is not started. The endpoint check will be skipped if a `null` is specified. + /// Used for cancel the operation. + /// private async Task EnsureModelsInitialized(IHost host, CancellationToken cancellationToken = default) { if (_initialized) @@ -52,7 +58,10 @@ private async Task EnsureModelsInitialized(IHost host, CancellationToken c return true; } - // Skip the self check when host is null. + // The endpoint check is supposed to be interactive and can be skipped in some cases, such as when + // the `PerformSelfcheck` method was already called right before entering this method. + // So, we will simply skip the endpoint check when the passed-in host is null. If there's something + // wrong with the endpoint, the subsequent calls to retrieve models will fail and throw anyway. if (host is not null) { bool success = await PerformSelfcheck(host, checkEndpointOnly: true); @@ -183,6 +192,7 @@ internal async Task PerformSelfcheck(IHost host, bool checkEndpointOnly = if (!checkEndpointOnly && !_runningConfigChecked) { + // Skip the endpoint check in 'EnsureModelsInitialized' as we already did it. 
await EnsureModelsInitialized(host: null).ConfigureAwait(false); if (string.IsNullOrEmpty(RunningConfig.ModelName)) { From 2cefb83fa4a4a6837bc517bc50a8cdce73dcccd0 Mon Sep 17 00:00:00 2001 From: Dongbo Wang Date: Thu, 24 Apr 2025 10:42:41 -0700 Subject: [PATCH 13/13] Update comments --- shell/agents/AIShell.Ollama.Agent/Settings.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs index 54e16688..8b1db2bd 100644 --- a/shell/agents/AIShell.Ollama.Agent/Settings.cs +++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs @@ -48,7 +48,7 @@ public Settings(ConfigData configData) /// /// Retrieve available models from the Ollama endpoint. /// - /// Used for writing error to host when it's a local endpoint but the Ollama server is not started. The endpoint check will be skipped if a `null` is specified. + /// Used for writing error to host when it's a local endpoint but the Ollama server is not started. When the value is null, the endpoint check will be skipped. /// Used for cancel the operation. /// private async Task EnsureModelsInitialized(IHost host, CancellationToken cancellationToken = default) @@ -60,7 +60,7 @@ private async Task EnsureModelsInitialized(IHost host, CancellationToken c // The endpoint check is supposed to be interactive and can be skipped in some cases, such as when // the `PerformSelfcheck` method was already called right before entering this method. - // So, we will simply skip the endpoint check when the passed-in host is null. If there's something + // So, we will simply skip the endpoint check when the passed-in host is null. If there's anything // wrong with the endpoint, the subsequent calls to retrieve models will fail and throw anyway. if (host is not null) {