Skip to content

Commit f60c990

Browse files
committed
Add OpenAI provider support
This change extends the LLM endpoint configuration system to support OpenAI as a new provider alongside the existing Ollama and Anthropic options. The assistant submodule has been updated to the latest revision with OpenAI support. The UI now exposes OpenAI as a selectable provider in the endpoint wizard, with an appropriate default base URL (`https://api.openai.com`). Configuration logic has been updated to properly initialize OpenAI endpoints with the required authentication headers. A new provider constant `kClientTypeOpenAI` has been added to identify OpenAI endpoints. The endpoint data collection flow now correctly handles OpenAI configuration, including the API key and max tokens settings. API key input is now properly enabled for OpenAI providers. Additionally, the `CanRunTool` method signature was corrected to accept tool arguments, and a typo in the callback registration was fixed (`SetTookInvokeCallback` → `SetToolInvokeCallback`).

* UI wizard and provider choice display
* LLM manager provider constants and endpoint configuration
* Endpoint data handling for OpenAI
* Tool callback registration and signature

**Generated by CodeLite**

Signed-off-by: Eran Ifrah <eran@codelite.org>
1 parent bf6820c commit f60c990

File tree

6 files changed

+25
-9
lines changed

6 files changed

+25
-9
lines changed

Plugin/ai/LLMManager.cpp

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -611,7 +611,7 @@ void Manager::Stop()
611611

612612
static std::unordered_set<std::string> allowed_tools;
613613

614-
bool Manager::CanRunTool(const std::string& tool_name)
614+
bool Manager::CanRunTool(const std::string& tool_name, [[maybe_unused]] assistant::json args)
615615
{
616616
if (allowed_tools.contains(tool_name)) {
617617
return true;
@@ -669,7 +669,7 @@ void Manager::Start(std::shared_ptr<assistant::ClientBase> client)
669669
}
670670

671671
m_client->SetCachingPolicy(GetConfig().GetCachePolicy());
672-
m_client->SetTookInvokeCallback(&Manager::CanRunTool);
672+
m_client->SetToolInvokeCallback(&Manager::CanRunTool);
673673
m_client->ClearSystemMessages();
674674
m_client->AddSystemMessage(kSystemMessageRetryProtocol);
675675

@@ -897,6 +897,10 @@ void Manager::AddNewEndpoint(const llm::EndpointData& d)
897897
llm::json http_headers;
898898
http_headers["Host"] = "127.0.0.1";
899899
new_endpoint["http_headers"] = http_headers;
900+
} else if (d.client_type == kClientTypeOpenAI) {
901+
llm::json http_headers;
902+
http_headers["Authorization"] = "Bearer " + d.api_key.value_or("<INSERT_API_KEY>");
903+
new_endpoint["http_headers"] = http_headers;
900904
}
901905

902906
new_endpoint["type"] = d.client_type;

Plugin/ai/LLMManager.hpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,7 @@ struct WXDLLIMPEXP_SDK ThreadTask {
136136
// Type of providers.
137137
constexpr const char* kClientTypeAnthropic = "anthropic";
138138
constexpr const char* kClientTypeOllama = "ollama";
139+
constexpr const char* kClientTypeOpenAI = "openai";
139140

140141
/**
141142
* @brief Singleton manager class for handling LLM (Large Language Model) operations.
@@ -706,7 +707,7 @@ class WXDLLIMPEXP_SDK Manager : public wxEvtHandler
706707
assistant::Config MakeConfig();
707708
void OnFileSaved(clCommandEvent& event);
708709
std::optional<llm::json> GetConfigAsJSON();
709-
static bool CanRunTool(const std::string& tool_name);
710+
static bool CanRunTool(const std::string& tool_name, assistant::json args);
710711

711712
std::unique_ptr<std::thread> m_worker_thread;
712713
std::shared_ptr<assistant::ClientBase> m_client;

Plugin/ai/NewLLMEndpointWizard.cpp

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ namespace
88
const wxString kProviderOllamaLocal = "Ollama (Local)";
99
const wxString kProviderOllamaCloud = "Ollama (Cloud)";
1010
const wxString kProviderAnthropic = "Anthropic";
11+
const wxString kProviderOpenAI = "OpenAI";
1112

1213
bool IsValidURL(const std::string& url)
1314
{
@@ -17,7 +18,8 @@ bool IsValidURL(const std::string& url)
1718

1819
} // namespace
1920

20-
NewLLMEndpointWizard::NewLLMEndpointWizard(wxWindow* parent) : NewLLMEndpointWizardBase(parent)
21+
NewLLMEndpointWizard::NewLLMEndpointWizard(wxWindow* parent)
22+
: NewLLMEndpointWizardBase(parent)
2123
{
2224
int where = m_choiceProviders->FindString(kProviderOllamaLocal);
2325
if (where != wxNOT_FOUND) {
@@ -37,6 +39,8 @@ void NewLLMEndpointWizard::OnProviderChanged(wxCommandEvent& event)
3739
m_textCtrlBaseURL->ChangeValue("https://ollama.com");
3840
} else if (provider == kProviderOllamaLocal) {
3941
m_textCtrlBaseURL->ChangeValue("http://127.0.0.1:11434");
42+
} else if (provider == kProviderOpenAI) {
43+
m_textCtrlBaseURL->ChangeValue("https://api.openai.com");
4044
}
4145
}
4246

@@ -115,22 +119,28 @@ llm::EndpointData NewLLMEndpointWizard::GetData() const
115119
.model = m_textCtrlModel->GetValue().ToStdString(wxConvUTF8),
116120
.context_size = m_spinCtrlContextSizeKB->GetValue() * 1024,
117121
.api_key = m_textCtrlAPIKey->GetValue().ToStdString(wxConvUTF8)};
118-
} else {
122+
} else if (provider == kProviderAnthropic) {
119123
// Anthropic
120124
data = llm::EndpointData{.client_type = llm::kClientTypeAnthropic,
121125
.url = m_textCtrlBaseURL->GetValue().ToStdString(wxConvUTF8),
122126
.model = m_textCtrlModel->GetValue().ToStdString(wxConvUTF8),
123127
.api_key = m_textCtrlAPIKey->GetValue().ToStdString(wxConvUTF8),
124128
.max_tokens = m_spinCtrlMaxTokens->GetValue()};
129+
} else {
130+
// OpenAI
131+
data = llm::EndpointData{.client_type = llm::kClientTypeOpenAI,
132+
.url = m_textCtrlBaseURL->GetValue().ToStdString(wxConvUTF8),
133+
.model = m_textCtrlModel->GetValue().ToStdString(wxConvUTF8),
134+
.api_key = m_textCtrlAPIKey->GetValue().ToStdString(wxConvUTF8),
135+
.max_tokens = m_spinCtrlMaxTokens->GetValue()};
125136
}
126-
127137
return data;
128138
}
129139

130140
void NewLLMEndpointWizard::OnApiKeyUI(wxUpdateUIEvent& event)
131141
{
132142
wxString provider = m_choiceProviders->GetStringSelection();
133-
event.Enable(provider == kProviderAnthropic || provider == kProviderOllamaCloud);
143+
event.Enable(provider == kProviderAnthropic || provider == kProviderOllamaCloud || provider == kProviderOpenAI);
134144
}
135145

136146
void NewLLMEndpointWizard::OnMaxTokensUI(wxUpdateUIEvent& event)

Plugin/ai/UI.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -240,6 +240,7 @@ NewLLMEndpointWizardBase::NewLLMEndpointWizardBase(
240240
m_choiceProvidersArr.Add(_("Ollama (Local)"));
241241
m_choiceProvidersArr.Add(_("Ollama (Cloud)"));
242242
m_choiceProvidersArr.Add(_("Anthropic"));
243+
m_choiceProvidersArr.Add(_("OpenAI"));
243244
m_choiceProviders = new wxChoice(m_wizardPageSettings,
244245
wxID_ANY,
245246
wxDefaultPosition,

Plugin/ai/UI.wxcp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1269,7 +1269,7 @@
12691269
}, {
12701270
"type": "multi-string",
12711271
"m_label": "Choices:",
1272-
"m_value": "Ollama (Local);Ollama (Cloud);Anthropic"
1272+
"m_value": "Ollama (Local);Ollama (Cloud);Anthropic;OpenAI"
12731273
}, {
12741274
"type": "string",
12751275
"m_label": "Selection:",

0 commit comments

Comments
 (0)