Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -77,4 +77,5 @@ apps/remixdesktop/circom-download
apps/remixdesktop/log_input_signals.txt
apps/remixdesktop/log_input_signals_new.txt
logs
apps/remix-ide-e2e/src/extensions/chrome/metamask
apps/remix-ide-e2e/src/extensions/chrome/metamask
apps/remix-ide-e2e/tmp/
5 changes: 3 additions & 2 deletions apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import * as packageJson from '../../../../../package.json'
import { Plugin } from '@remixproject/engine';
import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel } from '@remix/remix-ai-core';
import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';
import { CodeCompletionAgent, ContractAgent, workspaceAgent, IContextType } from '@remix/remix-ai-core';
import axios from 'axios';
import { endpointUrls } from "@remix-endpoints-helper"
Expand Down Expand Up @@ -53,6 +53,8 @@ export class RemixAIPlugin extends Plugin {
}

onActivation(): void {
// Reset any cached Ollama host on activation so settings changes take effect
resetOllamaHostOnSettingsChange();

if (this.isOnDesktop) {
this.useRemoteInferencer = true
Expand Down Expand Up @@ -419,7 +421,6 @@ export class RemixAIPlugin extends Plugin {
this.isInferencing = false
})

console.log(`Ollama model changed to: ${modelName}`)
} catch (error) {
console.error('Failed to set Ollama model:', error)
}
Expand Down
5 changes: 4 additions & 1 deletion apps/remix-ide/src/app/tabs/locales/en/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,5 +65,8 @@
"settings.aiCopilotDescription": "AI Copilot assists with code suggestions and improvements.",
"settings.aiPrivacyPolicy": "AI Privacy & Data Usage",
"settings.viewPrivacyPolicy": "View Privacy Policy",
"settings.aiPrivacyPolicyDescription": "Understand how AI processes your data."
"settings.aiPrivacyPolicyDescription": "Understand how AI processes your data.",
"settings.ollamaConfig": "Ollama URL Configuration",
"settings.ollamaConfigDescription": "Configure Ollama endpoint for local AI model integration",
"settings.ollama-endpoint": "ENDPOINT URL"
}
4 changes: 2 additions & 2 deletions libs/remix-ai-core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,15 @@ import { DefaultModels, InsertionParams, CompletionParams, GenerationParams, Ass
import { buildChatPrompt } from './prompts/promptBuilder'
import { RemoteInferencer } from './inferencers/remote/remoteInference'
import { OllamaInferencer } from './inferencers/local/ollamaInferencer'
import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost } from './inferencers/local/ollama'
import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange } from './inferencers/local/ollama'
import { FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS } from './inferencers/local/fimModelConfig'
import { ChatHistory } from './prompts/chat'
import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
import { ChatCommandParser } from './helpers/chatCommandParser'
export {
IModel, IModelResponse, ChatCommandParser,
ModelType, DefaultModels, ICompletions, IParams, IRemoteModel, buildChatPrompt,
RemoteInferencer, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost,
RemoteInferencer, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange,
FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS,
InsertionParams, CompletionParams, GenerationParams, AssistantParams,
ChatEntry, AIRequestType, ChatHistory, downloadLatestReleaseExecutable
Expand Down
41 changes: 41 additions & 0 deletions libs/remix-ai-core/src/inferencers/local/ollama.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,54 @@
import axios from 'axios';
import { Registry } from '@remix-project/remix-lib';

// Analytics event queue (Matomo-style `_paq`); falls back to a throwaway
// array when running outside a browser so `.push` calls are safe no-ops.
const _paq = (typeof window !== 'undefined' && (window as any)._paq) ? (window as any)._paq : []

// default Ollama ports to check (11434 is the legacy/standard port)
const OLLAMA_PORTS = [11434, 11435, 11436];
const OLLAMA_BASE_HOST = 'http://localhost';
const DEFAULT_OLLAMA_HOST = 'http://localhost:11434';

// Module-level cache of the last successfully reached Ollama host;
// cleared by resetOllamaHost() so a settings change forces re-discovery.
let discoveredOllamaHost: string | null = null;

/**
 * Reads the user-configured Ollama endpoint from the Remix settings registry.
 *
 * @returns the configured endpoint URL, or null when it is unset, equal to
 *          the default host (so port discovery should run instead), or when
 *          the settings registry is unavailable.
 */
function getConfiguredOllamaEndpoint(): string | null {
  try {
    const config = Registry.getInstance().get('config').api;
    const configuredEndpoint = config.get('settings/ollama-endpoint');
    if (configuredEndpoint && configuredEndpoint !== DEFAULT_OLLAMA_HOST) {
      _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_using_configured_endpoint', configuredEndpoint]);
      return configuredEndpoint;
    }
  } catch (error) {
    // `error` is `unknown` under strict TS; narrow before reading `.message`
    // (a non-Error throw would otherwise report `undefined`).
    const message = error instanceof Error ? error.message : 'unknown';
    _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_config_access_failed', message]);
  }
  return null;
}

export async function discoverOllamaHost(): Promise<string | null> {
if (discoveredOllamaHost) {
_paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_host_cache_hit', discoveredOllamaHost]);
return discoveredOllamaHost;
}

// First, try to use the configured endpoint from settings
const configuredEndpoint = getConfiguredOllamaEndpoint();
if (configuredEndpoint) {
try {
const res = await axios.get(`${configuredEndpoint}/api/tags`, { timeout: 2000 });
if (res.status === 200) {
discoveredOllamaHost = configuredEndpoint;
_paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_configured_endpoint_success', configuredEndpoint]);
return configuredEndpoint;
}
return null;
} catch (error) {
_paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_configured_endpoint_failed', `${configuredEndpoint}:${error.message || 'unknown'}`]);
// Fall back to discovery if configured endpoint fails
return null;
}
}

// Fall back to port discovery if no configured endpoint
for (const port of OLLAMA_PORTS) {
const host = `${OLLAMA_BASE_HOST}:${port}`;
_paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_port_check', `${port}`]);
Expand Down Expand Up @@ -66,6 +101,12 @@ export function resetOllamaHost(): void {
discoveredOllamaHost = null;
}

/**
 * Hook for settings integration: call whenever Ollama-related settings are
 * updated. Records the event and clears the cached host so the next
 * discovery run re-reads the configured endpoint.
 */
export function resetOllamaHostOnSettingsChange(): void {
  _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_reset_on_settings_change']);
  resetOllamaHost();
}

export async function pullModel(modelName: string): Promise<void> {
// in case the user wants to pull a model from registry
_paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_pull_model_start', modelName]);
Expand Down
10 changes: 10 additions & 0 deletions libs/remix-ui/settings/src/lib/remix-ui-settings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,16 @@ const settingsSections: SettingsSection[] = [
action: 'link',
link: 'https://remix-ide.readthedocs.io/en/latest/ai.html'
}
},
{
name: 'ollama-config',
label: 'settings.ollamaConfig',
description: 'settings.ollamaConfigDescription',
type: 'toggle',
toggleUIOptions: [{
name: 'ollama-endpoint',
type: 'text'
}]
}]
}
]},
Expand Down
26 changes: 25 additions & 1 deletion libs/remix-ui/settings/src/lib/settingsReducer.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { Registry } from '@remix-project/remix-lib'
import { SettingsActions, SettingsState } from '../types'

import { resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';
const config = Registry.getInstance().get('config').api
const settingsConfig = Registry.getInstance().get('settingsConfig').api
const defaultTheme = config.get('settings/theme') ? settingsConfig.themes.find((theme) => theme.name.toLowerCase() === config.get('settings/theme').toLowerCase()) : settingsConfig.themes[0]
Expand All @@ -17,12 +17,14 @@ const swarmPrivateBeeAddress = config.get('settings/swarm-private-bee-address')
const swarmPostageStampId = config.get('settings/swarm-postage-stamp-id') || ''
const sindriAccessToken = config.get('settings/sindri-access-token') || ''
const etherscanAccessToken = config.get('settings/etherscan-access-token') || ''
// NOTE(review): `||` means an empty-string stored endpoint also falls back to
// the default host — presumably intended, matching the token reads above.
const ollamaEndpoint = config.get('settings/ollama-endpoint') || 'http://localhost:11434'

// Toggle states for the per-service config sections; mutable because the
// migration blocks below may switch them on based on stored values.
let githubConfig = config.get('settings/github-config') || false
let ipfsConfig = config.get('settings/ipfs-config') || false
let swarmConfig = config.get('settings/swarm-config') || false
let sindriConfig = config.get('settings/sindri-config') || false
let etherscanConfig = config.get('settings/etherscan-config') || false
let ollamaConfig = config.get('settings/ollama-config') || false
let generateContractMetadata = config.get('settings/generate-contract-metadata')
let autoCompletion = config.get('settings/auto-completion')
let showGas = config.get('settings/show-gas')
Expand All @@ -49,6 +51,10 @@ if (!etherscanConfig && etherscanAccessToken) {
config.set('settings/etherscan-config', true)
etherscanConfig = true
}
// Migration: if the user customized the Ollama endpoint before the
// 'ollama-config' toggle existed, switch the toggle on so the UI reflects
// the stored configuration (mirrors the etherscan/sindri blocks above).
if (!ollamaConfig && ollamaEndpoint !== 'http://localhost:11434') {
  config.set('settings/ollama-config', true)
  ollamaConfig = true
}
if (typeof generateContractMetadata !== 'boolean') {
config.set('settings/generate-contract-metadata', true)
generateContractMetadata = true
Expand Down Expand Up @@ -191,6 +197,14 @@ export const initialState: SettingsState = {
value: '',
isLoading: false
},
'ollama-config': {
value: ollamaConfig,
isLoading: false
},
'ollama-endpoint': {
value: ollamaEndpoint,
isLoading: false
},
toaster: {
value: '',
isLoading: false
Expand All @@ -201,6 +215,16 @@ export const settingReducer = (state: SettingsState, action: SettingsActions): S
switch (action.type) {
case 'SET_VALUE':
config.set('settings/' + action.payload.name, action.payload.value)

// Reset Ollama host cache when endpoint is changed
if (action.payload.name === 'ollama-endpoint') {
try {
resetOllamaHostOnSettingsChange();
} catch (error) {
// Ignore errors - Ollama functionality is optional
}
}

return { ...state, [action.payload.name]: { ...state[action.payload.name], value: action.payload.value, isLoading: false } }
case 'SET_LOADING':
return { ...state, [action.payload.name]: { ...state[action.payload.name], isLoading: true } }
Expand Down
2 changes: 2 additions & 0 deletions libs/remix-ui/settings/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,8 @@ export interface SettingsState {
'sindri-access-token': ConfigState,
'etherscan-access-token': ConfigState,
'ai-privacy-policy': ConfigState,
'ollama-config': ConfigState,
'ollama-endpoint': ConfigState,
toaster: ConfigState
}
export interface SettingsActionPayloadTypes {
Expand Down