
Commit 09867e6

custom links for ollama
1 parent 235baf9 commit 09867e6

7 files changed (+294 / -118 lines)

Canvas.html

Lines changed: 42 additions & 10 deletions
@@ -2010,11 +2010,25 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🏠 LM Studi
       <h2 style="color: white; margin-bottom: 20px; text-align: center;">🦙 Ollama Connection</h2>
 
       <div style="margin-bottom: 20px;">
-        <button id="connectOllama" class="ai-option-btn">
-          🔌 Connect to Ollama
+        <label style="color: white; display: block; margin-bottom: 8px; font-weight: 600;">Ollama Server URL:</label>
+        <input
+          type="text"
+          id="ollamaUrl"
+          value="http://localhost:11434"
+          placeholder="http://localhost:11434"
+          style="width: 100%; padding: 12px; border: 1px solid rgba(255,255,255,0.3);
+                 border-radius: 8px; background: rgba(255,255,255,0.1); color: white;
+                 font-size: 14px; margin-bottom: 10px;"
+        />
+        <p style="color: rgba(255,255,255,0.7); font-size: 11px; margin-bottom: 15px;">
+          💡 Examples: http://localhost:11434, http://192.168.1.100:11434, https://my-ollama-server.com
+        </p>
+
+        <button id="connectOllama" class="ai-option-btn">
+          🔌 Connect to Ollama
         </button>
         <p style="color: rgba(255,255,255,0.8); font-size: 12px; margin-top: 5px;">
-          Connects to Ollama running on localhost:11434
+          Will connect to the URL specified above
         </p>
       </div>
 

@@ -2254,9 +2268,27 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🤖 Local Qw
       updateStatus('🦙 Connecting to Ollama...');
       document.getElementById('aiStatus').textContent = 'AI: Connecting to Ollama...';
 
+      // Get custom URL from input field, fallback to default
+      let ollamaUrl = 'http://localhost:11434';
+      const urlInput = document.getElementById('ollamaUrl');
+      if (urlInput && urlInput.value.trim()) {
+        ollamaUrl = urlInput.value.trim();
+        // Ensure URL doesn't end with slash
+        if (ollamaUrl.endsWith('/')) {
+          ollamaUrl = ollamaUrl.slice(0, -1);
+        }
+        // Validate URL format
+        try {
+          new URL(ollamaUrl);
+        } catch (e) {
+          throw new Error(`Invalid URL format: ${ollamaUrl}. Please use format like http://localhost:11434`);
+        }
+      }
+
+      console.log(`🔍 Attempting to connect to Ollama at: ${ollamaUrl}`);
+
       // Test Ollama connection by fetching models
-      console.log('🔍 Testing Ollama connection...');
-      const testResponse = await fetch('http://localhost:11434/api/tags', {
+      const testResponse = await fetch(`${ollamaUrl}/api/tags`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json'

@@ -2266,7 +2298,7 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🤖 Local Qw
 
      if (!testResponse.ok) {
        console.error('❌ Ollama connection test failed:', testResponse.status, testResponse.statusText);
-        throw new Error(`Ollama connection failed: ${testResponse.status} ${testResponse.statusText}. Make sure Ollama is running on port 11434.`);
+        throw new Error(`Ollama connection failed: ${testResponse.status} ${testResponse.statusText}. Make sure Ollama is running at ${ollamaUrl}.`);
      }
 
      const modelsData = await testResponse.json();

@@ -2289,7 +2321,7 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🤖 Local Qw
        selectedModel = modelsData.models?.[0]?.name || 'qwen2.5:0.5b';
      }
 
-      aiSession = { type: 'ollama', model: selectedModel, baseURL: 'http://localhost:11434' };
+      aiSession = { type: 'ollama', model: selectedModel, baseURL: ollamaUrl };
      document.getElementById('aiStatus').textContent = `AI: Ollama (${selectedModel}) ✅`;
      document.getElementById('generateBtn').disabled = false;
      document.getElementById('initAIBtn').textContent = `🦙 Ollama Ready`;

@@ -2298,8 +2330,8 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🤖 Local Qw
      updateHeaderAIStatus();
 
      console.log(`🦙 Ollama session initialized successfully with model: ${selectedModel}`);
-      updateStatus(`🦙 Ollama connected successfully!`);
-      addChatMessage('system', `🦙 Ollama connected! Using model: ${selectedModel}. Fully local processing with GGUF models - no API costs.`);
+      updateStatus(`🦙 Ollama connected successfully at ${ollamaUrl}!`);
+      addChatMessage('system', `🦙 Ollama connected at ${ollamaUrl}! Using model: ${selectedModel}. Fully local processing with GGUF models - no API costs.`);
 
      // Track Ollama connection
      if (window.AppConfig) {

@@ -2311,7 +2343,7 @@ <h2 style="color: white; margin-bottom: 20px; text-align: center;">🤖 Local Qw
 
      let errorMessage = `❌ Failed to connect to Ollama: ${error.message}`;
      if (error.message.includes('Failed to fetch')) {
-        errorMessage += `\n\n💡 Make sure Ollama is installed and running: \n1. Install Ollama from https://ollama.ai\n2. Run: ollama serve\n3. Pull a Qwen model: ollama pull qwen2.5:0.5b`;
+        errorMessage += `\n\n💡 Make sure Ollama is installed and running: \n1. Install Ollama from https://ollama.ai\n2. Run: ollama serve\n3. Pull a Qwen model: ollama pull qwen2.5:0.5b\n4. Ensure CORS is enabled with OLLAMA_ORIGINS`;
      }
 
      addChatMessage('system', errorMessage);
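
Note (not part of the diff above): a minimal sketch of how the connection details that this handler stores in aiSession could be used for a follow-up chat request. It assumes Ollama's standard /api/chat endpoint with a non-streaming response; the askOllama helper name is illustrative, and the server must allow this page's origin (OLLAMA_ORIGINS) for the browser fetch to succeed.

// Sketch only, not code from this commit. Assumes aiSession was populated by the
// connect handler above and that the Ollama server allows this page's origin
// (for example, started with the OLLAMA_ORIGINS environment variable set).
async function askOllama(prompt) {
  // aiSession.baseURL now carries the user-supplied URL instead of a hardcoded host.
  const response = await fetch(`${aiSession.baseURL}/api/chat`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: aiSession.model,
      messages: [{ role: 'user', content: prompt }],
      stream: false  // single JSON response instead of a stream of chunks
    })
  });
  if (!response.ok) {
    throw new Error(`Ollama request failed: ${response.status} ${response.statusText}`);
  }
  const data = await response.json();
  return data.message?.content ?? '';
}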

DeepResearch.html

Lines changed: 15 additions & 5 deletions
@@ -3,7 +3,7 @@
 <head>
   <meta charset="UTF-8" />
   <!-- Load Common Meta Configuration -->
-  <script src="lib/common-meta.js"></script>
+  <script src="lib/common-meta.js?v=2025010302"></script>
   <script>
     document.write(`
       <!-- Content Security Policy - Fallback for local development and non-Amplify deployments -->

@@ -518,8 +518,17 @@
     }
 
     .ai-status.error {
-      background: rgba(244, 67, 54, 0.2);
-      border-color: rgba(244, 67, 54, 0.5);
+      background: linear-gradient(135deg, rgba(255, 152, 0, 0.15) 0%, rgba(255, 193, 7, 0.1) 100%);
+      border: 1px solid rgba(255, 152, 0, 0.3);
+      color: rgba(255, 255, 255, 0.95);
+      box-shadow: 0 2px 8px rgba(255, 152, 0, 0.1);
+    }
+
+    .ai-status.error::before {
+      content: '⚠️';
+      font-size: 18px;
+      margin-right: 8px;
+      vertical-align: middle;
     }
 
     .research-tabs {

@@ -1275,7 +1284,7 @@ <h3>✨ Features:</h3>
   <!-- Scripts -->
   <script src="config.js"></script>
   <script src="lib/useragreement/agreement.js"></script>
-  <script src="lib/AIAssistant/AIAssistant-Backend.js"></script>
+  <script src="lib/AIAssistant/AIAssistant-Backend.js?v=2025010301"></script>
 
   <!-- Vector Store Dependencies (CDN for development) -->
   <!-- Load Transformers.js as ES module with proper handling -->

@@ -1296,8 +1305,9 @@ <h3>✨ Features:</h3>
       // Configure for browser use
       if (transformersModule.env) {
         transformersModule.env.allowRemoteModels = true;
-        transformersModule.env.remoteURL = 'https://huggingface.co/';
         transformersModule.env.allowLocalModels = false;
+        transformersModule.env.remoteURL = 'https://huggingface.co/';
+        transformersModule.env.localURL = null; // Disable local models
       }
 
       console.log('✅ Transformers.js configured and available globally');
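
Note (not part of the diff above): a minimal sketch of what the env flags in the last hunk affect. With allowLocalModels disabled and allowRemoteModels enabled, a pipeline() call resolves model files from the remote Hugging Face host rather than a local path. The model id below is an example, not necessarily the one vector-store.js actually loads.

// Sketch only, not code from this commit. transformersModule is the variable used
// in the hunk above; the model id and input text are illustrative.
const { pipeline } = transformersModule;
const extractor = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
const embedding = await extractor('deep research query', { pooling: 'mean', normalize: true });
console.log('embedding dimensions:', embedding.dims);  // tensor shape of the pooled output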

Playground.html

Lines changed: 23 additions & 6 deletions
@@ -3,7 +3,7 @@
 <head>
   <meta charset="UTF-8" />
   <!-- Load Common Meta Configuration -->
-  <script src="lib/common-meta.js"></script>
+  <script src="lib/common-meta.js?v=2025010302"></script>
   <script>
     document.write(`
       <!-- Content Security Policy - Fallback for local development and non-Amplify deployments -->

@@ -30,6 +30,15 @@
       window.transformers = await import('https://cdn.jsdelivr.net/npm/@xenova/[email protected]');
       console.log('✅ Transformers.js loaded successfully');
 
+      // Configure Transformers.js environment for browser use
+      if (window.transformers.env) {
+        window.transformers.env.allowRemoteModels = true;
+        window.transformers.env.allowLocalModels = false;
+        window.transformers.env.remoteURL = 'https://huggingface.co/';
+        window.transformers.env.localURL = null; // Disable local models
+        console.log('🔧 Transformers.js configured for remote models only');
+      }
+
       // Signal that Transformers.js is ready
       window.dispatchEvent(new CustomEvent('transformersLoaded'));
     }

@@ -371,10 +380,18 @@
     }
 
     .status-indicator.error {
-      background: linear-gradient(45deg, #ff416c 0%, #ff4b2b 100%) !important;
-      color: white !important;
-      border: 1px solid rgba(255, 65, 108, 0.3) !important;
-      box-shadow: 0 2px 8px rgba(255, 65, 108, 0.2);
+      background: linear-gradient(135deg, rgba(255, 152, 0, 0.2) 0%, rgba(255, 193, 7, 0.15) 100%) !important;
+      color: rgba(255, 255, 255, 0.95) !important;
+      border: 1px solid rgba(255, 152, 0, 0.4) !important;
+      box-shadow: 0 2px 8px rgba(255, 152, 0, 0.15);
+      position: relative;
+    }
+
+    .status-indicator.error::before {
+      content: '⚠️';
+      font-size: 16px;
+      margin-right: 6px;
+      vertical-align: middle;
     }
 
     .status-indicator.info {

@@ -803,7 +820,7 @@ <h2>🚀 Welcome to AI Playground</h2>
   <script src="config.js"></script>
 
   <!-- Load Dependencies -->
-  <script src="lib/AIAssistant/AIAssistant-Backend.js"></script>
+  <script src="lib/AIAssistant/AIAssistant-Backend.js?v=2025010301"></script>
   <script src="lib/Pages/DeepResearch/deepresearch.js"></script>
   <script src="lib/Pages/DeepResearch/vector-store.js"></script>
