
Commit 49c7129

fix: ollama and lm studio url issue fix for docker and build (#1008)
* fix: ollama and lm studio url issue fix for docker and build
* vite config fix
1 parent 3ecac25 commit 49c7129

File tree

  Dockerfile
  app/lib/modules/llm/providers/lmstudio.ts
  app/lib/modules/llm/providers/ollama.ts
  vite.config.ts

4 files changed: +63 -14 lines changed
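
The underlying problem: when the app runs inside a container, a provider base URL of `localhost` or `127.0.0.1` points at the container itself, not at the host machine where Ollama or LM Studio is actually listening. The fix bakes `RUNNING_IN_DOCKER=true` into the image and has both providers rewrite the host portion to `host.docker.internal`, which Docker resolves to the host. The same rewrite is inlined in four spots across the two providers; a minimal sketch of how it could be factored into one helper (hypothetical `rewriteLocalhostForDocker`, not part of this commit):

// Hypothetical shared helper (not in this commit): the rewrite that
// lmstudio.ts and ollama.ts each inline.
function rewriteLocalhostForDocker(baseUrl: string): string {
  // Only server code should rewrite; the browser can reach the host directly.
  if (typeof window !== 'undefined' || process.env.RUNNING_IN_DOCKER !== 'true') {
    return baseUrl;
  }

  // host.docker.internal resolves to the host machine from inside the container.
  return baseUrl
    .replace('localhost', 'host.docker.internal')
    .replace('127.0.0.1', 'host.docker.internal');
}

// e.g. rewriteLocalhostForDocker('http://127.0.0.1:11434') returns 'http://host.docker.internal:11434'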

Dockerfile (5 additions & 3 deletions)
@@ -45,13 +45,14 @@ ENV WRANGLER_SEND_METRICS=false \
     TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
-    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}\
+    RUNNING_IN_DOCKER=true

 # Pre-configure wrangler to disable metrics
 RUN mkdir -p /root/.config/.wrangler && \
     echo '{"enabled":false}' > /root/.config/.wrangler/metrics.json

-RUN npm run build
+RUN pnpm run build

 CMD [ "pnpm", "run", "dockerstart"]

@@ -84,7 +85,8 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
     TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
-    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}\
+    RUNNING_IN_DOCKER=true

 RUN mkdir -p ${WORKDIR}/run
 CMD pnpm run dev --host
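
A note on these ENV hunks: the added continuation `DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}\` omits the space before the backslash, but Docker treats any trailing backslash as a line continuation, so the block still parses. Also worth flagging: `host.docker.internal` (which the providers below rewrite URLs to) resolves out of the box on Docker Desktop for macOS and Windows, but on native Linux the container generally needs to be started with `--add-host=host.docker.internal:host-gateway` (or an `extra_hosts` entry in compose) for that name to resolve; this commit does not add that mapping.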

app/lib/modules/llm/providers/lmstudio.ts (30 additions & 5 deletions)
@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
 import type { IProviderSetting } from '~/types/model';
 import { createOpenAI } from '@ai-sdk/openai';
 import type { LanguageModelV1 } from 'ai';
+import { logger } from '~/utils/logger';

 export default class LMStudioProvider extends BaseProvider {
   name = 'LMStudio';
@@ -22,7 +23,7 @@ export default class LMStudioProvider extends BaseProvider {
     settings?: IProviderSetting,
     serverEnv: Record<string, string> = {},
   ): Promise<ModelInfo[]> {
-    const { baseUrl } = this.getProviderBaseUrlAndKey({
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
       providerSettings: settings,
       serverEnv,
@@ -31,7 +32,18 @@ export default class LMStudioProvider extends BaseProvider {
     });

     if (!baseUrl) {
-      return [];
+      throw new Error('No baseUrl found for LMStudio provider');
+    }
+
+    if (typeof window === 'undefined') {
+      /*
+       * Running in Server
+       * Backend: Check if we're running in Docker
+       */
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }

     const response = await fetch(`${baseUrl}/v1/models`);
@@ -51,13 +63,26 @@ export default class LMStudioProvider extends BaseProvider {
     providerSettings?: Record<string, IProviderSetting>;
   }) => LanguageModelV1 = (options) => {
     const { apiKeys, providerSettings, serverEnv, model } = options;
-    const { baseUrl } = this.getProviderBaseUrlAndKey({
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
-      providerSettings,
+      providerSettings: providerSettings?.[this.name],
       serverEnv: serverEnv as any,
-      defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
+      defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
       defaultApiTokenKey: '',
     });
+
+    if (!baseUrl) {
+      throw new Error('No baseUrl found for LMStudio provider');
+    }
+
+    if (typeof window === 'undefined') {
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
+    }
+
+    logger.debug('LMStudio Base Url used: ', baseUrl);
+
     const lmstudio = createOpenAI({
       baseUrl: `${baseUrl}/v1`,
       apiKey: '',
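
Besides the Docker rewrite, this file picks up two lookup fixes: the model factory previously passed the whole `providerSettings` map where its own entry (`providerSettings?.[this.name]`) was expected, and its env fallback pointed at `OLLAMA_API_BASE_URL` instead of `LMSTUDIO_API_BASE_URL`, so LM Studio could silently inherit Ollama's URL. A minimal standalone sketch of what the factory ultimately builds — port 1234 is an assumption (LM Studio's usual default) and `your-local-model` is a placeholder:

import { createOpenAI } from '@ai-sdk/openai';

// In Docker, the rewrite above would have turned 127.0.0.1 into host.docker.internal.
const baseUrl = process.env.LMSTUDIO_API_BASE_URL ?? 'http://127.0.0.1:1234';

// LM Studio serves an OpenAI-compatible API under /v1 and needs no API key.
// (Current @ai-sdk/openai spells the option `baseURL`; the diff uses the older `baseUrl` alias.)
const lmstudio = createOpenAI({ baseURL: `${baseUrl}/v1`, apiKey: '' });
const model = lmstudio('your-local-model'); // model id as listed by LM Studio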

app/lib/modules/llm/providers/ollama.ts (21 additions & 4 deletions)
@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
 import type { IProviderSetting } from '~/types/model';
 import type { LanguageModelV1 } from 'ai';
 import { ollama } from 'ollama-ai-provider';
+import { logger } from '~/utils/logger';

 interface OllamaModelDetails {
   parent_model: string;
@@ -45,7 +46,7 @@ export default class OllamaProvider extends BaseProvider {
     settings?: IProviderSetting,
     serverEnv: Record<string, string> = {},
   ): Promise<ModelInfo[]> {
-    const { baseUrl } = this.getProviderBaseUrlAndKey({
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
       providerSettings: settings,
       serverEnv,
@@ -54,7 +55,18 @@ export default class OllamaProvider extends BaseProvider {
     });

     if (!baseUrl) {
-      return [];
+      throw new Error('No baseUrl found for OLLAMA provider');
+    }
+
+    if (typeof window === 'undefined') {
+      /*
+       * Running in Server
+       * Backend: Check if we're running in Docker
+       */
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }

     const response = await fetch(`${baseUrl}/api/tags`);
@@ -78,18 +90,23 @@ export default class OllamaProvider extends BaseProvider {
     const { apiKeys, providerSettings, serverEnv, model } = options;
     let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
-      providerSettings,
+      providerSettings: providerSettings?.[this.name],
       serverEnv: serverEnv as any,
       defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
       defaultApiTokenKey: '',
     });

     // Backend: Check if we're running in Docker
-    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+    if (!baseUrl) {
+      throw new Error('No baseUrl found for OLLAMA provider');
+    }

+    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;

+    logger.debug('Ollama Base Url used: ', baseUrl);
+
     const ollamaInstance = ollama(model, {
       numCtx: DEFAULT_NUM_CTX,
     }) as LanguageModelV1 & { config: any };
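
The `${baseUrl}/api/tags` route fetched in `getModels` is Ollama's model-list endpoint, which also makes it a handy reachability probe for the rewritten URL. A standalone sketch, assuming Ollama's default port 11434 (the hostnames mirror the rewrite logic in the diff):

// Reachability probe mirroring the provider's rewrite (port 11434 is Ollama's default).
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
const baseUrl = isDocker ? 'http://host.docker.internal:11434' : 'http://127.0.0.1:11434';

const response = await fetch(`${baseUrl}/api/tags`);
console.log(response.ok ? `Ollama reachable at ${baseUrl}` : `Ollama returned HTTP ${response.status}`);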

vite.config.ts (7 additions & 2 deletions)
@@ -4,9 +4,11 @@ import { defineConfig, type ViteDevServer } from 'vite';
 import { nodePolyfills } from 'vite-plugin-node-polyfills';
 import { optimizeCssModules } from 'vite-plugin-optimize-css-modules';
 import tsconfigPaths from 'vite-tsconfig-paths';
-
+import * as dotenv from 'dotenv';
 import { execSync } from 'child_process';

+dotenv.config();
+
 // Get git hash with fallback
 const getGitHash = () => {
   try {
@@ -17,18 +19,21 @@ const getGitHash = () => {
 };


+
+
 export default defineConfig((config) => {
   return {
     define: {
       __COMMIT_HASH: JSON.stringify(getGitHash()),
       __APP_VERSION: JSON.stringify(process.env.npm_package_version),
+      // 'process.env': JSON.stringify(process.env)
     },
     build: {
       target: 'esnext',
     },
     plugins: [
       nodePolyfills({
-        include: ['path', 'buffer'],
+        include: ['path', 'buffer', 'process'],
       }),
       config.mode !== 'test' && remixCloudflareDevProxy(),
       remixVitePlugin({
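
Why `dotenv.config()` lands here: outside Docker (where the `ENV` block injects variables), values like `OLLAMA_API_BASE_URL` live in `.env`, and loading them at the top of `vite.config.ts` makes them visible to `process.env` reads during config evaluation and the dev server; adding `'process'` to `nodePolyfills` keeps `process.env` references from breaking in browser bundles. A minimal sketch of the load behavior:

import * as dotenv from 'dotenv';

// Reads key=value pairs from ./.env into process.env; variables already
// set in the real environment are left untouched (dotenv never overrides).
dotenv.config();

console.log(process.env.OLLAMA_API_BASE_URL); // e.g. 'http://127.0.0.1:11434' if set in .env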
