Commit 37e2709

Merge pull request #6353 from ethereum/updatedesktop
fix desktop compilation errors
2 parents: 58ea696 + ea03dc8

File tree

4 files changed, +6 -6 lines

apps/remixdesktop/src/lib/InferenceServerManager.ts

Lines changed: 2 additions & 2 deletions

@@ -510,12 +510,12 @@ export class InferenceManager implements ICompletions {
       console.log('model not ready yet')
       return
     }
-    params.chatHistory = params.provider === 'anthropic' ? buildChatPrompt(prompt) : []
+    params.chatHistory = params.provider === 'anthropic' ? buildChatPrompt() : []

     if (params.stream_result) {
       return this._streamInferenceRequest('answer', { prompt:userPrompt, ...params })
     } else {
-      return this._makeInferenceRequest('answer', { prompt, ...params }, AIRequestType.GENERAL)
+      return this._makeInferenceRequest('answer', { prompt: userPrompt, ...params }, AIRequestType.GENERAL)
     }
   }
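For readers following the fix, here is a hedged sketch of what the surrounding answer path presumably looks like after this commit. Only the two changed lines are taken from the diff; the method name, its signature, and the readiness check are illustrative assumptions, not confirmed by this commit:

// Sketch only: names outside the two changed lines are assumed, not confirmed by the diff.
async answer(userPrompt: string, params: any = {}): Promise<any> {
  if (!this.isReady) {                     // assumed readiness flag
    console.log('model not ready yet')
    return
  }
  // buildChatPrompt() is now called without an argument (presumably its signature changed)
  params.chatHistory = params.provider === 'anthropic' ? buildChatPrompt() : []

  if (params.stream_result) {
    return this._streamInferenceRequest('answer', { prompt: userPrompt, ...params })
  } else {
    // the old code referenced an undefined `prompt` variable, which broke the desktop build
    return this._makeInferenceRequest('answer', { prompt: userPrompt, ...params }, AIRequestType.GENERAL)
  }
}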

apps/remixdesktop/yarn.lock

Lines changed: 2 additions & 2 deletions

@@ -74,9 +74,9 @@
   optionalDependencies:
     global-agent "^3.0.0"

-"@electron/node-gyp@git+https://github.com/electron/node-gyp.git#06b29aafb7708acef8b3669835c8a7857ebc92d2":
+"@electron/node-gyp@https://github.com/electron/node-gyp#06b29aafb7708acef8b3669835c8a7857ebc92d2":
   version "10.2.0-electron.1"
-  resolved "git+https://github.com/electron/node-gyp.git#06b29aafb7708acef8b3669835c8a7857ebc92d2"
+  resolved "https://github.com/electron/node-gyp#06b29aafb7708acef8b3669835c8a7857ebc92d2"
   dependencies:
     env-paths "^2.2.0"
     exponential-backoff "^3.1.1"

libs/remix-ai-core/src/inferencers/local/ollama.ts

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 import axios from 'axios';

-const _paq = (window._paq = window._paq || [])
+const _paq = (typeof window !== 'undefined' && (window as any)._paq) ? (window as any)._paq : []

 // default Ollama ports to check (11434 is the legacy/standard port)
 const OLLAMA_PORTS = [11434, 11435, 11436];

libs/remix-ai-core/src/inferencers/local/ollamaInferencer.ts

Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@ import {
 import axios from "axios";
 import { RemoteInferencer } from "../remote/remoteInference";

-const _paq = (window._paq = window._paq || [])
+const _paq = (typeof window !== 'undefined' && (window as any)._paq) ? (window as any)._paq : []
 const defaultErrorMessage = `Unable to get a response from Ollama server`;

 export class OllamaInferencer extends RemoteInferencer implements ICompletions, IGeneration {
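Both Ollama files apply the same fix: the old `window._paq = window._paq || []` assignment assumes a browser global, which is undefined in the Node/Electron context of the desktop build. A minimal sketch of the guarded pattern is below; the `trackEvent` helper wrapped around `_paq.push` is a hypothetical illustration, not part of the commit:

// Guarded Matomo queue: in a browser, reuse the existing window._paq queue;
// outside a browser (e.g. the desktop/Node build), fall back to a local array
// so pushes are harmless no-ops instead of crashing on an undefined `window`.
const _paq: any[] =
  (typeof window !== 'undefined' && (window as any)._paq) ? (window as any)._paq : []

// Hypothetical helper showing how the queue is typically used (Matomo event format).
function trackEvent(category: string, action: string, name?: string): void {
  _paq.push(['trackEvent', category, action, name])
}

// Usage example: safe in both browser and desktop environments.
trackEvent('remixAI', 'ollama', 'list_models')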
