Skip to content

Commit f4f2a9d

Browse files
committed
fix: cleanup ollama n8n
1 parent 11f5ab4 commit f4f2a9d

File tree

1 file changed

+7
-12
lines changed

1 file changed

+7
-12
lines changed

core/llm/llms/Ollama.ts

Lines changed: 7 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -440,15 +440,8 @@ class Ollama extends BaseLLM implements ModelInstaller {
440440
body: JSON.stringify(chatOptions),
441441
signal,
442442
});
443-
let _isThinking: boolean = false;
444-
function GetIsThinking(): boolean {
445-
return _isThinking;
446-
}
447-
function SetIsThinking(newValue: boolean): void {
448-
if (_isThinking !== newValue) {
449-
_isThinking = newValue;
450-
}
451-
}
443+
let isThinking: boolean = false;
444+
452445
function convertChatMessage(res: OllamaChatResponse): ChatMessage[] {
453446
if ("error" in res) {
454447
throw new Error(res.error);
@@ -458,18 +451,20 @@ class Ollama extends BaseLLM implements ModelInstaller {
458451
const { content } = res;
459452

460453
if (content === "<think>") {
461-
SetIsThinking(true);
454+
isThinking = true;
462455
}
463456

464-
if (GetIsThinking() && content) {
457+
if (isThinking && content) {
458+
// TODO better support for streaming thinking chunks, or remove this and depend on redux <think/> parsing logic
465459
const thinkingMessage: ThinkingChatMessage = {
466460
role: "thinking",
467461
content: content,
468462
};
469463

470464
if (thinkingMessage) {
465+
// could cause issues with termination if chunk doesn't match this exactly
471466
if (content === "</think>") {
472-
SetIsThinking(false);
467+
isThinking = false;
473468
}
474469
// When Streaming you can't have both thinking and content
475470
return [thinkingMessage];

0 commit comments

Comments (0)