11"use client" ;
22
33import { ChatLayout } from "@/components/chat/chat-layout" ;
4+ import { getSelectedModel } from "@/lib/model-helper" ;
5+ import { ChatOllama } from "@langchain/community/chat_models/ollama" ;
6+ import { AIMessage , HumanMessage } from "@langchain/core/messages" ;
7+ import { BytesOutputParser } from "@langchain/core/output_parsers" ;
48import { ChatRequestOptions } from "ai" ;
5- import { useChat } from "ai/react" ;
6- import React from "react" ;
9+ import { Message , useChat } from "ai/react" ;
10+ import React , { useEffect } from "react" ;
711import { v4 as uuidv4 } from "uuid" ;
812
913export default function Page ( { params } : { params : { id : string } } ) {
@@ -16,9 +20,24 @@ export default function Page({ params }: { params: { id: string } }) {
     error,
     stop,
     setMessages,
+    setInput,
   } = useChat();
   const [chatId, setChatId] = React.useState<string>("");
-  const [selectedModel, setSelectedModel] = React.useState<string>("mistral");
+  const [selectedModel, setSelectedModel] = React.useState<string>(
+    getSelectedModel()
+  );
+  const [ollama, setOllama] = React.useState<ChatOllama>();
+  const env = process.env.NODE_ENV;
+
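+  // In production the browser talks to the Ollama server directly instead of
+  // going through the Next.js API route, so build a client-side ChatOllama
+  // instance whenever the selected model changes.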
+  useEffect(() => {
+    if (env === "production") {
+      const newOllama = new ChatOllama({
+        baseUrl: process.env.OLLAMA_URL || "http://localhost:11434",
+        model: selectedModel,
+      });
+      setOllama(newOllama);
+    }
+  }, [selectedModel]);

   React.useEffect(() => {
     if (params.id) {
@@ -29,6 +48,51 @@ export default function Page({ params }: { params: { id: string } }) {
     }
   }, [setMessages]);

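+  // Append a message to the local history and fire a synthetic "storage"
+  // event so any listeners (e.g. the chat sidebar) pick up the change.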
+  const addMessage = (message: Message) => {
+    messages.push(message);
+    window.dispatchEvent(new Event("storage"));
+    setMessages([...messages]);
+  };
+
+  // Function to handle chatting with Ollama in production (client side)
+  const handleSubmitProduction = async (
+    e: React.FormEvent<HTMLFormElement>
+  ) => {
+    e.preventDefault();
+
+    addMessage({ role: "user", content: input, id: chatId });
+    setInput("");
+
+    if (ollama) {
+      const parser = new BytesOutputParser();
+
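+      // Convert the useChat history into LangChain message objects and
+      // stream the model's reply through the byte output parser.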
+      const stream = await ollama
+        .pipe(parser)
+        .stream(
+          (messages as Message[]).map((m) =>
+            m.role === "user"
+              ? new HumanMessage(m.content)
+              : new AIMessage(m.content)
+          )
+        );
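+
+      // Read the stream to completion, decoding each chunk to text.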
+      const decoder = new TextDecoder();
+
+      let responseMessage = "";
+      for await (const chunk of stream) {
+        const decodedChunk = decoder.decode(chunk);
+        responseMessage += decodedChunk;
+      }
+      setMessages([
+        ...messages,
+        { role: "assistant", content: responseMessage, id: chatId },
+      ]);
+    }
+  };
+
   const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
     e.preventDefault();

@@ -43,8 +107,13 @@ export default function Page({ params }: { params: { id: string } }) {
43107 } ,
44108 } ;
45109
46- // Call the handleSubmit function with the options
47- handleSubmit ( e , requestOptions ) ;
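+    // In production, stream straight from the browser via ChatOllama unless
+    // the "REST API" model option is selected.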
+    if (env === "production" && selectedModel !== "REST API") {
+      handleSubmitProduction(e);
+    } else {
+      // Otherwise go through the /api/chat route via handleSubmit.
+      handleSubmit(e, requestOptions);
+    }
   };

   // When starting a new chat, append the messages to the local storage