1- import {
2- streamText ,
3- convertToModelMessages ,
4- stepCountIs ,
5- } from 'ai' ;
1+ import { streamText , convertToModelMessages , stepCountIs } from 'ai' ;
62import { withSupermemory } from '@supermemory/tools/ai-sdk' ;
73import { auth , type UserType } from '@/app/(auth)/auth' ;
84import { type RequestHints , systemPrompt } from '@/lib/ai/prompts' ;
@@ -38,7 +34,8 @@ export async function POST(request: Request) {
3834 }
3935
4036 try {
41- const { id, message, selectedChatModel, selectedVisibilityType } = requestBody ;
37+ const { id, message, selectedChatModel, selectedVisibilityType } =
38+ requestBody ;
4239
4340 const session = await auth ( ) ;
4441 if ( ! session ?. user ) {
@@ -74,9 +71,10 @@ export async function POST(request: Request) {
7471 // Convert DB messages to AI SDK v5 format (using parts array)
7572 const formattedPreviousMessages = previousMessages . map ( ( dbMsg : any ) => {
7673 // Ensure parts array exists, or create one from content if needed
77- const parts = Array . isArray ( dbMsg . parts ) && dbMsg . parts . length > 0
78- ? dbMsg . parts
79- : [ { type : 'text' , text : dbMsg . content || '' } ] ;
74+ const parts =
75+ Array . isArray ( dbMsg . parts ) && dbMsg . parts . length > 0
76+ ? dbMsg . parts
77+ : [ { type : 'text' , text : dbMsg . content || '' } ] ;
8078
8179 return {
8280 id : dbMsg . id ,
@@ -95,10 +93,7 @@ export async function POST(request: Request) {
9593 } ;
9694
9795 // Append current message to previous messages
98- const messages = [
99- ...formattedPreviousMessages ,
100- formattedCurrentMessage ,
101- ] ;
96+ const messages = [ ...formattedPreviousMessages , formattedCurrentMessage ] ;
10297
10398 const { longitude, latitude, city, country } = geolocation ( request ) ;
10499 const requestHints : RequestHints = {
@@ -115,7 +110,8 @@ export async function POST(request: Request) {
115110 id : message . id ,
116111 role : 'user' ,
117112 parts : message . parts ,
118- attachments : message . parts ?. filter ( ( part : any ) => part . type === 'file' ) ?? [ ] ,
113+ attachments :
114+ message . parts ?. filter ( ( part : any ) => part . type === 'file' ) ?? [ ] ,
119115 createdAt : new Date ( ) ,
120116 } ,
121117 ] ,
@@ -127,13 +123,19 @@ export async function POST(request: Request) {
127123 // Get the API key for supermemory tools
128124 const supermemoryApiKey = process . env . SUPERMEMORY_API_KEY ;
129125 if ( ! supermemoryApiKey ) {
130- return new ChatSDKError ( 'bad_request:api' , 'SUPERMEMORY_API_KEY is not configured' ) . toResponse ( ) ;
126+ return new ChatSDKError (
127+ 'bad_request:api' ,
128+ 'SUPERMEMORY_API_KEY is not configured' ,
129+ ) . toResponse ( ) ;
131130 }
132131
133132 // Get the API key for Exa tools
134133 const exaApiKey = process . env . EXA_API_KEY ;
135134 if ( ! exaApiKey ) {
136- return new ChatSDKError ( 'bad_request:api' , 'EXA_API_KEY is not configured' ) . toResponse ( ) ;
135+ return new ChatSDKError (
136+ 'bad_request:api' ,
137+ 'EXA_API_KEY is not configured' ,
138+ ) . toResponse ( ) ;
137139 }
138140
139141 // Always use user ID as container tag
@@ -145,25 +147,30 @@ export async function POST(request: Request) {
145147 const webSearchTool = createWebSearchTool ( exaApiKey ) ;
146148
147149 // Wrap the language model with supermemory
148- const baseModel = myProvider ( session . user . id ) . languageModel ( selectedChatModel ) ;
150+ const baseModel = myProvider ( session . user . id ) . languageModel (
151+ selectedChatModel ,
152+ ) ;
149153 const modelWithMemory = withSupermemory ( baseModel , containerTag , {
150154 conversationId : id ,
151- mode : " full" ,
155+ mode : ' full' ,
152156 verbose : true ,
153- addMemory : " always"
157+ addMemory : ' always' ,
154158 } ) ;
155159
156160 const toolsConfig = {
157161 searchMemories : memoryTools . searchMemories ,
158162 webSearch : webSearchTool ,
159163 } ;
160-
164+
161165 // Log what messages we're sending to AI SDK
162166 const convertedMessages = convertToModelMessages ( messages as any ) ;
163167 convertedMessages . forEach ( ( msg , idx ) => {
164168 console . log ( `[Chat API] Message ${ idx } :` , {
165169 role : msg . role ,
166- content : typeof msg . content === 'string' ? msg . content : JSON . stringify ( msg . content )
170+ content :
171+ typeof msg . content === 'string'
172+ ? msg . content
173+ : JSON . stringify ( msg . content ) ,
167174 } ) ;
168175 } ) ;
169176
@@ -178,20 +185,25 @@ export async function POST(request: Request) {
178185 if ( session . user ?. id ) {
179186 try {
180187 // Check if the response contains split delimiter
181- const splitMessages = text . split ( '<SPLIT>' ) . map ( t => t . trim ( ) ) . filter ( t => t . length > 0 ) ;
182-
188+ const splitMessages = text
189+ . split ( '<SPLIT>' )
190+ . map ( ( t ) => t . trim ( ) )
191+ . filter ( ( t ) => t . length > 0 ) ;
192+
183193 // If there are multiple messages, save them separately with small time delays
184194 if ( splitMessages . length > 1 ) {
185- const messagesToSave = splitMessages . map ( ( messageText , index ) => ( {
186- id : generateUUID ( ) ,
187- chatId : id ,
188- role : 'assistant' as const ,
189- parts : [ { type : 'text' as const , text : messageText } ] ,
190- attachments : [ ] ,
191- // Add small time increments to ensure correct ordering
192- createdAt : new Date ( Date . now ( ) + index * 100 ) ,
193- } ) ) ;
194-
195+ const messagesToSave = splitMessages . map (
196+ ( messageText , index ) => ( {
197+ id : generateUUID ( ) ,
198+ chatId : id ,
199+ role : 'assistant' as const ,
200+ parts : [ { type : 'text' as const , text : messageText } ] ,
201+ attachments : [ ] ,
202+ // Add small time increments to ensure correct ordering
203+ createdAt : new Date ( Date . now ( ) + index * 100 ) ,
204+ } ) ,
205+ ) ;
206+
195207 await saveMessages ( { messages : messagesToSave } ) ;
196208 } else {
197209 // Single message, save as before
@@ -217,7 +229,7 @@ export async function POST(request: Request) {
217229 experimental_telemetry : {
218230 isEnabled : isProductionEnvironment ,
219231 functionId : 'stream-text' ,
220- }
232+ } ,
221233 } ) ;
222234
223235 return result . toUIMessageStreamResponse ( ) ;
@@ -254,4 +266,4 @@ export async function DELETE(request: Request) {
254266
255267 const deletedChat = await deleteChatById ( { id } ) ;
256268 return Response . json ( deletedChat , { status : 200 } ) ;
257- }
269+ }