@@ -3,11 +3,11 @@ import {
   createDataStreamResponse,
   smoothStream,
   streamText,
+  wrapLanguageModel,
 } from 'ai';
 
 import { auth } from '@/app/(auth)/auth';
-import { customModel } from '@/lib/ai';
-import { models } from '@/lib/ai/models';
+import { myProvider } from '@/lib/ai/models';
 import { systemPrompt } from '@/lib/ai/prompts';
 import {
   deleteChatById,
@@ -48,8 +48,8 @@ export async function POST(request: Request) {
   const {
     id,
     messages,
-    modelId,
-  }: { id: string; messages: Array<Message>; modelId: string } =
+    selectedChatModel,
+  }: { id: string; messages: Array<Message>; selectedChatModel: string } =
     await request.json();
 
   const session = await auth();
@@ -58,12 +58,6 @@ export async function POST(request: Request) {
     return new Response('Unauthorized', { status: 401 });
   }
 
-  const model = models.find((model) => model.id === modelId);
-
-  if (!model) {
-    return new Response('Model not found', { status: 404 });
-  }
-
   const userMessage = getMostRecentUserMessage(messages);
 
   if (!userMessage) {
@@ -84,7 +78,7 @@ export async function POST(request: Request) {
   return createDataStreamResponse({
     execute: (dataStream) => {
       const result = streamText({
-        model: customModel(model.apiIdentifier),
+        model: myProvider.languageModel(selectedChatModel),
         system: systemPrompt,
         messages,
         maxSteps: 5,
@@ -93,32 +87,31 @@ export async function POST(request: Request) {
         experimental_generateMessageId: generateUUID,
         tools: {
           getWeather,
-          createDocument: createDocument({ session, dataStream, model }),
-          updateDocument: updateDocument({ session, dataStream, model }),
+          createDocument: createDocument({ session, dataStream }),
+          updateDocument: updateDocument({ session, dataStream }),
           requestSuggestions: requestSuggestions({
             session,
             dataStream,
-            model,
           }),
         },
-        onFinish: async ({ response }) => {
+        onFinish: async ({ response, reasoning }) => {
           if (session.user?.id) {
             try {
-              const responseMessagesWithoutIncompleteToolCalls =
-                sanitizeResponseMessages(response.messages);
+              const sanitizedResponseMessages = sanitizeResponseMessages({
+                messages: response.messages,
+                reasoning,
+              });
 
               await saveMessages({
-                messages: responseMessagesWithoutIncompleteToolCalls.map(
-                  (message) => {
-                    return {
-                      id: message.id,
-                      chatId: id,
-                      role: message.role,
-                      content: message.content,
-                      createdAt: new Date(),
-                    };
-                  },
-                ),
+                messages: sanitizedResponseMessages.map((message) => {
+                  return {
+                    id: message.id,
+                    chatId: id,
+                    role: message.role,
+                    content: message.content,
+                    createdAt: new Date(),
+                  };
+                }),
               });
             } catch (error) {
               console.error('Failed to save chat');
@@ -131,7 +124,12 @@ export async function POST(request: Request) {
         },
       });
 
-      result.mergeIntoDataStream(dataStream);
+      result.mergeIntoDataStream(dataStream, {
+        sendReasoning: true,
+      });
+    },
+    onError: (error) => {
+      return 'Oops, an error occured!';
     },
   });
 }
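
The `myProvider` registry imported from `@/lib/ai/models` is not shown in this commit, so here is a minimal sketch of what it might look like, assuming the AI SDK's `customProvider`, `wrapLanguageModel`, and `extractReasoningMiddleware` helpers. The model ids and underlying providers below are illustrative assumptions, not taken from this diff.

```ts
// Hypothetical sketch of `@/lib/ai/models` — ids and providers are assumptions.
import {
  customProvider,
  extractReasoningMiddleware,
  wrapLanguageModel,
} from 'ai';
import { openai } from '@ai-sdk/openai';
import { fireworks } from '@ai-sdk/fireworks';

export const myProvider = customProvider({
  languageModels: {
    // Plain chat model, looked up by `selectedChatModel` in the route handler.
    'chat-model-small': openai('gpt-4o-mini'),
    // Reasoning model: the middleware extracts <think> tags from the response,
    // which would feed the `reasoning` value used in onFinish and surfaced to
    // the client via `sendReasoning: true` when merging into the data stream.
    'chat-model-reasoning': wrapLanguageModel({
      model: fireworks('accounts/fireworks/models/deepseek-r1'),
      middleware: extractReasoningMiddleware({ tagName: 'think' }),
    }),
  },
});
```

With a registry like this, `myProvider.languageModel(selectedChatModel)` resolves the id sent by the client to a concrete model, which is presumably why the old `models.find(...)` lookup and its 404 branch could be dropped from the route handler.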