@@ -280,47 +280,46 @@ const app = new Hono();
280280// Existing post route...
281281// app.post('/notes', async (c) => { ... })
282282
283- app .get (" / " , async (c ) => {
284- const question = c .req .query (" text" ) || " What is the square root of 9?" ;
285-
286- const embeddings = await c .env .AI .run (" @cf/baai/bge-base-en-v1.5" , {
287- text : question,
288- });
289- const vectors = embeddings . data [ 0 ] ;
290-
291- const vectorQuery = await c . env . VECTOR_INDEX . query (vectors, { topK : 1 });
292- let vecId;
293- if (vectorQuery ? . matches ? . length ) {
294- vecId = vectorQuery .matches [ 0 ]. id ;
295- } else {
296- console . log ( " No matching vector found or vectorQuery.matches is empty " );
297- }
298-
299- let notes = [];
300- if ( vecId) {
301- const query = ` SELECT * FROM notes WHERE id = ? ` ;
302- const { results } = await c . env . DB . prepare (query). bind (vecId). all ();
303- if (results) notes = results . map (( vec ) => vec . text );
304- }
305-
306- const contextMessage = notes . length
307- ? ` Context: \n ${ notes . map (( note ) => ` - ${ note } ` ). join ( " \n " ) } `
308- : " " ;
309-
310- const systemPrompt = ` When answering the question or responding, use the context provided, if it is provided and relevant. ` ;
311-
312- const { response : answer } = await c . env . AI . run (
313- " @cf/meta/llama-3-8b-instruct " ,
314- {
315- messages : [
316- ... ( notes . length ? [ { role: " system " , content: contextMessage }] : []),
317- { role : " system " , content : systemPrompt },
318- { role : " user " , content : question },
319- ],
320- },
321- );
// GET / — RAG question-answering route.
// Flow: embed the incoming question, find the nearest stored note vector,
// load that note's text from D1, then ask the LLM with the note supplied
// as context. Responds with the model's plain-text answer.
// Query param: ?text=<question> (falls back to a demo question).
app.get('/', async (c) => {
  const question = c.req.query('text') || 'What is the square root of 9?';

  // Embed the question so it can be compared against stored note vectors.
  const embeddings = await c.env.AI.run('@cf/baai/bge-base-en-v1.5', {
    text: question,
  });
  const vectors = embeddings.data[0];

  // Nearest-neighbor lookup: only the single best match is used as context.
  const vectorQuery = await c.env.VECTOR_INDEX.query(vectors, { topK: 1 });
  let vecId;
  if (vectorQuery?.matches?.length) {
    vecId = vectorQuery.matches[0].id;
  } else {
    // Best-effort: with no match we still answer, just without context.
    console.log('No matching vector found or vectorQuery.matches is empty');
  }

  // Fetch the matched note's text from D1 (parameterized query — no injection).
  let notes = [];
  if (vecId) {
    const query = 'SELECT * FROM notes WHERE id = ?';
    const { results } = await c.env.DB.prepare(query).bind(vecId).all();
    if (results) notes = results.map((vec) => vec.text);
  }

  // Only build a context message when at least one note was found.
  const contextMessage = notes.length
    ? `Context:\n${notes.map((note) => `- ${note}`).join('\n')}`
    : '';

  const systemPrompt = `When answering the question or responding, use the context provided, if it is provided and relevant.`;

  const { response: answer } = await c.env.AI.run(
    '@cf/meta/llama-3-8b-instruct',
    {
      messages: [
        // Context goes in as an extra system message, omitted when empty.
        ...(notes.length ? [{ role: 'system', content: contextMessage }] : []),
        { role: 'system', content: systemPrompt },
        { role: 'user', content: question },
      ],
    },
  );

  return c.text(answer);
});
325324
326325app .onError ((err , c ) => {
0 commit comments