@@ -227,7 +227,7 @@ export default {
 		let response: AiTextGenerationOutput;
 		try {
 			// @ts-expect-error broken bindings
-			response = await env.AI.run('@cf/meta/llama-3.1-8b-instruct', { messages, max_tokens: 150 });
+			response = await env.AI.run('@cf/meta/llama-3.1-70b-instruct', { messages, max_tokens: 150 });
 		} catch (e) {
 			console.log(e);
 			await bot.reply(`Error: ${e as string}`);
@@ -258,7 +258,7 @@ export default {
 		let response: AiTextGenerationOutput;
 		try {
 			// @ts-expect-error broken bindings
-			response = await env.AI.run('@cf/meta/llama-3.1-8b-instruct', { messages, max_tokens: 100 });
+			response = await env.AI.run('@cf/meta/llama-3.1-70b-instruct', { messages, max_tokens: 100 });
 		} catch (e) {
 			console.log(e);
 			await bot.reply(`Error: ${e as string}`);
@@ -292,7 +292,7 @@ export default {
 		let response: AiTextGenerationOutput;
 		try {
 			// @ts-expect-error broken bindings
-			response = await env.AI.run('@cf/meta/llama-3.1-8b-instruct', { messages, max_tokens: 150 });
+			response = await env.AI.run('@cf/meta/llama-3.1-70b-instruct', { messages, max_tokens: 150 });
 		} catch (e) {
 			console.log(e);
 			await bot.reply(`Error: ${e as string}`);
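For context, a minimal standalone sketch of the Workers AI call these hunks update, assuming a plain fetch handler; the Env interface, prompt contents, and Response.json wrapper are illustrative and not part of this change:

export interface Env {
	AI: Ai;
}

export default {
	async fetch(_request: Request, env: Env): Promise<Response> {
		// Illustrative prompt; the real messages array is built elsewhere in the Worker.
		const messages = [
			{ role: 'system', content: 'You are a helpful assistant.' },
			{ role: 'user', content: 'Say hello.' },
		];

		// Same workaround as in the diff above: the generated AI binding types reject this call.
		// @ts-expect-error broken bindings
		const response: AiTextGenerationOutput = await env.AI.run('@cf/meta/llama-3.1-70b-instruct', {
			messages,
			max_tokens: 150,
		});

		return Response.json(response);
	},
};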