@@ -24,79 +24,81 @@ interface Environment {
 
 export default {
 	fetch: async (request: Request, env: Environment) => {
-		const bot = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN);
-		await bot
-			.on('default', async function () {
-				switch (bot.update_type) {
-					case 'message':
-						const messages = [
-							{ role: 'system', content: 'You are a friendly assistant' },
-							{
-								role: 'user',
-								content: bot.update.message?.text?.toString() ?? '',
-							},
-						];
-						const response = await env.AI.run('@cf/meta/llama-3-8b-instruct', { messages });
-						if ('response' in response) {
-							await bot.reply(response.response ?? '');
-						}
-						break;
-					case 'inline':
-						const inline_messages = [
-							{ role: 'system', content: 'You are a friendly assistant' },
-							{
-								role: 'user',
-								content: bot.update.inline_query?.query.toString() ?? '',
-							},
-						];
-						const inline_response = await env.AI.run('@cf/meta/llama-3-8b-instruct', { messages: inline_messages, max_tokens: 50 });
-						if ('response' in inline_response) {
-							await bot.reply(inline_response.response ?? '');
-						}
+		if (request.method === 'POST') {
+			const bot = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN);
+			await bot
+				.on('default', async function () {
+					switch (bot.update_type) {
+						case 'message':
+							const messages = [
+								{ role: 'system', content: 'You are a friendly assistant' },
+								{
+									role: 'user',
+									content: bot.update.message?.text?.toString() ?? '',
+								},
+							];
+							const response = await env.AI.run('@cf/meta/llama-3-8b-instruct', { messages });
+							if ('response' in response) {
+								await bot.reply(response.response ?? '');
+							}
+							break;
+						case 'inline':
+							const inline_messages = [
+								{ role: 'system', content: 'You are a friendly assistant' },
+								{
+									role: 'user',
+									content: bot.update.inline_query?.query.toString() ?? '',
+								},
+							];
+							const inline_response = await env.AI.run('@cf/meta/llama-3-8b-instruct', { messages: inline_messages, max_tokens: 50 });
+							if ('response' in inline_response) {
+								await bot.reply(inline_response.response ?? '');
+							}
 
-					default:
-						break;
-				}
-				return new Response('ok');
-			})
-			.handle(request.clone());
-		const bot2 = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN2);
-		await bot2
-			.on('default', async function () {
-				switch (bot2.update_type) {
-					case 'message':
-						await bot2.reply('https://duckduckgo.com/?q=' + encodeURIComponent(bot.update.message?.text?.toString() ?? ''));
-						break;
-					case 'inline':
-						await bot2.reply('https://duckduckgo.com/?q=' + encodeURIComponent(bot.update.inline_query?.query ?? ''));
-						break;
+						default:
+							break;
+					}
+					return new Response('ok');
+				})
+				.handle(request.clone());
+			const bot2 = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN2);
+			await bot2
+				.on('default', async function () {
+					switch (bot2.update_type) {
+						case 'message':
+							await bot2.reply('https://duckduckgo.com/?q=' + encodeURIComponent(bot.update.message?.text?.toString() ?? ''));
+							break;
+						case 'inline':
+							await bot2.reply('https://duckduckgo.com/?q=' + encodeURIComponent(bot.update.inline_query?.query ?? ''));
+							break;
 
-					default:
-						break;
-				}
-				return new Response('ok');
-			})
-			.handle(request.clone());
-		const bot3 = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN3);
-		await bot3
-			.on('default', async function () {
-				switch (bot3.update_type) {
-					case 'inline':
-						const { translated_text } = await env.AI.run('@cf/meta/m2m100-1.2b', {
-							text: bot3.update.inline_query?.query.toString() ?? '',
-							source_lang: 'french',
-							target_lang: 'english',
-						});
-						await bot3.reply(translated_text ?? '');
-						break;
+						default:
+							break;
+					}
+					return new Response('ok');
+				})
+				.handle(request.clone());
+			const bot3 = new TelegramBot(env.SECRET_TELEGRAM_API_TOKEN3);
+			await bot3
+				.on('default', async function () {
+					switch (bot3.update_type) {
+						case 'inline':
+							const { translated_text } = await env.AI.run('@cf/meta/m2m100-1.2b', {
+								text: bot3.update.inline_query?.query.toString() ?? '',
+								source_lang: 'french',
+								target_lang: 'english',
+							});
+							await bot3.reply(translated_text ?? '');
+							break;
 
-					default:
-						break;
-				}
+						default:
+							break;
+					}
 
-				return new Response('ok');
-			})
-			.handle(request.clone());
+					return new Response('ok');
+				})
+				.handle(request.clone());
+		}
 
 		return new Response('ok');
 	},
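
For reference, the handler above reads three Telegram bot tokens and a Workers AI binding from env, so the Environment interface named in the hunk header needs matching fields. A minimal sketch of that interface, inferred only from the bindings this code touches (the actual declaration earlier in the file may differ), could look like this:

interface Environment {
	// Telegram bot tokens, typically stored as Worker secrets
	// (e.g. wrangler secret put SECRET_TELEGRAM_API_TOKEN)
	SECRET_TELEGRAM_API_TOKEN: string;
	SECRET_TELEGRAM_API_TOKEN2: string;
	SECRET_TELEGRAM_API_TOKEN3: string;
	// Workers AI binding used for the '@cf/meta/llama-3-8b-instruct'
	// and '@cf/meta/m2m100-1.2b' calls above
	AI: Ai;
}

Recent versions of @cloudflare/workers-types provide the Ai type for the AI binding; on older type packages, declaring the field as any is a workable fallback.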