@@ -11,33 +11,46 @@ import {
 import { getRedisInstance } from './services/redis.ts';
 import { getOpenAiInstance } from './services/open-ai.ts';
 import { getEnvironmentVariableOr } from './utils/env.ts';
+import type { Chat } from '../src/types.ts';
+
+type AiChatBodyRequest = {
+  messages: Chat[];
+  currentCode: string;
+  currentOpts: Record<string, Record<string, string | boolean>>;
+  chatId: string;
+};
+
+const buildAiChatMessages = (request: AiChatBodyRequest): Chat[] => {
+  const validatedMessages = request.messages.filter((message: { role: string; content: string }) => {
+    return message.content.length < 500;
+  });
+
+  return [
+    {
+      role: 'system',
+      content: `
+        You are a smart contract assistant built by OpenZeppelin to help users using OpenZeppelin Contracts Wizard.
+        The current options are ${JSON.stringify(request.currentOpts)}.
+        The current contract code is ${request.currentCode}
+        Please be kind and concise. Keep responses to <100 words.
+      `.trim(),
+    },
+    ...validatedMessages,
+  ];
+};

 export default async (req: Request): Promise<Response> => {
   try {
-    const data = await req.json();
+    const aiChatBodyRequest: AiChatBodyRequest = await req.json();

     const redis = getRedisInstance();
     const openai = getOpenAiInstance();

-    const validatedMessages = data.messages.filter((message: { role: string; content: string }) => {
-      return message.content.length < 500;
-    });
-
-    const messages = [
-      {
-        role: 'system',
-        content: `
-          You are a smart contract assistant built by OpenZeppelin to help users using OpenZeppelin Contracts Wizard.
-          The current options are ${JSON.stringify(data.currentOpts)}.
-          Please be kind and concise. Keep responses to <100 words.
-        `.trim(),
-      },
-      ...validatedMessages,
-    ];
+    const aiChatMessages = buildAiChatMessages(aiChatBodyRequest);

     const response = await openai.chat.completions.create({
       model: getEnvironmentVariableOr('OPENAI_MODEL', 'gpt-4o-mini'),
-      messages,
+      messages: aiChatMessages,
       functions: [
         erc20Function,
         erc721Function,
@@ -53,13 +66,13 @@ export default async (req: Request): Promise<Response> => {

     const stream = OpenAIStream(response, {
       async onCompletion(completion) {
-        const id = data.chatId;
+        const id = aiChatBodyRequest.chatId;
         const updatedAt = Date.now();
         const payload = {
           id,
           updatedAt,
           messages: [
-            ...messages,
+            ...aiChatMessages,
             {
               content: completion,
               role: 'assistant',
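
A rough usage sketch of the extracted helper, not part of the commit: it assumes Chat is a { role: string; content: string } record, and every value below is invented for illustration.

const exampleRequest: AiChatBodyRequest = {
  messages: [{ role: 'user', content: 'Make the token mintable.' }],
  currentCode: 'contract MyToken is ERC20 { /* ... */ }',
  currentOpts: { erc20: { name: 'MyToken', mintable: true } },
  chatId: 'chat-123',
};

// buildAiChatMessages prepends a system prompt embedding currentOpts and
// currentCode, then appends only the request messages under 500 characters.
const exampleMessages = buildAiChatMessages(exampleRequest);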