  * @returns {Object} Returns the response from the AI model.
  * @example {{ branch | generateDescription(pr, repo, source) }}
  * @license MIT
- * */
+ * */
 
-const lockFiles = [
+const MAX_TOKENS = 4096;
+const OPEN_AI_ENDPOINT = 'https://api.openai.com/v1/chat/completions';
+const LOCK_FILES = [
   'package-lock.json',
   'yarn.lock',
   'npm-shrinkwrap.json',
@@ -36,8 +38,7 @@ const lockFiles = [
   'flake.lock',
   'pnpm-lock.yaml'
 ];
-
-const excludeExpressionsList = [
+const EXCLUDE_EXPRESSIONS_LIST = [
   '.*\\.(ini|csv|xls|xlsx|xlr|doc|docx|txt|pps|ppt|pptx|dot|dotx|log|tar|rtf|dat|ipynb|po|profile|object|obj|dxf|twb|bcsymbolmap|tfstate|pdf|rbi|pem|crt|svg|png|jpeg|jpg|ttf)$',
   '.*(package-lock|packages\\.lock|package)\\.json$',
   '.*(yarn|gemfile|podfile|cargo|composer|pipfile|gopkg)\\.lock$',
@@ -47,55 +48,58 @@ const excludeExpressionsList = [
   '.*public/assets/.*\\.js',
   '.*ci\\.yml$'
 ];
+const IGNORE_FILES_REGEX_LIST = [
+  ...LOCK_FILES.map(f => f.replace('.', '\\.')),
+  ...EXCLUDE_EXPRESSIONS_LIST
+];
+const EXCLUDE_PATTERN = new RegExp(IGNORE_FILES_REGEX_LIST.join('|'));
 
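A quick sketch of how the new combined pattern behaves, using illustrative paths only. Both lists are partially elided by the hunks above, so the last result is only likely; also note that `replace('.', '\\.')` with a string pattern escapes just the first dot of each lock-file name, and any later unescaped dot still matches as a regex wildcard.

```js
// Illustrative paths; assumes LOCK_FILES, EXCLUDE_EXPRESSIONS_LIST and
// EXCLUDE_PATTERN are in scope as defined above.
EXCLUDE_PATTERN.test('frontend/package-lock.json'); // true  -> lock file
EXCLUDE_PATTERN.test('.github/workflows/ci.yml');   // true  -> matches .*ci\.yml$
EXCLUDE_PATTERN.test('assets/logo.svg');            // true  -> svg is in the extension list
EXCLUDE_PATTERN.test('src/askAI.js');               // false with the patterns shown here
```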
-const ignoreFilesRegexList = lockFiles
-  .map(file => file.replace('.', '\\.'))
-  .concat(excludeExpressionsList);
-const excludePattern = new RegExp(ignoreFilesRegexList.join('|'));
-
-const filterExcludeFiles = file => {
-  return !excludePattern.test(file) || (file.diff?.split(' ').length ?? 0) < 800;
-};
-
-const buildArrayContext = context => {
-  return context.filter(element => {
-    if (typeof element !== 'object') {
-      return true;
-    }
-
-    return context.filter(filterExcludeFiles);
-  });
+/**
+ * @description Check whether a file should be excluded from the context, e.g. "package-lock.json"
+ * @param {*} fileObject
+ * @returns {boolean} true if the file should be excluded
+ */
+const shouldExcludeFile = fileObject => {
+  const shouldExcludeByName = EXCLUDE_PATTERN.test(fileObject.original_file);
+  const shouldExcludeBySize = (fileObject.diff?.split(' ').length ?? 0) > 1000;
+
+  return shouldExcludeByName || shouldExcludeBySize;
 };
 
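A hedged usage sketch for the helper above, assuming file objects carry an `original_file` path and an optional `diff` string (the field names read by `shouldExcludeFile`); the inputs are made up.

```js
// Hypothetical file objects; exclusion can come from the name pattern or the
// size cut-off of roughly 1,000 space-separated tokens in the diff.
const lockFile = { original_file: 'package-lock.json', diff: '+  "lodash": "4.17.21"' };
const hugeFile = { original_file: 'src/app.js', diff: 'x '.repeat(1200) };
const smallSrc = { original_file: 'src/app.js', diff: '+const a = 1;' };

shouldExcludeFile(lockFile); // true  -> matches EXCLUDE_PATTERN
shouldExcludeFile(hugeFile); // true  -> diff splits into more than 1000 chunks
shouldExcludeFile(smallSrc); // false -> kept in the context
```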
-const buildSourceContext = context => {
-  return context.diff.files.filter(filterExcludeFiles);
+/**
+ * @description Check whether a file should be included in the context
+ * @param {*} fileObject
+ * @returns {boolean} true if the file should be included
+ */
+const shouldIncludeFile = fileObject => {
+  return !shouldExcludeFile(fileObject);
 };
 
 const buildContextForGPT = context => {
   if (Array.isArray(context)) {
-    return buildArrayContext(context);
+    return context.filter(element =>
+      typeof element !== 'object' ? true : shouldIncludeFile(element)
+    );
   }
 
   if (context?.diff?.files) {
-    return buildSourceContext(context);
+    const files = context.diff.files.filter(shouldIncludeFile);
+    return files;
   }
 
   return context;
 };
 
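`buildContextForGPT` accepts either an array (primitives pass through, object entries are filtered) or a source-like object whose `diff.files` list is filtered; anything else is returned untouched. A sketch with made-up inputs, assuming only the shapes implied by the code above:

```js
// Hypothetical inputs; only the shapes are assumed from the code above.
buildContextForGPT([
  'feat: add AI-generated descriptions',                      // non-object, kept
  { original_file: 'yarn.lock', diff: '+lodash@4.17.21' },    // excluded by name
  { original_file: 'src/ai.js', diff: '+const a = 1;' }       // kept
]);
// -> ['feat: add AI-generated descriptions', { original_file: 'src/ai.js', ... }]

buildContextForGPT({
  diff: {
    files: [
      { original_file: 'package-lock.json', diff: '+"left-pad": "1.3.0"' }, // dropped
      { original_file: 'src/ai.js', diff: '+const a = 1;' }                 // kept
    ]
  }
});
// -> [{ original_file: 'src/ai.js', diff: '+const a = 1;' }]
```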
-const askAI = async (context, role = '', prompt, token, callback) => {
-  const cacheKey = `${__filename}${role}${prompt}`;
-
-  if (process.env[cacheKey]) {
-    return callback(null, process.env[cacheKey]);
-  }
-
-  const maxTokens = 4096;
-  const endpoint = 'https://api.openai.com/v1/chat/completions';
-
+const askAI = async (context, role, prompt, token, callback) => {
   const formattedContext = buildContextForGPT(context);
 
-  const response = await fetch(endpoint, {
+  if (!formattedContext?.length) {
+    const message = `There are no context files to analyze.\nAll ${context?.diff?.files?.length} files were excluded by pattern or size.`;
+    console.log(message);
+    return callback(null, message);
+  }
+
+  const response = await fetch(OPEN_AI_ENDPOINT, {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
@@ -104,32 +108,32 @@ const askAI = async (context, role = '', prompt, token, callback) => {
     body: JSON.stringify({
       model: 'gpt-4o-2024-08-06',
       messages: [
-        ...(role ?
-          [
-            {
-              role: 'system',
-              content: `You are a ${role}. Answer only to the request, without any introductory or conclusion text.`
-            }]
-          : []),
+        {
+          role: 'system',
+          content: `You are a ${role}. Answer only to the request, without any introductory or conclusion text.`
+        },
         {
           role: 'user',
           content: JSON.stringify(formattedContext)
         },
         { role: 'user', content: prompt }
       ],
-      max_tokens: maxTokens
+      max_tokens: MAX_TOKENS
     })
   });
 
   const data = await response.json();
 
+  if (data?.error?.message) {
+    console.error(data.error.message);
+    return callback(null, data.error.message);
+  }
+
   const suggestion =
     data.choices?.[0]?.message?.content ??
     'context was too big for api, try with smaller context object';
 
-  process.env[cacheKey] = suggestion;
-
-  return callback(null, process.env[cacheKey]);
+  return callback(null, suggestion);
 };
 
 module.exports = {
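The export list is truncated above, so it is only an assumption that `askAI` is exposed directly; if it is, a callback-driven call might look like the following sketch, where the require path, API key variable, role, and prompt are all placeholders.

```js
// Hypothetical invocation; nothing below is taken from the PR itself.
const { askAI } = require('./generate-description'); // path is a guess

const sourceContext = {
  diff: { files: [{ original_file: 'src/ai.js', diff: '+const a = 1;' }] }
};

askAI(
  sourceContext,
  'senior software engineer',                       // interpolated into the system message
  'Write a short pull request description for these changes.',
  process.env.OPENAI_API_KEY,                       // the API token argument
  (err, description) => {
    if (err) return console.error(err);
    console.log(description); // model output, or the exclusion/error message
  }
);
```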