@@ -53,29 +53,83 @@ export function generateContentPatch (
     const serviceProvider = Vendors.VERTEXAI
     const customAttributes = context.active().getValue(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY) ?? {}
 
-    const prompts = args.flatMap((arg: string | { contents: CandidateContent[] }) => {
-      if (typeof arg === 'string') {
+    let argTools: any[] = []
+    const prompts = args.flatMap((arg: string | { contents?: CandidateContent[], tools?: any, functionResponse?: any }) => {
+      if (Array.isArray(arg)) {
+        // Handle the case where `arg` is an array (like [{ functionResponse: ... }])
+        return arg.flatMap(innerArg => {
+          if (Array.isArray(innerArg.tools)) argTools = argTools.concat(innerArg.tools)
+          if (innerArg.functionResponse != null) {
+            return [{ role: 'model', content: JSON.stringify(innerArg.functionResponse) }]
+          } else if (innerArg.contents != null) {
+            return innerArg.contents.map((content: CandidateContent) => ({
+              role: content.role,
+              content: content.parts.map((part: CandidateContentPart) => {
+                if (typeof part.text === 'string') {
+                  return part.text
+                } else if ('functionCall' in part) {
+                  return JSON.stringify((part as any).functionCall)
+                } else if (typeof part === 'object') {
+                  return JSON.stringify(part)
+                } else {
+                  return ''
+                }
+              }).join('')
+            }))
+          } else {
+            return []
+          }
+        })
+      } else if (typeof arg === 'string') {
         // Handle the case where `arg` is a string
         return [{ role: 'user', content: arg }]
-      } else {
+      } else if (arg.contents != null) {
+        if (Array.isArray(arg.tools)) argTools = argTools.concat(arg.tools)
         // Handle the case where `arg` has the `contents` structure
         return arg.contents.map(content => ({
           role: content.role,
-          content: content.parts.map(part => part.text).join('')
+          content: content.parts.map((part: CandidateContentPart) => {
+            if (typeof part.text === 'string') {
+              return part.text
+            } else if ('functionCall' in part) {
+              return JSON.stringify((part as any).functionCall)
+            } else if (typeof part === 'object') {
+              return JSON.stringify(part)
+            } else {
+              return ''
+            }
+          }).join('')
         }))
+      } else if (arg.functionResponse != null) {
+        // Handle the case where `arg` has a `functionResponse` structure
+        return [{ role: 'model', content: JSON.stringify(arg.functionResponse) }]
+      } else {
+        return []
       }
     })
 
+    const allTools = argTools.concat(this?.tools ?? [])
     const attributes: LLMSpanAttributes = {
       'langtrace.sdk.name': sdkName,
       'langtrace.service.name': serviceProvider,
       'langtrace.service.type': 'llm',
       'gen_ai.operation.name': 'chat',
       'langtrace.service.version': version,
       'langtrace.version': langtraceVersion,
-      'url.full': '',
-      'url.path': this?.publisherModelEndpoint,
-      'gen_ai.request.model': this?.model,
+      'url.full': this?.apiEndpoint,
+      'url.path': this?.publisherModelEndpoint ?? this?.resourcePath ?? undefined,
+      'gen_ai.request.model': (() => {
+        if (this?.model !== undefined && this.model !== null) {
+          return this.model
+        }
+        if (typeof this?.resourcePath === 'string') {
+          return this.resourcePath.split('/').pop()
+        }
+        if (typeof this?.publisherModelEndpoint === 'string') {
+          return this.publisherModelEndpoint.split('/').pop()
+        }
+        return undefined
+      })(),
       'http.max.retries': this?._client?.maxRetries,
       'http.timeout': this?._client?.timeout,
       'gen_ai.request.temperature': this?.generationConfig?.temperature,
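The flatMap above normalizes every supported request shape into flat `{ role, content }` pairs before they are attached to the span. Below is a minimal standalone sketch of that normalization. The `SimplePart`/`SimpleContent` types and the sample inputs are illustrative only (they stand in for the SDK's `CandidateContent`/`CandidateContentPart`), but the branching mirrors the hunk: plain strings become user prompts, `contents` entries have their parts stringified, and `functionResponse` payloads are recorded as model messages.

```ts
// Illustrative types; the real ones are the SDK's CandidateContent/CandidateContentPart.
interface SimplePart { text?: string, functionCall?: object }
interface SimpleContent { role: string, parts: SimplePart[] }
type Arg = string | { contents?: SimpleContent[], functionResponse?: object }

// Collapses every arg shape into { role, content } pairs, like the flatMap in the hunk above.
function normalizePrompts (args: Arg[]): Array<{ role: string, content: string }> {
  return args.flatMap(arg => {
    if (typeof arg === 'string') {
      return [{ role: 'user', content: arg }]
    } else if (arg.contents != null) {
      return arg.contents.map(content => ({
        role: content.role,
        content: content.parts
          .map(part => typeof part.text === 'string' ? part.text : JSON.stringify(part.functionCall ?? part))
          .join('')
      }))
    } else if (arg.functionResponse != null) {
      return [{ role: 'model', content: JSON.stringify(arg.functionResponse) }]
    } else {
      return []
    }
  })
}

// Example: a text prompt, a functionCall part, and a functionResponse all flatten uniformly.
console.log(normalizePrompts([
  'What is the weather in Paris?',
  { contents: [{ role: 'model', parts: [{ functionCall: { name: 'get_weather', args: { city: 'Paris' } } }] }] },
  { functionResponse: { name: 'get_weather', response: { temp: '21C' } } }
]))
```

Running the sample prints three entries: a user prompt, a stringified function call, and a stringified function response.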
@@ -86,6 +140,7 @@ export function generateContentPatch (
       'gen_ai.request.frequency_penalty': this?.generationConfig?.frequencyPenalty,
       'gen_ai.request.presence_penalty': this?.generationConfig?.presencePenalty,
       'gen_ai.request.seed': this?.generationConfig?.seed,
+      'gen_ai.request.tools': allTools.length > 0 ? JSON.stringify(allTools) : undefined,
       ...customAttributes
     }
 
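The attribute changes do two things: `gen_ai.request.model` now falls back to the last path segment of `resourcePath` or `publisherModelEndpoint` when no explicit model is set, and the tools collected from the arguments are merged with any tools configured on the client and serialized into `gen_ai.request.tools`. A hedged sketch of that derivation, with a hypothetical `ClientLike` shape standing in for `this`:

```ts
// Hypothetical shape standing in for the patched client (`this` in the diff).
interface ClientLike {
  model?: string
  resourcePath?: string
  publisherModelEndpoint?: string
  tools?: object[]
}

// Same precedence as the IIFE in the diff: explicit model, then the last path
// segment of resourcePath, then of publisherModelEndpoint.
function requestModel (client: ClientLike): string | undefined {
  if (client.model != null) return client.model
  if (typeof client.resourcePath === 'string') return client.resourcePath.split('/').pop()
  if (typeof client.publisherModelEndpoint === 'string') return client.publisherModelEndpoint.split('/').pop()
  return undefined
}

// Tools gathered from the request args plus any tools configured on the client,
// serialized only when at least one is present so the attribute stays unset otherwise.
function requestTools (argTools: object[], client: ClientLike): string | undefined {
  const allTools = argTools.concat(client.tools ?? [])
  return allTools.length > 0 ? JSON.stringify(allTools) : undefined
}

// e.g. 'publishers/google/models/gemini-1.5-pro' -> 'gemini-1.5-pro' (illustrative path)
console.log(requestModel({ resourcePath: 'publishers/google/models/gemini-1.5-pro' }))
```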
@@ -179,7 +234,17 @@ async function * handleStreamResponse (
       const { content } = chunk.candidates.map((candidate: Candidate) => {
         return {
           role: candidate.content.role,
-          content: candidate.content.parts.map((part: CandidateContentPart) => part.text).join('')
+          content: candidate.content.parts.map((part: CandidateContentPart) => {
+            if (typeof part.text === 'string') {
+              return part.text
+            } else if ('functionCall' in part) {
+              return JSON.stringify(part.functionCall)
+            } else if (typeof part === 'object') {
+              return JSON.stringify(part)
+            } else {
+              return ''
+            }
+          }).join('')
         }
       })[0]
       const tokenCount = estimateTokens(content)
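Streamed chunks now get the same part handling as the non-streaming path: text parts pass through, while `functionCall` parts (and any other object-shaped parts) are JSON-stringified so `estimateTokens` always receives a string. A small sketch of that serialization, under an assumed minimal part shape (the real `CandidateContentPart` comes from the Vertex AI SDK):

```ts
// Assumed minimal part shape; illustrative only.
interface PartLike {
  text?: string
  functionCall?: { name: string, args?: object }
  [key: string]: unknown
}

// Mirrors the per-part branch added to handleStreamResponse above.
function serializePart (part: PartLike): string {
  if (typeof part.text === 'string') {
    return part.text
  } else if (part.functionCall != null) {
    return JSON.stringify(part.functionCall)
  } else if (typeof part === 'object') {
    return JSON.stringify(part)
  }
  return ''
}

// A chunk mixing text and a function call still joins into one string for token counting.
const sampleParts: PartLike[] = [
  { text: 'Looking that up: ' },
  { functionCall: { name: 'get_weather', args: { city: 'Paris' } } }
]
console.log(sampleParts.map(serializePart).join(''))
```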