@@ -8,6 +8,21 @@ export interface PerplexityMessage {
   content: string;
 }
 
+export interface PerplexityToolFunction {
+  name: string;
+  description?: string;
+  parameters: Record<string, any>;
+}
+
+export interface PerplexityTool {
+  type: 'function';
+  function: PerplexityToolFunction;
+}
+
+export interface PerplexityResponseFormat {
+  type: 'text' | 'json_object';
+}
+
 export interface PerplexityRequest {
   model: string;
   messages: PerplexityMessage[];
@@ -18,6 +33,17 @@ export interface PerplexityRequest {
   presence_penalty?: number;
   frequency_penalty?: number;
   stream?: boolean;
+  tools?: PerplexityTool[];
+  response_format?: PerplexityResponseFormat;
+}
+
+export interface PerplexityToolCall {
+  id: string;
+  type: string;
+  function: {
+    name: string;
+    arguments: string;
+  };
 }
 
 export interface PerplexityResponse {
@@ -32,6 +58,7 @@ export interface PerplexityResponse {
     completion_tokens: number;
     total_tokens: number;
   };
+  tool_calls?: PerplexityToolCall[];
 }
 
 /**
@@ -104,6 +131,12 @@ export class PerplexityService {
       model?: string;
       maxTokens?: number;
       temperature?: number;
+      topP?: number;
+      topK?: number;
+      presencePenalty?: number;
+      frequencyPenalty?: number;
+      tools?: PerplexityTool[];
+      responseFormat?: PerplexityResponseFormat;
     },
   ): Promise<PerplexityResponse> {
     try {
@@ -116,6 +149,16 @@ export class PerplexityService {
         temperature: options?.temperature || 0.7,
       };
 
+      // Add optional parameters if provided
+      if (options?.topP !== undefined) request.top_p = options.topP;
+      if (options?.topK !== undefined) request.top_k = options.topK;
+      if (options?.presencePenalty !== undefined)
+        request.presence_penalty = options.presencePenalty;
+      if (options?.frequencyPenalty !== undefined)
+        request.frequency_penalty = options.frequencyPenalty;
+      if (options?.tools) request.tools = options.tools;
+      if (options?.responseFormat) request.response_format = options.responseFormat;
+
       const response = await axios.post<PerplexityResponse>(
         `${this.baseUrl}/chat/completions`,
         request,
@@ -140,6 +183,68 @@ export class PerplexityService {
     }
   }
 
+  /**
+   * Queries the Perplexity AI API with streaming responses
+   * @returns A readable stream of the response
+   */
+  async createStreamingChatCompletion(
+    messages: PerplexityMessage[],
+    options?: {
+      model?: string;
+      maxTokens?: number;
+      temperature?: number;
+      topP?: number;
+      topK?: number;
+      presencePenalty?: number;
+      frequencyPenalty?: number;
+      tools?: PerplexityTool[];
+      responseFormat?: PerplexityResponseFormat;
+    },
+  ): Promise<ReadableStream> {
+    try {
+      const apiKey = await this.getApiKey();
+
+      const request: PerplexityRequest = {
+        model: options?.model || this.defaultModel,
+        messages,
+        max_tokens: options?.maxTokens || this.defaultMaxTokens,
+        temperature: options?.temperature || 0.7,
+        stream: true,
+      };
+
+      // Add optional parameters if provided
+      if (options?.topP !== undefined) request.top_p = options.topP;
+      if (options?.topK !== undefined) request.top_k = options.topK;
+      if (options?.presencePenalty !== undefined)
+        request.presence_penalty = options.presencePenalty;
+      if (options?.frequencyPenalty !== undefined)
+        request.frequency_penalty = options.frequencyPenalty;
+      if (options?.tools) request.tools = options.tools;
+      if (options?.responseFormat) request.response_format = options.responseFormat;
+
+      const response = await axios.post(`${this.baseUrl}/chat/completions`, request, {
+        headers: {
+          'Content-Type': 'application/json',
+          Authorization: `Bearer ${apiKey}`,
+        },
+        responseType: 'stream',
+      });
+
+      return response.data;
+    } catch (error: unknown) {
+      if (axios.isAxiosError(error)) {
+        this.logger.error(
+          `Perplexity API streaming error: ${error.response?.status} - ${error.message}`,
+        );
+        throw new Error(`Perplexity API streaming error: ${error.message}`);
+      }
+
+      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+      this.logger.error(`Failed to create streaming chat completion: ${errorMessage}`);
+      throw new Error('Failed to create streaming chat completion');
+    }
+  }
+
   /**
    * Generates a simplified explanation of medical text
    *
@@ -197,6 +302,7 @@ export class PerplexityService {
       const response = await this.createChatCompletion(messages, {
         temperature: 0.3, // Lower temperature for more accurate/factual responses
         maxTokens: 4000, // Ensure there's enough space for the full corrected analysis
+        responseFormat: { type: 'json_object' }, // Use JSON mode for reliable JSON response
       });
 
       // Parse the response to get the corrected analysis
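
For reference, a minimal usage sketch of the new options and the streaming method. The import path, the `perplexity` variable, the tool name/schema, and the `role` field on `PerplexityMessage` are assumptions, and the stream handling assumes an OpenAI-style SSE payload delivered as an async-iterable Node stream by axios; it is a sketch, not part of the change.

```ts
// Hypothetical usage sketch; names and paths below are assumptions.
import type {
  PerplexityMessage,
  PerplexityService,
  PerplexityTool,
} from './perplexity.service'; // assumed module path

// Assumed to be provided by Nest dependency injection.
declare const perplexity: PerplexityService;

// Assumes PerplexityMessage carries an OpenAI-style `role` field.
const messages: PerplexityMessage[] = [
  { role: 'user', content: 'Summarize the latest hypertension guidelines as JSON.' },
];

// Illustrative tool definition only; name and JSON Schema are made up.
const tools: PerplexityTool[] = [
  {
    type: 'function',
    function: {
      name: 'lookup_guideline',
      description: 'Look up a clinical guideline by topic',
      parameters: { type: 'object', properties: { topic: { type: 'string' } } },
    },
  },
];

// Non-streaming call using JSON mode and a tool definition.
const completion = await perplexity.createChatCompletion(messages, {
  temperature: 0.3,
  tools,
  responseFormat: { type: 'json_object' },
});
console.log(completion.tool_calls ?? completion);

// Streaming call. With responseType: 'stream', axios (in Node) returns a
// Readable that is async-iterable over Buffer chunks. Assumes OpenAI-style
// "data: {...}" SSE lines ending with "data: [DONE]"; this naive parser does
// not buffer events split across chunk boundaries.
const stream = await perplexity.createStreamingChatCompletion(messages);
for await (const chunk of stream as unknown as AsyncIterable<Buffer>) {
  for (const line of chunk.toString('utf8').split('\n')) {
    if (!line.startsWith('data: ') || line.includes('[DONE]')) continue;
    const event = JSON.parse(line.slice('data: '.length));
    process.stdout.write(event.choices?.[0]?.delta?.content ?? '');
  }
}
```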