
Commit b06f6e3

Merge pull request #715 from meetpateltech/perplexity-models
feat: added perplexity model
2 parents: 9efc709 + 86f37fc

5 files changed: +43 −0 lines

.env.example

Lines changed: 5 additions & 0 deletions
@@ -70,6 +70,11 @@ LMSTUDIO_API_BASE_URL=
 # You only need this environment variable set if you want to use xAI models
 XAI_API_KEY=
 
+# Get your Perplexity API Key here -
+# https://www.perplexity.ai/settings/api
+# You only need this environment variable set if you want to use Perplexity models
+PERPLEXITY_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
 
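The variable is optional: only deployments that want Perplexity models need to fill it in. A minimal sketch of checking for it at startup, assuming the value is exposed through a Node-style `process.env` (the project may surface it differently, for example via the Cloudflare bindings used in the files below):

```ts
// Hypothetical startup check, not part of this commit: warn early when the
// optional key is absent. Assumes a Node-style process.env.
const perplexityKey = process.env.PERPLEXITY_API_KEY;

if (!perplexityKey) {
  console.warn('PERPLEXITY_API_KEY is not set; Perplexity models will be unavailable.');
}
```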

app/lib/.server/llm/api-key.ts

Lines changed: 2 additions & 0 deletions
@@ -39,6 +39,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
       return env.TOGETHER_API_KEY || cloudflareEnv.TOGETHER_API_KEY;
     case 'xAI':
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
+    case 'Perplexity':
+      return env.PERPLEXITY_API_KEY || cloudflareEnv.PERPLEXITY_API_KEY;
     case 'Cohere':
       return env.COHERE_API_KEY;
     case 'AzureOpenAI':
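Read in isolation, the new branch resolves the key from two places: a locally configured environment value first, then the Cloudflare `Env` binding. A self-contained sketch of that fallback; the function and type names here are illustrative, not part of the project:

```ts
// Illustrative reduction of the 'Perplexity' case added above. Names are hypothetical;
// only the fallback order (local env value, then Cloudflare binding) comes from the diff.
type CloudflareEnv = { PERPLEXITY_API_KEY?: string };

function resolvePerplexityKey(
  env: Record<string, string | undefined>,
  cloudflareEnv: CloudflareEnv,
): string | undefined {
  return env.PERPLEXITY_API_KEY || cloudflareEnv.PERPLEXITY_API_KEY;
}

// Example: resolvePerplexityKey(process.env, { PERPLEXITY_API_KEY: '...' });
```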

app/lib/.server/llm/model.ts

Lines changed: 11 additions & 0 deletions
@@ -128,6 +128,15 @@ export function getXAIModel(apiKey: OptionalApiKey, model: string) {
   return openai(model);
 }
 
+export function getPerplexityModel(apiKey: OptionalApiKey, model: string) {
+  const perplexity = createOpenAI({
+    baseURL: 'https://api.perplexity.ai/',
+    apiKey,
+  });
+
+  return perplexity(model);
+}
+
 export function getModel(
   provider: string,
   model: string,
@@ -170,6 +179,8 @@
       return getXAIModel(apiKey, model);
     case 'Cohere':
       return getCohereAIModel(apiKey, model);
+    case 'Perplexity':
+      return getPerplexityModel(apiKey, model);
     default:
       return getOllamaModel(baseURL, model);
   }
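`getPerplexityModel` reuses the OpenAI-compatible client from `@ai-sdk/openai`, pointed at Perplexity's API base URL. A hedged usage sketch, assuming the Vercel AI SDK (`ai` package) that `createOpenAI` belongs to is available; the key source, model choice, and prompt are placeholders:

```ts
import { createOpenAI } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Same construction as getPerplexityModel above: an OpenAI-compatible client
// pointed at Perplexity's endpoint.
const perplexity = createOpenAI({
  baseURL: 'https://api.perplexity.ai/',
  apiKey: process.env.PERPLEXITY_API_KEY,
});

const model = perplexity('llama-3.1-sonar-small-128k-online');

// One-shot generation keeps the sketch small; a chat UI would typically stream instead.
const { text } = await generateText({
  model,
  prompt: 'In one sentence, what is Perplexity Sonar?',
});

console.log(text);
```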

app/utils/constants.ts

Lines changed: 24 additions & 0 deletions
@@ -293,6 +293,30 @@ const PROVIDER_LIST: ProviderInfo[] = [
     ],
     getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
   },
+  {
+    name: 'Perplexity',
+    staticModels: [
+      {
+        name: 'llama-3.1-sonar-small-128k-online',
+        label: 'Sonar Small Online',
+        provider: 'Perplexity',
+        maxTokenAllowed: 8192,
+      },
+      {
+        name: 'llama-3.1-sonar-large-128k-online',
+        label: 'Sonar Large Online',
+        provider: 'Perplexity',
+        maxTokenAllowed: 8192,
+      },
+      {
+        name: 'llama-3.1-sonar-huge-128k-online',
+        label: 'Sonar Huge Online',
+        provider: 'Perplexity',
+        maxTokenAllowed: 8192,
+      },
+    ],
+    getApiKeyLink: 'https://www.perplexity.ai/settings/api',
+  },
 ];
 
 export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
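With this entry in place, the three Sonar models are discoverable from `PROVIDER_LIST` like any other statically defined provider. A small self-contained sketch of such a lookup, with the list shape reduced to the fields visible in this diff (the type and variable names here are illustrative, and the real `ProviderInfo` type may carry additional fields):

```ts
// Illustrative, reduced shapes; only the fields shown in the diff are modeled.
interface StaticModel {
  name: string;
  label: string;
  provider: string;
  maxTokenAllowed: number;
}

interface Provider {
  name: string;
  staticModels: StaticModel[];
  getApiKeyLink: string;
}

const providers: Provider[] = [
  {
    name: 'Perplexity',
    staticModels: [
      { name: 'llama-3.1-sonar-small-128k-online', label: 'Sonar Small Online', provider: 'Perplexity', maxTokenAllowed: 8192 },
      { name: 'llama-3.1-sonar-large-128k-online', label: 'Sonar Large Online', provider: 'Perplexity', maxTokenAllowed: 8192 },
      { name: 'llama-3.1-sonar-huge-128k-online', label: 'Sonar Huge Online', provider: 'Perplexity', maxTokenAllowed: 8192 },
    ],
    getApiKeyLink: 'https://www.perplexity.ai/settings/api',
  },
];

// Look up the provider and list its model identifiers, as a UI model picker might.
const perplexity = providers.find((p) => p.name === 'Perplexity');
console.log(perplexity?.staticModels.map((m) => m.name));
```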

worker-configuration.d.ts

Lines changed: 1 addition & 0 deletions
@@ -14,4 +14,5 @@ interface Env {
   GOOGLE_GENERATIVE_AI_API_KEY: string;
   MISTRAL_API_KEY: string;
   XAI_API_KEY: string;
+  PERPLEXITY_API_KEY: string;
 }
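Declaring the binding on `Env` is what lets Worker-side code such as `getAPIKey` read `env.PERPLEXITY_API_KEY` with type checking. A minimal sketch of a module-syntax Cloudflare Worker handler using the typed binding; the handler itself is hypothetical and only illustrates the access pattern:

```ts
// Hypothetical Worker handler, not project code: the Env declaration above is
// what makes env.PERPLEXITY_API_KEY a typed property here.
interface Env {
  PERPLEXITY_API_KEY: string;
}

export default {
  async fetch(_request: Request, env: Env): Promise<Response> {
    const configured = Boolean(env.PERPLEXITY_API_KEY);
    return new Response(configured ? 'Perplexity key configured' : 'PERPLEXITY_API_KEY is missing');
  },
};
```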
