Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 136 additions & 0 deletions app/components/@settings/tabs/providers/components/ModelInput.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
import React, { useState, useCallback, useEffect } from 'react';
import { Input } from '~/components/ui/Input';
import { Label } from '~/components/ui/Label';
import { CheckCircle, AlertCircle } from 'lucide-react';

/**
 * Props for the ModelInput free-text model-name field with a suggestion
 * dropdown and an optional validity indicator.
 */
interface ModelInputProps {
  /** Provider identifier; used to build a unique id for the input element. */
  provider: string;
  /** Current model name (controlled value owned by the parent). */
  value: string;
  /** Called with the committed model name (on blur, or when a suggestion is clicked). */
  onChange: (value: string) => void;
  /** Known model names offered in the suggestion dropdown. Defaults to []. */
  suggestedModels?: string[];
  /** Optional predicate; when provided, drives the green/red validity indicator. */
  validateModel?: (model: string) => boolean;
  /** Placeholder text for the input. Defaults to 'Enter model name'. */
  placeholder?: string;
  /** Field label text. Defaults to 'Custom Model'. */
  label?: string;
  /** Optional help text rendered under the input. */
  helpText?: string;
}

/**
 * Free-text model-name input with a filtered suggestion dropdown and an
 * optional validity indicator.
 *
 * The committed value is propagated via `onChange` on blur or when a
 * suggestion is selected; while typing, only local state changes.
 */
export function ModelInput({
  provider,
  value,
  onChange,
  suggestedModels = [],
  validateModel,
  placeholder = 'Enter model name',
  label = 'Custom Model',
  helpText,
}: ModelInputProps) {
  const [inputValue, setInputValue] = useState(value);
  const [isValid, setIsValid] = useState<boolean | null>(null);
  const [showSuggestions, setShowSuggestions] = useState(false);
  const [filteredSuggestions, setFilteredSuggestions] = useState<string[]>([]);

  // Validity for a candidate name: true/false from the caller's validator,
  // or null (no indicator) when the input is empty or no validator exists.
  const computeValidity = useCallback(
    (model: string): boolean | null => (model && validateModel ? validateModel(model) : null),
    [validateModel],
  );

  // Sync local state when the parent changes `value` externally.
  // Fix: also refresh the validity indicator here — previously only
  // `inputValue` was re-synced, leaving a stale green/red mark.
  useEffect(() => {
    setInputValue(value);
    setIsValid(computeValidity(value));
  }, [value, computeValidity]);

  // Narrow the suggestion list to models containing the typed text
  // (case-insensitive); show the full list while the input is empty.
  useEffect(() => {
    if (inputValue && suggestedModels.length > 0) {
      const needle = inputValue.toLowerCase();
      setFilteredSuggestions(suggestedModels.filter((model) => model.toLowerCase().includes(needle)));
    } else {
      setFilteredSuggestions(suggestedModels);
    }
  }, [inputValue, suggestedModels]);

  const handleInputChange = useCallback(
    (e: React.ChangeEvent<HTMLInputElement>) => {
      const newValue = e.target.value;
      setInputValue(newValue);
      setShowSuggestions(true);
      setIsValid(computeValidity(newValue));
    },
    [computeValidity],
  );

  // Commit the edited value on blur. The delayed hide keeps the dropdown
  // visible briefly; suggestion clicks no longer depend on this timing
  // because the <li> prevents the blur via onMouseDown (see below).
  const handleBlur = useCallback(() => {
    setTimeout(() => setShowSuggestions(false), 200);

    if (inputValue !== value) {
      onChange(inputValue);
    }
  }, [inputValue, value, onChange]);

  const handleSuggestionClick = useCallback(
    (model: string) => {
      setInputValue(model);
      onChange(model);
      setShowSuggestions(false);

      if (validateModel) {
        setIsValid(validateModel(model));
      }
    },
    [onChange, validateModel],
  );

  return (
    <div className="space-y-2">
      <Label htmlFor={`${provider}-model-input`}>{label}</Label>
      <div className="relative">
        <div className="relative">
          <Input
            id={`${provider}-model-input`}
            type="text"
            value={inputValue}
            onChange={handleInputChange}
            onBlur={handleBlur}
            onFocus={() => setShowSuggestions(true)}
            placeholder={placeholder}
            className={`pr-10 ${isValid === false ? 'border-red-500' : isValid === true ? 'border-green-500' : ''}`}
          />
          {isValid !== null && (
            <div className="absolute right-2 top-1/2 -translate-y-1/2">
              {isValid ? (
                <CheckCircle className="h-5 w-5 text-green-500" />
              ) : (
                <AlertCircle className="h-5 w-5 text-red-500" />
              )}
            </div>
          )}
        </div>

        {showSuggestions && filteredSuggestions.length > 0 && (
          <div className="absolute z-10 mt-1 w-full rounded-md bg-popover border border-border shadow-lg">
            <ul className="max-h-60 overflow-auto rounded-md py-1">
              {/*
               * Fix: preventDefault on mousedown keeps the input from
               * blurring, so selecting a suggestion cannot race the 200 ms
               * hide timeout in handleBlur (clicks could previously be lost
               * when the blur commit fired first on slow devices).
               */}
              {filteredSuggestions.map((model) => (
                <li
                  key={model}
                  className="cursor-pointer px-3 py-2 hover:bg-accent hover:text-accent-foreground"
                  onMouseDown={(e) => e.preventDefault()}
                  onClick={() => handleSuggestionClick(model)}
                >
                  <div className="font-medium">{model}</div>
                </li>
              ))}
            </ul>
          </div>
        )}
      </div>

      {helpText && <p className="text-sm text-muted-foreground">{helpText}</p>}

      {isValid === false && inputValue && (
        <div className="mt-2 rounded-md border border-red-500 bg-red-50 p-3 text-sm text-red-800 dark:bg-red-900/20 dark:text-red-400">
          <div className="flex items-start gap-2">
            <AlertCircle className="h-4 w-4 mt-0.5" />
            <span>This model name may not be valid for {provider}. Please check the model name.</span>
          </div>
        </div>
      )}
    </div>
  );
}
2 changes: 1 addition & 1 deletion app/lib/.server/llm/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ export const PROVIDER_COMPLETION_LIMITS: Record<string, number> = {
Mistral: 8192,
Ollama: 8192,
OpenRouter: 8192,
Perplexity: 8192,
Perplexity: 127072, // Sonar models support 128k context
Together: 8192,
xAI: 8192,
LMStudio: 8192,
Expand Down
170 changes: 170 additions & 0 deletions app/lib/modules/llm/providers/perplexity-utils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
/**
* Perplexity Model Validation and Management Utilities
* Author: Keoma Wright
* Purpose: Provides validation and management utilities for Perplexity AI models
*/

/**
 * Metadata describing a single Perplexity model entry in PERPLEXITY_MODELS.
 */
export interface PerplexityModelInfo {
  /** Model identifier sent to the Perplexity API (e.g. "sonar"). */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** Context window size in tokens, as configured for this app. */
  context: number;
  /** Capability grouping used by getPerplexityModelsByCategory(). */
  category: 'search' | 'reasoning' | 'research' | 'chat';
  /** True when the model is kept only for backward compatibility. */
  deprecated?: boolean;
  /** Suggested successor model id; set only for deprecated models. */
  replacement?: string;
}

/**
 * Comprehensive list of Perplexity models with metadata.
 *
 * This table is the single source of truth for the helpers below
 * (validation, lookup, suggestions, deprecation checks, grouping).
 * NOTE(review): model ids and context sizes should be verified against the
 * current Perplexity API documentation when updating this list.
 */
export const PERPLEXITY_MODELS: PerplexityModelInfo[] = [
  // Current generation models (2025)
  {
    id: 'sonar',
    name: 'Sonar (Latest)',
    context: 127072,
    category: 'search',
  },
  {
    id: 'sonar-reasoning',
    name: 'Sonar Reasoning',
    context: 127072,
    category: 'reasoning',
  },
  {
    id: 'sonar-deep-research',
    name: 'Sonar Deep Research',
    context: 127072,
    category: 'research',
  },

  // Llama-based models (still supported)
  {
    id: 'llama-3.1-sonar-small-128k-online',
    name: 'Llama 3.1 Sonar Small (Online)',
    context: 127072,
    category: 'search',
  },
  {
    id: 'llama-3.1-sonar-large-128k-online',
    name: 'Llama 3.1 Sonar Large (Online)',
    context: 127072,
    category: 'search',
  },
  {
    id: 'llama-3.1-sonar-small-128k-chat',
    name: 'Llama 3.1 Sonar Small (Chat)',
    context: 127072,
    category: 'chat',
  },
  {
    id: 'llama-3.1-sonar-large-128k-chat',
    name: 'Llama 3.1 Sonar Large (Chat)',
    context: 127072,
    category: 'chat',
  },

  // Deprecated models (kept for backward compatibility; each carries a
  // `replacement` id surfaced by checkDeprecatedModel()).
  {
    id: 'sonar-pro',
    name: 'Sonar Pro (Deprecated)',
    context: 8192,
    category: 'search',
    deprecated: true,
    replacement: 'sonar',
  },
  {
    id: 'sonar-reasoning-pro',
    name: 'Sonar Reasoning Pro (Deprecated)',
    context: 8192,
    category: 'reasoning',
    deprecated: true,
    replacement: 'sonar-reasoning',
  },
];

/**
 * Checks whether `modelId` exactly matches a model known to this module.
 */
export function validatePerplexityModel(modelId: string): boolean {
  return PERPLEXITY_MODELS.findIndex((entry) => entry.id === modelId) !== -1;
}

/**
 * Looks up the metadata record for `modelId`, or undefined if unknown.
 */
export function getPerplexityModelInfo(modelId: string): PerplexityModelInfo | undefined {
  for (const entry of PERPLEXITY_MODELS) {
    if (entry.id === modelId) {
      return entry;
    }
  }

  return undefined;
}

/**
 * Returns every model that is not flagged as deprecated.
 */
export function getActivePerplexityModels(): PerplexityModelInfo[] {
  const active: PerplexityModelInfo[] = [];

  for (const entry of PERPLEXITY_MODELS) {
    if (entry.deprecated !== true) {
      active.push(entry);
    }
  }

  return active;
}

/**
 * Returns models whose id or display name contains `partial`
 * (case-insensitive substring match).
 */
export function getPerplexityModelSuggestions(partial: string): PerplexityModelInfo[] {
  const needle = partial.toLowerCase();
  const matches = (text: string): boolean => text.toLowerCase().includes(needle);

  return PERPLEXITY_MODELS.filter((entry) => matches(entry.id) || matches(entry.name));
}

/**
 * Reports whether `modelId` is deprecated; when it is, the result also
 * carries the replacement id and a user-facing message.
 * Unknown ids are reported as not deprecated.
 */
export function checkDeprecatedModel(modelId: string): {
  deprecated: boolean;
  replacement?: string;
  message?: string;
} {
  const info = getPerplexityModelInfo(modelId);

  if (info?.deprecated) {
    return {
      deprecated: true,
      replacement: info.replacement,
      message: `Model "${modelId}" is deprecated. Please use "${info.replacement}" instead.`,
    };
  }

  return { deprecated: false };
}

/**
 * Groups all known models by their `category` field.
 * Keys are category names; values preserve the table's declaration order.
 */
export function getPerplexityModelsByCategory(): Record<string, PerplexityModelInfo[]> {
  const grouped: Record<string, PerplexityModelInfo[]> = {};

  for (const entry of PERPLEXITY_MODELS) {
    // Lazily create the bucket for a category on first encounter.
    (grouped[entry.category] ??= []).push(entry);
  }

  return grouped;
}

/**
 * Pattern matching for flexible model validation.
 *
 * Bug fix: the sonar pattern was `/^sonar(-\w+)?$/`, which allows at most
 * ONE hyphenated suffix segment and therefore rejected listed models such
 * as "sonar-deep-research" and "sonar-reasoning-pro". It now accepts any
 * number of suffix segments.
 */
export const PERPLEXITY_MODEL_PATTERNS = [
  // "sonar" plus zero or more "-segment" parts,
  // e.g. "sonar", "sonar-reasoning", "sonar-deep-research".
  /^sonar(-\w+)*$/,

  // Llama-based sonar models, e.g. "llama-3.1-sonar-small-128k-online".
  /^llama-\d+(\.\d+)?-sonar-(small|large)-\d+k-(online|chat)$/,
];

/**
 * Flexible validation: accepts any model id matching one of the known
 * naming patterns, even if it is absent from PERPLEXITY_MODELS.
 */
export function isValidPerplexityModelPattern(modelId: string): boolean {
  return PERPLEXITY_MODEL_PATTERNS.some((pattern) => pattern.test(modelId));
}
Loading
Loading