Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ Want to know more about its architecture and how it works? You can read it [here

## ✨ Features

🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, and more. Mix and match models based on your needs.
🤖 **Support for all major AI providers** - Use local LLMs through Ollama or connect to OpenAI, Anthropic Claude, Google Gemini, Groq, Novita AI, and more. Mix and match models based on your needs.

⚡ **Smart search modes** - Choose Speed Mode when you need quick answers, Balanced Mode for everyday searches, or Quality Mode for deep research.

Expand Down
2 changes: 2 additions & 0 deletions src/lib/models/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import GroqProvider from './groq';
import LemonadeProvider from './lemonade';
import AnthropicProvider from './anthropic';
import LMStudioProvider from './lmstudio';
import NovitaProvider from './novita';

export const providers: Record<string, ProviderConstructor<any>> = {
openai: OpenAIProvider,
Expand All @@ -18,6 +19,7 @@ export const providers: Record<string, ProviderConstructor<any>> = {
lemonade: LemonadeProvider,
anthropic: AnthropicProvider,
lmstudio: LMStudioProvider,
novita: NovitaProvider,
};

export const getModelProvidersUIConfigSection =
Expand Down
131 changes: 131 additions & 0 deletions src/lib/models/providers/novita/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
import { UIConfigField } from '@/lib/config/types';
import { getConfiguredModelProviderById } from '@/lib/config/serverRegistry';
import { Model, ModelList, ProviderMetadata } from '../../types';
import BaseEmbedding from '../../base/embedding';
import BaseModelProvider from '../../base/provider';
import BaseLLM from '../../base/llm';
import NovitaLLM from './novitaLLM';
import NovitaEmbedding from './novitaEmbedding';

/** Server-side configuration required to talk to the Novita AI API. */
interface NovitaConfig {
  // Bearer token sent on every request; sourced from the UI field below
  // or the NOVITA_API_KEY environment variable.
  apiKey: string;
}

/**
 * UI schema for the Novita provider's settings form. A single required
 * password field captures the API key; `env` lets the value be supplied
 * via NOVITA_API_KEY instead of the UI, and `scope: 'server'` keeps it
 * out of client-visible config.
 */
const providerConfigFields: UIConfigField[] = [
  {
    type: 'password',
    name: 'API Key',
    key: 'apiKey',
    description: 'Your Novita AI API key',
    required: true,
    placeholder: 'Novita AI API Key',
    env: 'NOVITA_API_KEY',
    scope: 'server',
  },
];

/**
 * Model provider for Novita AI's OpenAI-compatible API.
 *
 * Chat models are discovered dynamically from the `/models` endpoint;
 * embedding models are only available when configured manually (the
 * discovery endpoint does not advertise any).
 */
class NovitaProvider extends BaseModelProvider<NovitaConfig> {
  /** OpenAI-compatible API root, shared by discovery and model loading. */
  private static readonly baseURL = 'https://api.novita.ai/openai';

  constructor(id: string, name: string, config: NovitaConfig) {
    super(id, name, config);
  }

  /**
   * Fetches the chat models advertised by Novita's `/models` endpoint.
   * The embedding list is always empty here — Novita does not expose
   * embedding models via this endpoint.
   *
   * @throws Error when the endpoint returns a non-2xx status (e.g. a
   *   bad API key), instead of letting `res.json()` fail cryptically.
   */
  async getDefaultModels(): Promise<ModelList> {
    const res = await fetch(`${NovitaProvider.baseURL}/models`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${this.config.apiKey}`,
      },
    });

    // Fail fast with a readable message: without this check a 401/5xx
    // response either crashes JSON parsing (HTML body) or silently
    // produces an empty model list (JSON error body).
    if (!res.ok) {
      throw new Error(
        `Error fetching Novita models: ${res.status} ${res.statusText}`,
      );
    }

    const data = await res.json();

    const defaultChatModels: Model[] = [];

    // Defensive: only trust the response when it has the expected
    // OpenAI-style `{ data: [...] }` shape.
    if (data.data && Array.isArray(data.data)) {
      data.data.forEach((m: any) => {
        defaultChatModels.push({
          key: m.id,
          name: m.id,
        });
      });
    }

    return {
      embedding: [],
      chat: defaultChatModels,
    };
  }

  /**
   * Combines dynamically discovered models with any models the user
   * configured manually for this provider instance.
   */
  async getModelList(): Promise<ModelList> {
    const defaultModels = await this.getDefaultModels();
    const configProvider = getConfiguredModelProviderById(this.id)!;

    return {
      embedding: [
        ...defaultModels.embedding,
        ...configProvider.embeddingModels,
      ],
      chat: [...defaultModels.chat, ...configProvider.chatModels],
    };
  }

  /**
   * Instantiates a chat model by key, validating the key against the
   * combined (discovered + configured) model list first.
   *
   * @throws Error when `key` does not name a known chat model.
   */
  async loadChatModel(key: string): Promise<BaseLLM<any>> {
    const modelList = await this.getModelList();

    const exists = modelList.chat.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Novita Chat Model. Invalid Model Selected',
      );
    }

    return new NovitaLLM({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: NovitaProvider.baseURL,
    });
  }

  /**
   * Instantiates an embedding model by key, validating the key against
   * the configured embedding model list first.
   *
   * @throws Error when `key` does not name a known embedding model.
   */
  async loadEmbeddingModel(key: string): Promise<BaseEmbedding<any>> {
    const modelList = await this.getModelList();
    const exists = modelList.embedding.find((m) => m.key === key);

    if (!exists) {
      throw new Error(
        'Error Loading Novita Embedding Model. Invalid Model Selected.',
      );
    }

    return new NovitaEmbedding({
      apiKey: this.config.apiKey,
      model: key,
      baseURL: NovitaProvider.baseURL,
    });
  }

  /**
   * Validates raw (untyped) config from storage/UI into a NovitaConfig.
   *
   * @throws Error when `raw` is not an object or lacks an API key.
   */
  static parseAndValidate(raw: any): NovitaConfig {
    if (!raw || typeof raw !== 'object')
      throw new Error('Invalid config provided. Expected object');
    if (!raw.apiKey)
      throw new Error('Invalid config provided. API key must be provided');

    return {
      apiKey: String(raw.apiKey),
    };
  }

  /** Returns the UI form schema for configuring this provider. */
  static getProviderConfigFields(): UIConfigField[] {
    return providerConfigFields;
  }

  /** Returns the registry key and display name for this provider. */
  static getProviderMetadata(): ProviderMetadata {
    return {
      key: 'novita',
      name: 'Novita AI',
    };
  }
}
5 changes: 5 additions & 0 deletions src/lib/models/providers/novita/novitaEmbedding.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import OpenAIEmbedding from '../openai/openaiEmbedding';

// Novita's embedding API is OpenAI-compatible, so the OpenAI embedding
// client is reused unchanged; only the baseURL/apiKey differ (supplied
// by NovitaProvider at construction time).
class NovitaEmbedding extends OpenAIEmbedding {}

export default NovitaEmbedding;
5 changes: 5 additions & 0 deletions src/lib/models/providers/novita/novitaLLM.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import OpenAILLM from '../openai/openaiLLM';

// Novita's chat completion API is OpenAI-compatible, so the OpenAI LLM
// client is reused unchanged; only the baseURL/apiKey differ (supplied
// by NovitaProvider at construction time).
class NovitaLLM extends OpenAILLM {}

export default NovitaLLM;