1 change: 1 addition & 0 deletions README.md
@@ -29,6 +29,7 @@ https://github.com/user-attachments/assets/22770da4-af69-412c-ae05-1aae0fff4a10
- [Perplexity](https://www.perplexity.ai/): Fine-tuned LLaMA models for robust question generation.
- [Mistral](https://mistral.ai/): Lightweight models for fast and efficient processing.
- [Cohere](https://cohere.com/): Free to use with strengths in generating coherent questions.
- [Groq](https://groq.com/): Ultra-fast inference with free-tier access to Llama, Mixtral, and Gemma models.
- [Ollama](https://ollama.com/): Local LLMs for enhanced privacy and offline processing.

## Usage
2 changes: 2 additions & 0 deletions src/generators/generatorFactory.ts
@@ -8,6 +8,7 @@ import PerplexityGenerator from "./perplexity/perplexityGenerator";
import MistralGenerator from "./mistral/mistralGenerator";
import CohereGenerator from "./cohere/cohereGenerator";
import OllamaGenerator from "./ollama/ollamaGenerator";
import GroqGenerator from "./groq/groqGenerator";

export default class GeneratorFactory {
    private static generatorMap: { [key in Provider]: new (settings: QuizSettings) => Generator } = {
@@ -18,6 +19,7 @@ export default class GeneratorFactory {
        [Provider.MISTRAL]: MistralGenerator,
        [Provider.COHERE]: CohereGenerator,
        [Provider.OLLAMA]: OllamaGenerator,
        [Provider.GROQ]: GroqGenerator,
    };

    public static createInstance(settings: QuizSettings): Generator {
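The body of createInstance is collapsed in this diff. A minimal sketch of how the map is presumably consulted, assuming settings.provider holds one of the Provider keys (not confirmed by the diff):

    public static createInstance(settings: QuizSettings): Generator {
        // Resolve the constructor registered for the configured provider and instantiate it.
        const GeneratorConstructor = GeneratorFactory.generatorMap[settings.provider as Provider];
        return new GeneratorConstructor(settings);
    }
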
42 changes: 42 additions & 0 deletions src/generators/groq/groqGenerator.ts
@@ -0,0 +1,42 @@
import { Notice } from "obsidian";
import OpenAI from "openai";
import Generator from "../generator";
import { QuizSettings } from "../../settings/config";

export default class GroqGenerator extends Generator {
    private readonly groq: OpenAI;

    constructor(settings: QuizSettings) {
        super(settings);
        // Groq exposes an OpenAI-compatible API, so the OpenAI SDK is reused with a custom base URL.
        // dangerouslyAllowBrowser is required because Obsidian plugins run in a browser-like (Electron) environment.
        this.groq = new OpenAI({
            apiKey: this.settings.groqApiKey,
            baseURL: this.settings.groqBaseURL,
            dangerouslyAllowBrowser: true,
        });
    }

    public async generateQuiz(contents: string[]): Promise<string | null> {
        try {
            const response = await this.groq.chat.completions.create({
                model: this.settings.groqTextGenModel,
                messages: [
                    { role: "system", content: this.systemPrompt() },
                    { role: "user", content: this.userPrompt(contents) },
                ],
            });

            if (response.choices[0].finish_reason === "length") {
                new Notice("Generation truncated: Token limit reached");
            }

            return response.choices[0].message.content;
        } catch (error) {
            throw new Error((error as Error).message);
        }
    }

    public async shortOrLongAnswerSimilarity(userAnswer: string, answer: string): Promise<number> {
        throw new Error("Groq does not support grading short and long answer questions. Please switch to a provider that offers embedding models.");
    }
}
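
A minimal usage sketch (hypothetical: plugin stands for the loaded QuizGenerator instance, and the call runs inside an async function; in practice the generator is obtained through GeneratorFactory.createInstance rather than constructed directly):

    const generator = new GroqGenerator(plugin.settings);
    const quiz = await generator.generateQuiz(["# Photosynthesis\nLight reactions produce ATP and NADPH."]);
    if (quiz !== null) {
        console.log(quiz); // raw model output containing the generated quiz questions
    }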

26 changes: 26 additions & 0 deletions src/generators/groq/groqModels.ts
@@ -0,0 +1,26 @@
export const enum GroqTextGenModel {
    LLAMA_3_3_70B_VERSATILE = "llama-3.3-70b-versatile",
    LLAMA_3_1_70B_VERSATILE = "llama-3.1-70b-versatile",
    LLAMA_3_1_8B_INSTANT = "llama-3.1-8b-instant",
    LLAMA_3_2_1B_PREVIEW = "llama-3.2-1b-preview",
    LLAMA_3_2_3B_PREVIEW = "llama-3.2-3b-preview",
    LLAMA_3_2_11B_TEXT_PREVIEW = "llama-3.2-11b-text-preview",
    LLAMA_3_2_90B_TEXT_PREVIEW = "llama-3.2-90b-text-preview",
    MIXTRAL_8X7B_32768 = "mixtral-8x7b-32768",
    GEMMA2_9B_IT = "gemma2-9b-it",
    GEMMA_7B_IT = "gemma-7b-it",
}

export const groqTextGenModels: Record<GroqTextGenModel, string> = {
    [GroqTextGenModel.LLAMA_3_3_70B_VERSATILE]: "Llama 3.3 70B Versatile",
    [GroqTextGenModel.LLAMA_3_1_70B_VERSATILE]: "Llama 3.1 70B Versatile",
    [GroqTextGenModel.LLAMA_3_1_8B_INSTANT]: "Llama 3.1 8B Instant",
    [GroqTextGenModel.LLAMA_3_2_1B_PREVIEW]: "Llama 3.2 1B Preview",
    [GroqTextGenModel.LLAMA_3_2_3B_PREVIEW]: "Llama 3.2 3B Preview",
    [GroqTextGenModel.LLAMA_3_2_11B_TEXT_PREVIEW]: "Llama 3.2 11B Text Preview",
    [GroqTextGenModel.LLAMA_3_2_90B_TEXT_PREVIEW]: "Llama 3.2 90B Text Preview",
    [GroqTextGenModel.MIXTRAL_8X7B_32768]: "Mixtral 8x7B 32768",
    [GroqTextGenModel.GEMMA2_9B_IT]: "Gemma 2 9B IT",
    [GroqTextGenModel.GEMMA_7B_IT]: "Gemma 7B IT",
};
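
A quick illustration of how the lookup table is meant to be used (hypothetical snippet; the settings dropdown shown later passes this record to addOptions):

    import { GroqTextGenModel, groqTextGenModels } from "./groqModels";

    // The enum value is what gets stored in settings; the record maps it to the label shown in the UI.
    const stored: string = GroqTextGenModel.LLAMA_3_1_8B_INSTANT;           // "llama-3.1-8b-instant"
    const label = groqTextGenModels[GroqTextGenModel.LLAMA_3_1_8B_INSTANT]; // "Llama 3.1 8B Instant"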

2 changes: 2 additions & 0 deletions src/generators/providers.ts
@@ -6,6 +6,7 @@ export enum Provider {
MISTRAL = "MISTRAL",
COHERE = "COHERE",
OLLAMA = "OLLAMA",
GROQ = "GROQ",
}

export const providers: Record<Provider, string> = {
Expand All @@ -16,4 +17,5 @@ export const providers: Record<Provider, string> = {
[Provider.MISTRAL]: "Mistral",
[Provider.COHERE]: "Cohere",
[Provider.OLLAMA]: "Ollama",
[Provider.GROQ]: "Groq",
};
14 changes: 14 additions & 0 deletions src/settings/model/groq/groqConfig.ts
@@ -0,0 +1,14 @@
import { GroqTextGenModel } from "../../../generators/groq/groqModels";

export interface GroqConfig {
    groqApiKey: string;
    groqBaseURL: string;
    groqTextGenModel: string;
}

export const DEFAULT_GROQ_SETTINGS: GroqConfig = {
    groqApiKey: "",
    groqBaseURL: "https://api.groq.com/openai/v1",
    groqTextGenModel: GroqTextGenModel.LLAMA_3_3_70B_VERSATILE,
};
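
Since GroqTextGenModel is a const enum, the default above compiles down to the plain string "llama-3.3-70b-versatile", the same form the settings dropdown writes back into groqTextGenModel. A hypothetical check:

    import { DEFAULT_GROQ_SETTINGS } from "./groqConfig";

    console.log(DEFAULT_GROQ_SETTINGS.groqBaseURL);      // "https://api.groq.com/openai/v1"
    console.log(DEFAULT_GROQ_SETTINGS.groqTextGenModel); // "llama-3.3-70b-versatile"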

57 changes: 57 additions & 0 deletions src/settings/model/groq/groqSettings.ts
@@ -0,0 +1,57 @@
import { Setting } from "obsidian";
import QuizGenerator from "../../../main";
import { groqTextGenModels } from "../../../generators/groq/groqModels";
import { DEFAULT_GROQ_SETTINGS } from "./groqConfig";

const displayGroqSettings = (containerEl: HTMLElement, plugin: QuizGenerator, refreshSettings: () => void): void => {
    new Setting(containerEl)
        .setName("Groq API key")
        .setDesc("Enter your Groq API key here.")
        .addText(text =>
            text
                .setValue(plugin.settings.groqApiKey)
                .onChange(async (value) => {
                    plugin.settings.groqApiKey = value.trim();
                    await plugin.saveSettings();
                }).inputEl.type = "password"
        );

    new Setting(containerEl)
        .setName("Groq API base URL")
        .setDesc("Enter your Groq API base URL here.")
        .addButton(button =>
            button
                .setClass("clickable-icon")
                .setIcon("rotate-ccw")
                .setTooltip("Restore default")
                .onClick(async () => {
                    plugin.settings.groqBaseURL = DEFAULT_GROQ_SETTINGS.groqBaseURL;
                    await plugin.saveSettings();
                    refreshSettings();
                })
        )
        .addText(text =>
            text
                .setValue(plugin.settings.groqBaseURL)
                .onChange(async (value) => {
                    plugin.settings.groqBaseURL = value.trim();
                    await plugin.saveSettings();
                })
        );

    new Setting(containerEl)
        .setName("Generation model")
        .setDesc("Model used for quiz generation.")
        .addDropdown(dropdown =>
            dropdown
                .addOptions(groqTextGenModels)
                .setValue(plugin.settings.groqTextGenModel)
                .onChange(async (value) => {
                    plugin.settings.groqTextGenModel = value;
                    await plugin.saveSettings();
                })
        );
};

export default displayGroqSettings;

4 changes: 3 additions & 1 deletion src/settings/model/modelConfig.ts
@@ -6,8 +6,9 @@ import { DEFAULT_PERPLEXITY_SETTINGS, PerplexityConfig } from "./perplexity/perp
import { DEFAULT_MISTRAL_SETTINGS, MistralConfig } from "./mistral/mistralConfig";
import { CohereConfig, DEFAULT_COHERE_SETTINGS } from "./cohere/cohereConfig";
import { DEFAULT_OLLAMA_SETTINGS, OllamaConfig } from "./ollama/ollamaConfig";
import { DEFAULT_GROQ_SETTINGS, GroqConfig } from "./groq/groqConfig";

export interface ModelConfig extends OpenAIConfig, GoogleConfig, AnthropicConfig, PerplexityConfig, MistralConfig, CohereConfig, OllamaConfig {
export interface ModelConfig extends OpenAIConfig, GoogleConfig, AnthropicConfig, PerplexityConfig, MistralConfig, CohereConfig, OllamaConfig, GroqConfig {
    provider: string;
}

@@ -20,4 +21,5 @@ export const DEFAULT_MODEL_SETTINGS: ModelConfig = {
    ...DEFAULT_MISTRAL_SETTINGS,
    ...DEFAULT_COHERE_SETTINGS,
    ...DEFAULT_OLLAMA_SETTINGS,
    ...DEFAULT_GROQ_SETTINGS,
};
3 changes: 3 additions & 0 deletions src/settings/model/modelSettings.ts
@@ -8,6 +8,7 @@ import displayPerplexitySettings from "./perplexity/perplexitySettings";
import displayMistralSettings from "./mistral/mistralSettings";
import displayCohereSettings from "./cohere/cohereSettings";
import displayOllamaSettings from "./ollama/ollamaSettings";
import displayGroqSettings from "./groq/groqSettings";

const displayModelSettings = (containerEl: HTMLElement, plugin: QuizGenerator, refreshSettings: () => void): void => {
    new Setting(containerEl).setName("Model").setHeading();
@@ -40,6 +41,8 @@ const displayModelSettings = (containerEl: HTMLElement, plugin: QuizGenerator, r
        displayCohereSettings(containerEl, plugin, refreshSettings);
    } else if (plugin.settings.provider === Provider.OLLAMA) {
        displayOllamaSettings(containerEl, plugin, refreshSettings);
    } else if (plugin.settings.provider === Provider.GROQ) {
        displayGroqSettings(containerEl, plugin, refreshSettings);
    }
};
