Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions lib/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ export * from './utils/llm/grok'
export * from './utils/llm/structures/zod'
export * from './utils/prompts'
export * from './utils/strategies'
export * from './utils/errors'
1 change: 1 addition & 0 deletions lib/types/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ export type LlmProcessOutput = {
model: string
}
response: Strategy[] | null
error?: string | null
}

export type ProcessAddressProps = {
Expand Down
3 changes: 3 additions & 0 deletions lib/utils/errors.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
/**
 * Converts an unknown thrown value into a human-readable string.
 *
 * @param error - Any value caught in a `catch` clause (typed `unknown` under
 *                `useUnknownInCatchVariables`).
 * @returns The `Error`'s message when available, otherwise a JSON or string
 *          rendering. Always returns a string: `JSON.stringify` can yield
 *          `undefined` (for `undefined`, function, or symbol inputs) and can
 *          throw on circular structures or `BigInt` values, so this falls
 *          back to `String(error)` in those cases.
 */
export function stringifyError(error: unknown): string {
  if (error instanceof Error) {
    return error.message
  }
  try {
    // JSON.stringify returns undefined (not a string) for undefined,
    // functions, and symbols — coalesce to a String() rendering.
    return JSON.stringify(error) ?? String(error)
  } catch {
    // Circular references and BigInt values make JSON.stringify throw;
    // an error-formatting helper must never throw itself.
    return String(error)
  }
}
13 changes: 9 additions & 4 deletions lib/utils/llm/gemini.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { GoogleGenerativeAI, Schema, SchemaType } from '@google/generative-ai'
import { LlmProcessOutput, LlmProcessProps, Strategy } from '../../types'
import { stringifyError } from '../errors'

export const GEMINI_MODELS = {
// TODO: more pre-config models
Expand Down Expand Up @@ -57,6 +58,7 @@ const schema = {

export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
let output = null
let error = null
const model = llmInput.model || GEMINI_MODELS.gemini20flashExp

try {
Expand All @@ -74,10 +76,12 @@ export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessO

try {
output = JSON.parse(content || '[]') as Strategy[]
} catch (error) {
console.error('Invalid JSON in Gemini AI output: ', error)
} catch (err) {
error = stringifyError(err)
console.error(`Invalid JSON in Gemini AI output: ${error}`)
}
} catch (error) {
} catch (err) {
error = stringifyError(err)
console.error(`Error querying Gemini AI: ${error}`)
}

Expand All @@ -86,6 +90,7 @@ export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessO
provider: 'Google',
model
},
response: output
response: output,
error
}
}
13 changes: 9 additions & 4 deletions lib/utils/llm/grok.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import OpenAI from 'openai'
import { zodResponseFormat } from 'openai/helpers/zod'
import { LlmProcessOutput, LlmProcessProps, Strategy } from '../../types'
import { StrategiesZodSchema } from './structures/zod'
import { stringifyError } from '../errors'

export const XAI_MODELS = {
grok2latest: 'grok-2-latest'
Expand All @@ -14,6 +15,7 @@ const apiClient = new OpenAI({

export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
let output = null
let error = null
const model = llmInput.model || XAI_MODELS.grok2latest

try {
Expand All @@ -36,10 +38,12 @@ export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOut
try {
const parsed = JSON.parse(outputContent) as { strategies: Strategy[] }
output = parsed.strategies || []
} catch (error) {
console.error('Invalid JSON in Grok output: ', error)
} catch (err) {
error = stringifyError(err)
console.error(`Invalid JSON in Grok output: ${error}`)
}
} catch (error) {
} catch (err) {
error = stringifyError(err)
console.error(`Error querying Grok: ${error}`)
}

Expand All @@ -48,6 +52,7 @@ export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOut
provider: 'xAI',
model
},
response: output
response: output,
error
}
}