6 files changed: +23 -3 lines changed
File 1 of 6:

@@ -55,6 +55,8 @@ export type LlmProcessOutput = {
     model: string
   }
   response: Strategy[] | null
+  inputTokens: number
+  outputTokens: number
   error?: string | null
 }
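The two counters extend LlmProcessOutput alongside the existing response and error fields. A minimal consumer sketch in TypeScript, assuming only the fields shown above and an imported LlmProcessOutput type; the sumTokenUsage helper is hypothetical and not part of this change:

// Hypothetical helper, not part of this PR: totals usage across several provider calls.
function sumTokenUsage(outputs: LlmProcessOutput[]): { inputTokens: number; outputTokens: number } {
  return outputs.reduce(
    (acc, o) => ({
      inputTokens: acc.inputTokens + o.inputTokens,
      outputTokens: acc.outputTokens + o.outputTokens
    }),
    { inputTokens: 0, outputTokens: 0 }
  )
}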
File 2 of 6:

@@ -14,6 +14,8 @@ export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
   let output = null
   let error = null
   const model = llmInput.model || GEMINI_MODELS.gemini20flashExp
+  let inputTokens: number | undefined
+  let outputTokens: number | undefined

   try {
     const aiModel = genAI.getGenerativeModel({
@@ -28,6 +30,8 @@ export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
     // console.log(JSON.stringify(result))

     const content = result.response.text()
+    inputTokens = result.response.usageMetadata?.promptTokenCount || 0
+    outputTokens = result.response.usageMetadata?.candidatesTokenCount || 0

     try {
       output = JSON.parse(content || '[]') as Strategy[]
@@ -46,6 +50,8 @@ export async function callGemini(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
       model
     },
     response: output,
+    inputTokens: inputTokens || 0,
+    outputTokens: outputTokens || 0,
     error
   }
 }
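In callGemini the counters come from result.response.usageMetadata, which may be absent, so both fall back to 0. The same extraction as a standalone sketch, assuming the @google/generative-ai SDK (consistent with the genAI.getGenerativeModel call above); the helper name is illustrative:

import type { GenerateContentResult } from '@google/generative-ai'

// Illustrative helper mirroring the diff: usageMetadata may be missing, so default both counters to 0.
function geminiTokenUsage(result: GenerateContentResult): { inputTokens: number; outputTokens: number } {
  return {
    inputTokens: result.response.usageMetadata?.promptTokenCount || 0,
    outputTokens: result.response.usageMetadata?.candidatesTokenCount || 0
  }
}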
File 3 of 6:

@@ -18,6 +18,8 @@ export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
   let output = null
   let error = null
   const model = llmInput.model || XAI_MODELS.grok3latest
+  let inputTokens: number | undefined
+  let outputTokens: number | undefined

   try {
     const completion = await apiClient.chat.completions.create({
@@ -36,6 +38,8 @@ export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
     })

     const outputContent = completion.choices[0].message.content || '{}'
+    inputTokens = completion.usage?.prompt_tokens || 0
+    outputTokens = completion.usage?.completion_tokens || 0

     try {
       const parsed = JSON.parse(outputContent) as { strategies: Strategy[] }
@@ -55,6 +59,8 @@ export async function callGrok(llmInput: LlmProcessProps): Promise<LlmProcessOutput> {
       model
     },
     response: output,
+    inputTokens: inputTokens || 0,
+    outputTokens: outputTokens || 0,
     error
   }
 }
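callGrok goes through an OpenAI-compatible chat.completions client, so the counters map from the standard usage block. A parallel sketch, assuming the openai SDK types back apiClient (an assumption; the client import is not shown in this diff):

import type OpenAI from 'openai'

// Illustrative helper mirroring the diff: usage is optional on the completion, so default both counters to 0.
function chatTokenUsage(completion: OpenAI.Chat.Completions.ChatCompletion): { inputTokens: number; outputTokens: number } {
  return {
    inputTokens: completion.usage?.prompt_tokens || 0,
    outputTokens: completion.usage?.completion_tokens || 0
  }
}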
File 4 of 6:

@@ -43,6 +43,8 @@ export async function llmMockProcess({ prompt }: LlmProcessProps): Promise<LlmProcessOutput> {
         name: 'L2 Yield Farming',
         risk: StrategyRisk.HIGH
       }
-    ]
+    ],
+    inputTokens: 0,
+    outputTokens: 0
   }
 }
File 5 of 6:

@@ -94,7 +94,9 @@ export const processAddress = async (
         provider: 'local',
         model: 'local'
       },
-      response: EMPTY_PORTFOLIO_STRATEGIES
+      response: EMPTY_PORTFOLIO_STRATEGIES,
+      inputTokens: 0,
+      outputTokens: 0
     }
   ]
 }
File 6 of 6:

@@ -47,7 +47,9 @@ const mockedLlmOutput: LlmProcessOutput = {
       name: 'Example USDC strategy name',
       risk: StrategyRisk.LOW
     }
-  ]
+  ],
+  inputTokens: 0,
+  outputTokens: 0
 }

 jest.mock('ambire-common/dist/src/consts/networks', () => {
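With the mocked output now carrying the counters, a test could pin them directly; a small illustrative Jest check, not part of this diff:

// Illustrative only: the mocked output should expose zeroed counters.
it('exposes token usage on the mocked LLM output', () => {
  expect(mockedLlmOutput.inputTokens).toBe(0)
  expect(mockedLlmOutput.outputTokens).toBe(0)
})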