Skip to content

Commit 7299d4c

Browse files
kalenkevich authored and happyhuman committed
Bug fix: Update BaseLlm constructor to use a parameter object.
PiperOrigin-RevId: 828077838
1 parent 432d9bd commit 7299d4c

File tree

6 files changed

+93
-13
lines changed

6 files changed

+93
-13
lines changed

core/src/common.ts

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -30,6 +30,7 @@ export {Gemini} from './models/google_llm.js';
3030
export type {GeminiParams} from './models/google_llm.js';
3131
export type {LlmRequest} from './models/llm_request.js';
3232
export type {LlmResponse} from './models/llm_response.js';
33+
export {LLMRegistry} from './models/registry.js';
3334
export {BasePlugin} from './plugins/base_plugin.js';
3435
export {LoggingPlugin} from './plugins/logging_plugin.js';
3536
export {PluginManager} from './plugins/plugin_manager.js';

core/src/models/base_llm.ts

Lines changed: 7 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -12,13 +12,17 @@ import {LlmResponse} from './llm_response.js';
1212
* The BaseLLM class.
1313
*/
1414
export abstract class BaseLlm {
15+
readonly model: string;
16+
1517
/**
1618
* Creates an instance of BaseLLM.
17-
*
18-
* @param model The name of the LLM, e.g. gemini-1.5-flash or
19+
* @param params The parameters for creating a BaseLlm instance.
20+
* @param params.model The name of the LLM, e.g. gemini-1.5-flash or
1921
* gemini-1.5-flash-001.
2022
*/
21-
constructor(readonly model: string) {}
23+
constructor({model}: {model: string}) {
24+
this.model = model;
25+
}
2226

2327
/**
2428
* List of supported models in regex for LlmRegistry.

core/src/models/google_llm.ts

Lines changed: 8 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -66,14 +66,18 @@ export class Gemini extends BaseLlm {
6666
* @param params The parameters for creating a Gemini instance.
6767
*/
6868
constructor({
69-
model = 'gemini-2.5-flash',
69+
model,
7070
apiKey,
7171
vertexai,
7272
project,
7373
location,
74-
headers
75-
}: GeminiParams = {}) {
76-
super(model);
74+
headers,
75+
}: GeminiParams) {
76+
if (!model) {
77+
model = 'gemini-2.5-flash';
78+
}
79+
80+
super({model});
7781

7882
this.project = project;
7983
this.location = location;

core/src/models/registry.ts

Lines changed: 6 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,7 @@ import {Gemini} from './google_llm.js';
1313
* type[BaseLlm] equivalent in TypeScript, represents a class that can be new-ed
1414
* to create a BaseLlm instance.
1515
*/
16-
export type BaseLlmType = (new (model: string) => BaseLlm)&{
16+
export type BaseLlmType = (new (params: {model: string}) => BaseLlm)&{
1717
readonly supportedModels: Array<string|RegExp>;
1818
};
1919

@@ -68,7 +68,7 @@ export class LLMRegistry {
6868
* @returns The LLM instance.
6969
*/
7070
static newLlm(model: string): BaseLlm {
71-
return new (LLMRegistry.resolve(model))(model);
71+
return new (LLMRegistry.resolve(model))({model});
7272
}
7373

7474
private static _register(modelNameRegex: string|RegExp, llmCls: BaseLlmType) {
@@ -85,9 +85,10 @@ export class LLMRegistry {
8585
* Registers a new LLM class.
8686
* @param llmCls The class that implements the model.
8787
*/
88-
static register<T extends BaseLlm>(llmCls: (new(model: string) => T)&{
89-
readonly supportedModels: Array<string|RegExp>;
90-
}) {
88+
static register<T extends BaseLlm>(
89+
llmCls: (new(params: {model: string}) => T)&{
90+
readonly supportedModels: Array<string|RegExp>;
91+
}) {
9192
for (const regex of llmCls.supportedModels) {
9293
LLMRegistry._register(regex, llmCls);
9394
}

core/test/agents/llm_agent_test.ts

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,7 @@ class MockLlm extends BaseLlm {
3333
error: Error|null;
3434

3535
constructor(response: LlmResponse|null, error: Error|null = null) {
36-
super('mock-llm');
36+
super({model: 'mock-llm'});
3737
this.response = response;
3838
this.error = error;
3939
}

core/test/models/registry_test.ts

Lines changed: 70 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,70 @@
1+
import {BaseLlm, BaseLlmConnection, LlmAgent, LLMRegistry, LlmRequest, LlmResponse} from '@google/adk';
2+
import {Blob, Content, createModelContent, GenerateContentResponse} from '@google/genai';
3+
4+
class TestLlmConnection implements BaseLlmConnection {
5+
async sendHistory(history: Content[]): Promise<void> {
6+
return Promise.resolve();
7+
}
8+
9+
async sendContent(content: Content): Promise<void> {}
10+
11+
async sendRealtime(blob: Blob): Promise<void> {}
12+
13+
async * receive(): AsyncGenerator<LlmResponse, void, void> {}
14+
15+
async close(): Promise<void> {}
16+
}
17+
18+
class TestLlmModel extends BaseLlm {
19+
constructor({model}: {model: string}) {
20+
super({model});
21+
}
22+
23+
static override readonly supportedModels = ['test-llm-model'];
24+
25+
async *
26+
generateContentAsync(llmRequest: LlmRequest, stream?: boolean):
27+
AsyncGenerator<LlmResponse, void> {
28+
const generateContentResponse = new GenerateContentResponse();
29+
30+
generateContentResponse.candidates =
31+
[{content: createModelContent('test-llm-model-response')}];
32+
const candidate = generateContentResponse.candidates[0];
33+
34+
yield {
35+
content: candidate.content,
36+
groundingMetadata: candidate.groundingMetadata,
37+
usageMetadata: generateContentResponse.usageMetadata,
38+
finishReason: candidate.finishReason,
39+
};
40+
}
41+
42+
async connect(llmRequest: LlmRequest): Promise<BaseLlmConnection> {
43+
return new TestLlmConnection();
44+
}
45+
}
46+
47+
describe('LLMRegistry', () => {
48+
beforeAll(() => {
49+
LLMRegistry.register(TestLlmModel);
50+
});
51+
52+
it('resolves model to LLM class', () => {
53+
expect(LLMRegistry.newLlm('test-llm-model')).toBeInstanceOf(TestLlmModel);
54+
});
55+
56+
it('resolves the provided as a string model correctly in LlmAgent', () => {
57+
const agent = new LlmAgent({name: 'test_agent', model: 'test-llm-model'});
58+
59+
expect(agent.canonicalModel).toBeInstanceOf(TestLlmModel);
60+
});
61+
62+
it('resolves the provided as class model correctly in LlmAgent', () => {
63+
const agent = new LlmAgent({
64+
name: 'test_agent',
65+
model: new TestLlmModel({model: 'test-llm-model'})
66+
});
67+
68+
expect(agent.canonicalModel).toBeInstanceOf(TestLlmModel);
69+
})
70+
});

0 commit comments

Comments (0)