@@ -1,56 +1,76 @@
 package koog
 
 import ai.koog.agents.core.agent.AIAgent
-import ai.koog.agents.core.tools.Tool
 import ai.koog.agents.core.tools.ToolRegistry
 import ai.koog.agents.core.tools.reflect.asTools
 import ai.koog.agents.features.eventHandler.feature.handleEvents
 import ai.koog.agents.mcp.McpToolRegistryProvider
 import ai.koog.prompt.executor.clients.google.GoogleModels
 import ai.koog.prompt.executor.clients.openai.OpenAIModels
 import ai.koog.prompt.executor.llms.all.simpleGoogleAIExecutor
+import ai.koog.prompt.executor.llms.all.simpleOllamaAIExecutor
 import ai.koog.prompt.executor.llms.all.simpleOpenAIExecutor
+import ai.koog.prompt.llm.LLMCapability
+import ai.koog.prompt.llm.LLMProvider
+import ai.koog.prompt.llm.LLModel
 import kotlinx.coroutines.runBlocking
-import kotlin.uuid.ExperimentalUuidApi
-import kotlin.uuid.Uuid
 
 val openAIApiKey = ""
 val apiKeyGoogle = ""
 
 
-@OptIn(ExperimentalUuidApi::class)
+
 fun main() = runBlocking {
+
+    val model = LLModel(
+        provider = LLMProvider.Ollama,
+        id = "gpt-oss",
+        // id = "llama3.1:8b",
+        capabilities = listOf(
+            LLMCapability.Temperature,
+            LLMCapability.Schema.JSON.Simple,
+            LLMCapability.Tools
+        ),
+    )
+
+
     val agent = AIAgent(
         // executor = simpleOpenAIExecutor(openAIApiKey),
-        executor = simpleGoogleAIExecutor(apiKeyGoogle),
+        // executor = simpleGoogleAIExecutor(apiKeyGoogle),
+        executor = simpleOllamaAIExecutor(),
         // llmModel = OpenAIModels.Chat.GPT4o,
-        llmModel = GoogleModels.Gemini1_5Pro,
+        // llmModel = GoogleModels.Gemini1_5Pro,
+        llmModel = model,
         toolRegistry = createToolSetRegistry()
     ) {
         handleEvents {
-            onToolCall { tool: Tool<*, *>, toolArgs: Tool.Args ->
-                println("Tool called: tool ${tool.name}, args $toolArgs")
+            onToolCall { eventContext ->
+                println("Tool called: ${eventContext.tool} with args ${eventContext.toolArgs}")
             }
-
-            onAgentRunError { strategyName: String, sessionUuid: Uuid?, throwable: Throwable ->
-                println("An error occurred: ${throwable.message}\n${throwable.stackTraceToString()}")
+            onAgentRunError { eventContext ->
+                println("An error occurred: ${eventContext.throwable.message}\n${eventContext.throwable.stackTraceToString()}")
             }
-
-            onAgentFinished { strategyName, result ->
-                println("Agent (strategy = $strategyName) finished with result: $result")
+            onAgentFinished { eventContext ->
+                println("Agent finished with result: ${eventContext.result}")
             }
         }
     }
 
-    agent.run(
+
+    val output = agent.run(
         """
         Get emission data for France and Germany for 2023 and 2024.
-        Use units of millions for the emissions data.
+        Also break down by sector.
+        Use 3 letter country codes.
+        Use units of millions for the emissions data.
         """.trimIndent()
     )
+
+    println(output)
 }
 
 
+
 suspend fun createToolSetRegistry(): ToolRegistry {
     val processClimateTrace = ProcessBuilder("java", "-jar",
         "./mcp-server/build/libs/serverAll.jar", "--stdio"
@@ -61,6 +81,6 @@ suspend fun createToolSetRegistry(): ToolRegistry {
     val localToolSetRegistry = ToolRegistry { tools(ClimateTraceTool().asTools()) }
 
     // Can use either local toolset or one based on MCP server
-    // return toolRegistryClimateTrace
-    return localToolSetRegistry
+    return toolRegistryClimateTrace
+    // return localToolSetRegistry
 }
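
The MCP-backed registry (`toolRegistryClimateTrace`) returned above is created in lines this diff does not show. As a rough sketch of how that wiring typically looks with Koog's MCP support, assuming the stock `McpToolRegistryProvider.defaultStdioTransport` and `fromTransport` helpers and that `processClimateTrace` is the started `Process` from the `ProcessBuilder` call above (the actual code in the repo may differ):

// Hypothetical reconstruction of the elided registry setup, not the actual commit contents.
val processClimateTrace = ProcessBuilder(
    "java", "-jar", "./mcp-server/build/libs/serverAll.jar", "--stdio"
).start()

// Expose every tool the MCP server advertises as a Koog ToolRegistry.
val toolRegistryClimateTrace = McpToolRegistryProvider.fromTransport(
    transport = McpToolRegistryProvider.defaultStdioTransport(processClimateTrace)
)

Connecting to the server and listing its tools is a suspending operation, which is presumably why `createToolSetRegistry()` is declared `suspend`.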
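Similarly, `ClimateTraceTool` (used for the local registry via `asTools()`) is not part of this diff. A minimal sketch of the shape such a class usually has with Koog's annotation-based `ToolSet` API; the class name, method, parameters, and descriptions below are invented for illustration:

import ai.koog.agents.core.tools.annotations.LLMDescription
import ai.koog.agents.core.tools.annotations.Tool
import ai.koog.agents.core.tools.reflect.ToolSet

// Hypothetical example only; the real ClimateTraceTool lives elsewhere in the repo.
class ClimateTraceToolExample : ToolSet {
    @Tool
    @LLMDescription("Returns CO2 emissions for a country and year")
    fun emissions(
        @LLMDescription("ISO 3166-1 alpha-3 country code, e.g. FRA") countryCode: String,
        @LLMDescription("Calendar year, e.g. 2023") year: Int
    ): String {
        // A real implementation would call the ClimateTrace API here.
        return "Emissions for $countryCode in $year: (not implemented in this sketch)"
    }
}

`asTools()` then reflects over the annotated methods and turns each one into a tool the agent can call.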