@@ -107,29 +107,27 @@ Please find [an example in our Spring Boot application](../../sample-code/spring
107107First define a function that will be called by the LLM:
108108
109109``` java
110- public class MockWeatherService implements Function<Request, Response> {
111- public enum Unit { C, F }
112- public record Request(String location, Unit unit) {}
113- public record Response(double temp, Unit unit) {}
114-
115- public Response apply(Request request) {
116- return new Response(30.0, Unit.C);
110+ class WeatherMethod {
111+ enum Unit { C, F }
112+ record Request(String location, Unit unit) {}
113+ record Response(double temp, Unit unit) {}
114+
115+ @Tool(description = "Get the weather in location")
116+ Response getCurrentWeather(@ToolParam Request request) {
117+ int temperature = request.location.hashCode() % 30;
118+ return new Response(temperature, request.unit);
117119 }
118120}
119121```
120122
121- Then add your function to the options:
123+ Then add your tool to the options:
122124
123125``` java
124126OrchestrationChatOptions options = new OrchestrationChatOptions(config);
125- options.setToolCallbacks(
126- List.of(
127- FunctionToolCallback.builder(
128- "CurrentWeather", new MockWeatherService()) // (1) function name and instance
129- .description("Get the weather in location") // (2) function description
130- .inputType(MockWeatherService.Request.class) // (3) function input type
131- .build()));
127+ options.setToolCallbacks(List.of(ToolCallbacks.from(new WeatherMethod())));
128+
132129options.setInternalToolExecutionEnabled(false); // tool execution is not yet available in orchestration
130+
133131Prompt prompt = new Prompt("What is the weather in Potsdam and in Toulouse?", options);
134132
135133ChatResponse response = client.call(prompt);
0 commit comments