|
18 | 18 |
19 | 19 | public class AzureOpenAIChatModelFactory implements ChatModelFactory { |
20 | 20 |
21 | | - private final ModelProvider MODEL_PROVIDER = ModelProvider.AzureOpenAI;; |
| 21 | + private final ModelProvider MODEL_PROVIDER = ModelProvider.AzureOpenAI; |
22 | 22 |
23 | 23 | @Override |
24 | 24 | public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { |
25 | | - boolean isO1 = chatModel.getModelName().startsWith("o1-"); |
| 25 | + String modelName = chatModel.getModelName(); |
| 26 | + |
| 27 | + boolean isReasoningModel = isReasoningModelWithLimitedParameters(modelName); |
26 | 28 |
27 | 29 | final var builder = AzureOpenAiChatModel.builder() |
28 | 30 | .apiKey(getApiKey(MODEL_PROVIDER)) |
29 | 31 | .deploymentName(DevoxxGenieStateService.getInstance().getAzureOpenAIDeployment()) |
30 | 32 | .maxRetries(chatModel.getMaxRetries()) |
| 33 | + .temperature(isReasoningModel ? 1.0 : chatModel.getTemperature()) |
31 | 34 | .timeout(Duration.ofSeconds(chatModel.getTimeout())) |
32 | | - .topP(isO1 ? 1.0 : chatModel.getTopP()) |
| 35 | + .topP(isReasoningModel ? 1.0 : chatModel.getTopP()) |
33 | 36 | .endpoint(DevoxxGenieStateService.getInstance().getAzureOpenAIEndpoint()) |
34 | 37 | .listeners(getListener()); |
35 | 38 |
36 | 39 | return builder.build(); |
37 | 40 | } |
38 | 41 |
| 42 | + /** |
| 43 | + * Returns whether the model is a reasoning model with limited parameter support, so that default |
| 44 | + * values can be used instead of the user-provided configuration. |
| 45 | + * <p> |
| 46 | + * See <a href="https://learn.microsoft.com/en-us/azure/ai-foundry/openai/how-to/reasoning?tabs=python-secure%2Cpy#not-supported">Azure OpenAI reasoning models - parameters not supported</a> |
| 47 | + * for details on which parameters these models do not support. |
| 48 | + * |
| 49 | + * @param modelName name of the model to check |
| 50 | + * @return true if the model name indicates a reasoning model with limited parameter support |
| 51 | + */ |
| 52 | + static boolean isReasoningModelWithLimitedParameters(String modelName) { |
| 53 | + boolean isO1 = modelName.startsWith("o1"); |
| 54 | + boolean isO3 = modelName.startsWith("o3"); |
| 55 | + boolean isO4 = modelName.startsWith("o4-mini"); |
| 56 | + boolean isCodex = modelName.equalsIgnoreCase("codex-mini"); |
| 57 | + |
| 58 | + return isO1 || isO3 || isO4 || isCodex; |
| 59 | + } |
| 60 | + |
39 | 61 | @Override |
40 | 62 | public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) { |
41 | 63 | boolean isO1 = chatModel.getModelName().startsWith("o1-"); |
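To make the intent of the new helper concrete, here is a minimal test sketch. It assumes JUnit 5 and a hypothetical test class placed in the same package as `AzureOpenAIChatModelFactory` so the package-private static helper is visible; neither the test class nor its names are part of this change.

```java
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;

// Hypothetical test class; placed in the same package as AzureOpenAIChatModelFactory
// so the package-private static helper is accessible without reflection.
class AzureOpenAIChatModelFactoryTest {

    @Test
    void detectsReasoningModelsWithLimitedParameterSupport() {
        // o-series models are matched by prefix.
        assertTrue(AzureOpenAIChatModelFactory.isReasoningModelWithLimitedParameters("o1-mini"));
        assertTrue(AzureOpenAIChatModelFactory.isReasoningModelWithLimitedParameters("o3"));
        assertTrue(AzureOpenAIChatModelFactory.isReasoningModelWithLimitedParameters("o4-mini"));

        // codex-mini is matched by name, ignoring case.
        assertTrue(AzureOpenAIChatModelFactory.isReasoningModelWithLimitedParameters("Codex-Mini"));

        // Conventional chat models keep the user-configured temperature and topP.
        assertFalse(AzureOpenAIChatModelFactory.isReasoningModelWithLimitedParameters("gpt-4o"));
    }
}
```

For models the helper flags, the factory passes 1.0 for temperature and topP instead of the configured values, in line with the linked Azure documentation on unsupported parameters.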
|