@@ -174,10 +174,6 @@ OllamaApi.ChatRequest ollamaChatRequest(Prompt prompt, boolean stream) {
 			requestBuilder.withKeepAlive(mergedOptions.getKeepAlive());
 		}
 
-		if (mergedOptions.getTemplate() != null) {
-			requestBuilder.withTemplate(mergedOptions.getTemplate());
-		}
-
 		return requestBuilder.build();
 	}

@@ -416,7 +416,6 @@ public Message build() {
 	 * @param format The format to return the response in. Currently, the only accepted
 	 * value is "json".
 	 * @param keepAlive The duration to keep the model loaded in ollama while idle. https://pkg.go.dev/time#ParseDuration
-	 * @param template The prompt template (overrides what is defined in the Modelfile).
 	 * @param options Additional model parameters. You can use the {@link OllamaOptions} builder
 	 * to create the options then {@link OllamaOptions#toMap()} to convert the options into a
 	 * map.
@@ -428,7 +427,6 @@ public record ChatRequest(
 		@JsonProperty("stream") Boolean stream,
 		@JsonProperty("format") String format,
 		@JsonProperty("keep_alive") String keepAlive,
-		@JsonProperty("template") String template,
 		@JsonProperty("options") Map<String, Object> options) {
 
 	public static Builder builder(String model) {
@@ -442,7 +440,6 @@ public static class Builder {
 		private boolean stream = false;
 		private String format;
 		private String keepAlive;
-		private String template;
 		private Map<String, Object> options = Map.of();
 
 		public Builder(String model) {
@@ -470,11 +467,6 @@ public Builder withKeepAlive(String keepAlive) {
 			return this;
 		}
 
-		public Builder withTemplate(String template) {
-			this.template = template;
-			return this;
-		}
-
 		public Builder withOptions(Map<String, Object> options) {
 			Objects.requireNonNull(options, "The options can not be null.");

@@ -489,7 +481,7 @@ public Builder withOptions(OllamaOptions options) {
 		}
 
 		public ChatRequest build() {
-			return new ChatRequest(model, messages, stream, format, keepAlive, template, options);
+			return new ChatRequest(model, messages, stream, format, keepAlive, options);
 		}
 	}
 }
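
A minimal sketch of how a request is assembled after this change, using only the builder methods visible in the hunks above (builder(String), withKeepAlive, withOptions, build). The org.springframework.ai.ollama.api package name, the "mistral" model id, and the temperature value are assumptions for illustration; the message list and other setters are omitted for brevity.

import java.util.Map;

import org.springframework.ai.ollama.api.OllamaApi;

class ChatRequestWithoutTemplateSketch {

	public static void main(String[] args) {
		// The template field is gone from the request; any custom prompt
		// template now has to be defined in the model's Modelfile instead.
		OllamaApi.ChatRequest request = OllamaApi.ChatRequest.builder("mistral")
			.withKeepAlive("5m") // keep the model loaded for five minutes after the call
			.withOptions(Map.<String, Object>of("temperature", 0.7)) // advanced model parameters
			.build(); // messages omitted in this sketch

		System.out.println(request);
	}
}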
@@ -45,7 +45,7 @@ public class OllamaOptions implements ChatOptions, EmbeddingOptions {
 
 	public static final String DEFAULT_MODEL = OllamaModel.MISTRAL.id();
 
-	private static final List<String> NON_SUPPORTED_FIELDS = List.of("model", "format", "keep_alive", "template");
+	private static final List<String> NON_SUPPORTED_FIELDS = List.of("model", "format", "keep_alive");
 
 	// @formatter:off
 	/**
@@ -255,13 +255,6 @@ public class OllamaOptions implements ChatOptions, EmbeddingOptions {
 	 */
 	@JsonProperty("keep_alive") private String keepAlive;
 
-	/**
-	 * The prompt template to use (overrides what is defined in the Modelfile).
-	 * Part of Chat completion <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1">advanced parameters</a>.
-	 */
-	@JsonProperty("template") private String template;
-
-
 	/**
 	 * @param model The ollama model names to use. See the {@link OllamaModel} for the common models.
 	 */
@@ -288,11 +281,6 @@ public OllamaOptions withKeepAlive(String keepAlive) {
 		return this;
 	}
 
-	public OllamaOptions withTemplate(String template) {
-		this.template = template;
-		return this;
-	}
-
 	public OllamaOptions withUseNUMA(Boolean useNUMA) {
 		this.useNUMA = useNUMA;
 		return this;
@@ -469,14 +457,6 @@ public void setKeepAlive(String keepAlive) {
 		this.keepAlive = keepAlive;
 	}
 
-	public String getTemplate() {
-		return this.template;
-	}
-
-	public void setTemplate(String template) {
-		this.template = template;
-	}
-
 	public Boolean getUseNUMA() {
 		return this.useNUMA;
 	}
@@ -65,7 +65,6 @@ Here are the advanced request parameter for the Ollama chat client:
 | spring.ai.ollama.chat.options.model | The name of the https://github.com/ollama/ollama?tab=readme-ov-file#model-library[supported models] to use. | mistral
 | spring.ai.ollama.chat.options.format | The format to return a response in. Currently the only accepted value is `json` | -
 | spring.ai.ollama.chat.options.keep_alive | Controls how long the model will stay loaded into memory following the request | 5m
-| spring.ai.ollama.chat.options.template | The prompt template to use (overrides what is defined in the Modelfile) | -
 |====
 
 The `options` properties are based on the link:https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values[Ollama Valid Parameters and Values] and link:https://github.com/jmorganca/ollama/blob/main/api/types.go[Ollama Types]. The default values are based on: link:https://github.com/ollama/ollama/blob/b538dc3858014f94b099730a592751a5454cab0a/api/types.go#L364[Ollama type defaults].
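
A hedged sketch of the remaining options set programmatically rather than via properties. It assumes OllamaOptions lives in org.springframework.ai.ollama.api, keeps an implicit no-arg constructor, and that the fluent setters and toMap() referenced in the diff behave as their names suggest; the concrete values are illustrative only.

import java.util.Map;

import org.springframework.ai.ollama.api.OllamaOptions;

class OllamaOptionsWithoutTemplateSketch {

	public static void main(String[] args) {
		// Only setters shown in the diff are used; withTemplate(..) no longer
		// exists, so prompt templates must be baked into the Modelfile.
		OllamaOptions options = new OllamaOptions()
			.withKeepAlive("10m")  // maps to the top-level keep_alive request field
			.withUseNUMA(false);   // another request-time option carried by OllamaOptions

		// toMap() (referenced in the ChatRequest javadoc above) converts the
		// options into a plain map for the request's "options" payload.
		Map<String, Object> asMap = options.toMap();
		System.out.println(asMap);
	}
}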