
Commit 31b402f

refactor: adhere to the code convention
1 parent 14abb96 commit 31b402f

File tree

6 files changed: +179 -205 lines changed


models/spring-ai-solar/src/main/java/org/springframework/ai/solar/aot/SolarRuntimeHints.java

Lines changed: 2 additions & 1 deletion

@@ -26,7 +26,8 @@
 import org.springframework.lang.Nullable;
 
 /**
- * The SolarRuntimeHints class is responsible for registering runtime hints for Solar API classes.
+ * The SolarRuntimeHints class is responsible for registering runtime hints for Solar API
+ * classes.
  *
  * @author Seunghyeon Ji
  */

models/spring-ai-solar/src/main/java/org/springframework/ai/solar/api/SolarApi.java

Lines changed: 86 additions & 114 deletions
@@ -86,9 +86,9 @@ public SolarApi(String baseUrl, String apiKey, RestClient.Builder restClientBuil
             h.setContentType(MediaType.APPLICATION_JSON);
         };
         this.restClient = restClientBuilder.baseUrl(baseUrl)
-                .defaultHeaders(finalHeaders)
-                .defaultStatusHandler(responseErrorHandler)
-                .build();
+            .defaultHeaders(finalHeaders)
+            .defaultStatusHandler(responseErrorHandler)
+            .build();
         this.webClient = webClientBuilder.baseUrl(baseUrl).defaultHeaders(finalHeaders).build();
     }
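For context, the chain being re-indented above is Spring's fluent `RestClient.Builder` API. Below is a minimal, self-contained sketch of the same setup pattern; the bearer-auth default header is an assumption (this hunk only shows the JSON content-type default), and the status handler is omitted.

```java
import java.util.function.Consumer;

import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestClient;

public final class RestClientSetupSketch {

    // Builds a RestClient roughly the way the SolarApi constructor does: shared
    // default headers, then the fluent chain with one call per continuation line.
    static RestClient build(String baseUrl, String apiKey) {
        Consumer<HttpHeaders> finalHeaders = h -> {
            // Bearer auth is an assumption; the hunk only shows the JSON content type.
            h.setBearerAuth(apiKey);
            h.setContentType(MediaType.APPLICATION_JSON);
        };
        return RestClient.builder()
            .baseUrl(baseUrl)
            .defaultHeaders(finalHeaders)
            .build();
    }

}
```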

@@ -103,10 +103,10 @@ public ResponseEntity<ChatCompletion> chatCompletionEntity(ChatCompletionRequest
         Assert.isTrue(!chatRequest.stream(), "Request must set the stream property to false.");
 
         return this.restClient.post()
-                .uri("/v1/solar/chat/completions")
-                .body(chatRequest)
-                .retrieve()
-                .toEntity(ChatCompletion.class);
+            .uri("/v1/solar/chat/completions")
+            .body(chatRequest)
+            .retrieve()
+            .toEntity(ChatCompletion.class);
     }
 
     /**
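`chatCompletionEntity` is the blocking variant and asserts that `stream` is false. A hedged caller sketch follows; the `ChatCompletionMessage` constructor and the `Role.USER` constant are assumptions, since that record sits outside this diff.

```java
import java.util.List;

import org.springframework.http.ResponseEntity;

import org.springframework.ai.solar.api.SolarApi;
import org.springframework.ai.solar.api.SolarApi.ChatCompletion;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionMessage;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionRequest;

public final class BlockingCallSketch {

    static ChatCompletion call(SolarApi solarApi, String userText) {
        // Shortcut constructor from this diff: (messages, model, temperature).
        // The ChatCompletionMessage signature and Role constant are assumptions.
        ChatCompletionRequest request = new ChatCompletionRequest(
                List.of(new ChatCompletionMessage(userText, ChatCompletionMessage.Role.USER)),
                "solar-mini", 0.7);

        ResponseEntity<ChatCompletion> response = solarApi.chatCompletionEntity(request);
        return response.getBody();
    }

}
```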
@@ -120,13 +120,13 @@ public Flux<ChatCompletionChunk> chatCompletionStream(ChatCompletionRequest chat
         Assert.isTrue(chatRequest.stream(), "Request must set the stream property to true.");
 
         return this.webClient.post()
-                .uri("/v1/solar/chat/completions", chatRequest.model)
-                .body(Mono.just(chatRequest), ChatCompletionRequest.class)
-                .retrieve()
-                .bodyToFlux(String.class)
-                .takeUntil(SSE_DONE_PREDICATE)
-                .filter(SSE_DONE_PREDICATE.negate())
-                .map(content -> ModelOptionsUtils.jsonToObject(content, ChatCompletionChunk.class));
+            .uri("/v1/solar/chat/completions", chatRequest.model)
+            .body(Mono.just(chatRequest), ChatCompletionRequest.class)
+            .retrieve()
+            .bodyToFlux(String.class)
+            .takeUntil(SSE_DONE_PREDICATE)
+            .filter(SSE_DONE_PREDICATE.negate())
+            .map(content -> ModelOptionsUtils.jsonToObject(content, ChatCompletionChunk.class));
     }
 
     /**
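The streaming call reads the SSE body as raw strings, stops at the terminal `[DONE]` event via `SSE_DONE_PREDICATE`, drops the sentinel, and only then deserializes each chunk. The predicate itself is not part of this hunk; a plausible sketch of the pattern, with the predicate shape assumed, is:

```java
import java.util.function.Predicate;

import reactor.core.publisher.Flux;

public final class SseDoneSketch {

    // Assumed shape of SSE_DONE_PREDICATE: matches the "[DONE]" sentinel that
    // data-only server-sent event streams use to signal completion.
    private static final Predicate<String> SSE_DONE_PREDICATE = "[DONE]"::equals;

    public static void main(String[] args) {
        Flux<String> rawSse = Flux.just("{\"id\":\"1\"}", "{\"id\":\"2\"}", "[DONE]", "{\"id\":\"ignored\"}");

        rawSse
            .takeUntil(SSE_DONE_PREDICATE)        // stop once the sentinel is seen
            .filter(SSE_DONE_PREDICATE.negate())  // drop the sentinel itself
            .subscribe(System.out::println);      // prints only the two JSON payloads
    }

}
```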
@@ -173,42 +173,42 @@ public String getValue() {
      * Creates a model response for the given chat conversation.
      *
      * @param messages A list of messages comprising the conversation so far.
-     * @param model The model name to generate the completion.
-     * Value in: "solar-pro" | "solar-mini" | "solar-mini-ja"
-     * @param maxTokens An optional parameter that limits the maximum number of tokens to generate.
-     * If max_tokens is set, sum of input tokens and max_tokens should be
-     * lower than or equal to context length of model. Default value is inf.
-     * @param stream An optional parameter that specifies whether a response should be sent as a stream. If set true,
-     * partial message deltas will be sent. Tokens will be sent as data-only server-sent events. Default value is false.
-     * @param temperature An optional parameter to set the sampling temperature.
-     * The value should lie between 0 and 2. Higher values like 0.8 result in a more random output,
-     * whereas lower values such as 0.2 enhance focus and determinism in the output. Default value is 0.7.
-     * not both.
-     * @param topP An optional parameter to trigger nucleus sampling.
-     * The tokens with top_p probability mass will be considered, which means, setting this value to 0.1 will consider
-     * tokens comprising the top 10% probability.
+     * @param model The model name to generate the completion. Value in: "solar-pro" |
+     * "solar-mini" | "solar-mini-ja"
+     * @param maxTokens An optional parameter that limits the maximum number of tokens to
+     * generate. If max_tokens is set, sum of input tokens and max_tokens should be lower
+     * than or equal to context length of model. Default value is inf.
+     * @param stream An optional parameter that specifies whether a response should be
+     * sent as a stream. If set true, partial message deltas will be sent. Tokens will be
+     * sent as data-only server-sent events. Default value is false.
+     * @param temperature An optional parameter to set the sampling temperature. The value
+     * should lie between 0 and 2. Higher values like 0.8 result in a more random output,
+     * whereas lower values such as 0.2 enhance focus and determinism in the output.
+     * Default value is 0.7. not both.
+     * @param topP An optional parameter to trigger nucleus sampling. The tokens with
+     * top_p probability mass will be considered, which means, setting this value to 0.1
+     * will consider tokens comprising the top 10% probability.
      * @param responseFormat An object specifying the format that the model must generate.
-     * To generate JSON object without providing schema (JSON Mode), set response_format: {\"type\": \"json_object\"}.
-     * To generate JSON object with your own schema (Structured Outputs),
-     * set response_format: {“type”: “json_schema”, “json_schema”: { … your json schema … }}.
+     * To generate JSON object without providing schema (JSON Mode), set response_format:
+     * {\"type\": \"json_object\"}. To generate JSON object with your own schema
+     * (Structured Outputs), set response_format: {“type”: “json_schema”, “json_schema”: {
+     * … your json schema … }}.
      */
     @JsonInclude(JsonInclude.Include.NON_NULL)
     public record ChatCompletionRequest(@JsonProperty("messages") List<ChatCompletionMessage> messages,
-            @JsonProperty("model") String model,
-            @JsonProperty("max_tokens") Integer maxTokens,
-            @JsonProperty("stream") Boolean stream,
-            @JsonProperty("temperature") Double temperature,
-            @JsonProperty("top_p") Double topP,
-            @JsonProperty("response_format") ResponseFormat responseFormat
-    ) {
+            @JsonProperty("model") String model, @JsonProperty("max_tokens") Integer maxTokens,
+            @JsonProperty("stream") Boolean stream, @JsonProperty("temperature") Double temperature,
+            @JsonProperty("top_p") Double topP, @JsonProperty("response_format") ResponseFormat responseFormat) {
+
         /**
          * Shortcut constructor for a chat completion request with the given messages and
          * model.
         * @param messages A list of messages comprising the conversation so far.
         * @param model ID of the model to use.
-         * @param temperature An optional parameter to set the sampling temperature.
-         * The value should lie between 0 and 2. Higher values like 0.8 result in a more random output,
-         * whereas lower values such as 0.2 enhance focus and determinism in the output. Default value is 0.7.
+         * @param temperature An optional parameter to set the sampling temperature. The
+         * value should lie between 0 and 2. Higher values like 0.8 result in a more
+         * random output, whereas lower values such as 0.2 enhance focus and determinism
+         * in the output. Default value is 0.7.
          */
         public ChatCompletionRequest(List<ChatCompletionMessage> messages, String model, Double temperature) {
             this(messages, model, null, null, temperature, null, null);
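The reflowed Javadoc above documents every component of `ChatCompletionRequest`. A hedged sketch of the canonical constructor in the field order shown by the record header (messages, model, max_tokens, stream, temperature, top_p, response_format):

```java
import java.util.List;

import org.springframework.ai.solar.api.SolarApi.ChatCompletionMessage;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionRequest;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionRequest.ResponseFormat;

public final class RequestBuildingSketch {

    static ChatCompletionRequest jsonModeRequest(List<ChatCompletionMessage> messages) {
        // Field order follows the record header in the diff:
        // messages, model, max_tokens, stream, temperature, top_p, response_format.
        return new ChatCompletionRequest(
                messages,
                "solar-pro",                              // model: "solar-pro" | "solar-mini" | "solar-mini-ja"
                1024,                                     // max_tokens
                false,                                    // stream
                0.7,                                      // temperature, 0..2, default 0.7
                null,                                     // top_p (nucleus sampling), unused here
                new ResponseFormat("json_object", null)); // JSON Mode, no schema
    }

}
```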
@@ -232,14 +232,16 @@ public ChatCompletionRequest(List<ChatCompletionMessage> messages, String model,
          * model and control for streaming.
          * @param messages A list of messages comprising the conversation so far.
          * @param model ID of the model to use.
-         * @param temperature An optional parameter to set the sampling temperature.
-         * The value should lie between 0 and 2. Higher values like 0.8 result in a more random output,
-         * whereas lower values such as 0.2 enhance focus and determinism in the output. Default value is 0.7.
+         * @param temperature An optional parameter to set the sampling temperature. The
+         * value should lie between 0 and 2. Higher values like 0.8 result in a more
+         * random output, whereas lower values such as 0.2 enhance focus and determinism
+         * in the output. Default value is 0.7.
          * @param stream If set, partial message deltas will be sent.Tokens will be sent
          * as data-only server-sent events as they become available, with the stream
          * terminated by a data: [DONE] message.
          */
-        public ChatCompletionRequest(List<ChatCompletionMessage> messages, String model, Double temperature, boolean stream) {
+        public ChatCompletionRequest(List<ChatCompletionMessage> messages, String model, Double temperature,
+                boolean stream) {
             this(messages, model, null, stream, temperature, null, null);
         }
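The wrapped constructor above is the streaming shortcut. A brief sketch pairing it with `chatCompletionStream`, whose signature appears in the earlier hunk; the model name and temperature are arbitrary example values.

```java
import java.util.List;

import reactor.core.publisher.Flux;

import org.springframework.ai.solar.api.SolarApi;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionChunk;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionMessage;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionRequest;

public final class StreamingRequestSketch {

    static Flux<ChatCompletionChunk> stream(SolarApi solarApi, List<ChatCompletionMessage> messages) {
        // (messages, model, temperature, stream): the shortcut reformatted above,
        // with stream set to true as chatCompletionStream requires.
        ChatCompletionRequest request = new ChatCompletionRequest(messages, "solar-mini", 0.7, true);
        return solarApi.chatCompletionStream(request);
    }

}
```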

@@ -250,7 +252,8 @@ public ChatCompletionRequest(List<ChatCompletionMessage> messages, String model,
          * @param jsonSchema The JSON schema to be used for structured output.
          */
         @JsonInclude(JsonInclude.Include.NON_NULL)
-        public record ResponseFormat(@JsonProperty("type") String type, @JsonProperty("json_schema") String jsonSchema) {
+        public record ResponseFormat(@JsonProperty("type") String type,
+                @JsonProperty("json_schema") String jsonSchema) {
         }
     }
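Per the `response_format` Javadoc earlier in the file, JSON Mode uses type `json_object` and Structured Outputs uses type `json_schema` plus a schema. Since this record models the schema as a `String`, a hedged sketch passes it as a raw JSON string; whether the server accepts exactly this shape is an assumption based on that Javadoc.

```java
import org.springframework.ai.solar.api.SolarApi.ChatCompletionRequest.ResponseFormat;

public final class ResponseFormatSketch {

    // JSON Mode: no schema needed.
    static final ResponseFormat JSON_MODE = new ResponseFormat("json_object", null);

    // Structured Outputs: the schema supplied as a raw JSON string, matching the
    // record's String component.
    static final ResponseFormat STRUCTURED = new ResponseFormat("json_schema", """
            {"type": "object", "properties": {"answer": {"type": "string"}}, "required": ["answer"]}
            """);

}
```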

@@ -308,41 +311,36 @@ public enum Role {
     }
 
     /**
-     * Represents a chat completion response returned by model, based on the provided input.
+     * Represents a chat completion response returned by model, based on the provided
+     * input.
      *
      * @param id A unique identifier for the chat completion. Each chunk has the same ID.
     * @param object The object type, which is always 'chat.completion'.
-     * @param created The Unix timestamp (in seconds) of when the chat completion was created.
-     * Each chunk has the same timestamp.
+     * @param created The Unix timestamp (in seconds) of when the chat completion was
+     * created. Each chunk has the same timestamp.
     * @param model A string representing the version of the model being used.
     * @param systemFingerprint This field is not yet available.
     * @param choices A list of chat completion choices.
     * @param usage Usage statistics for the completion request.
     */
     @JsonInclude(JsonInclude.Include.NON_NULL)
-    public record ChatCompletion(@JsonProperty("id") String id,
-            @JsonProperty("object") String object,
-            @JsonProperty("created") Long created,
-            @JsonProperty("model") String model,
-            @JsonProperty("system_fingerprint") Object systemFingerprint,
-            @JsonProperty("choices") List<Choice> choices,
-            @JsonProperty("usage") Usage usage) {
+    public record ChatCompletion(@JsonProperty("id") String id, @JsonProperty("object") String object,
+            @JsonProperty("created") Long created, @JsonProperty("model") String model,
+            @JsonProperty("system_fingerprint") Object systemFingerprint, @JsonProperty("choices") List<Choice> choices,
+            @JsonProperty("usage") Usage usage) {
        /**
         * Choice statistics for the completion request.
         *
-         * @param finishReason A unique identifier for the chat completion. Each chunk has the same ID.
+         * @param finishReason A unique identifier for the chat completion. Each chunk has
+         * the same ID.
         * @param index The index of the choice in the list of choices.
         * @param message A chat completion message generated by the model.
         * @param logprobs This field is not yet available.
         * @param usage Usage statistics for the completion request.
         */
-        public record Choice(
-                @JsonProperty("finish_reason") String finishReason,
-                @JsonProperty("index") int index,
-                @JsonProperty("message") Message message,
-                @JsonProperty("logprobs") Object logprobs,
-                @JsonProperty("usage") Usage usage
-        ) {
+        public record Choice(@JsonProperty("finish_reason") String finishReason, @JsonProperty("index") int index,
+                @JsonProperty("message") Message message, @JsonProperty("logprobs") Object logprobs,
+                @JsonProperty("usage") Usage usage) {
         }
 
         /**
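Given the record shapes above (`ChatCompletion` holding `choices` and `usage`, each `Choice` holding a `message`), a minimal sketch of unpacking a blocking response:

```java
import org.springframework.ai.solar.api.SolarApi.ChatCompletion;

public final class ResponseReadingSketch {

    static String firstContent(ChatCompletion completion) {
        // Records expose accessor methods named after their components.
        if (completion.choices() == null || completion.choices().isEmpty()) {
            return null;
        }
        return completion.choices().get(0).message().content();
    }

    static void logUsage(ChatCompletion completion) {
        ChatCompletion.Usage usage = completion.usage();
        if (usage != null) {
            System.out.printf("prompt=%d, completion=%d, total=%d%n",
                    usage.promptTokens(), usage.completionTokens(), usage.totalTokens());
        }
    }

}
```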
@@ -352,11 +350,8 @@ public record Choice(
         * @param role The role of the author of this message.
         * @param toolCalls A list of tools selected by model to call.
         */
-        public record Message(
-                @JsonProperty("content") String content,
-                @JsonProperty("role") String role,
-                @JsonProperty("tool_calls") ToolCalls toolCalls
-        ) {
+        public record Message(@JsonProperty("content") String content, @JsonProperty("role") String role,
+                @JsonProperty("tool_calls") ToolCalls toolCalls) {
         }
 
         /**
@@ -366,11 +361,8 @@ public record Message(
         * @param type The type of tool.
         * @param function A function object to call.
         */
-        public record ToolCalls(
-                @JsonProperty("id") String id,
-                @JsonProperty("type") String type,
-                @JsonProperty("function") Function function
-        ) {
+        public record ToolCalls(@JsonProperty("id") String id, @JsonProperty("type") String type,
+                @JsonProperty("function") Function function) {
         }
 
         /**
@@ -379,25 +371,21 @@ public record ToolCalls(
         * @param name The name of function to call.
         * @param arguments A JSON input to function.
         */
-        public record Function(
-                @JsonProperty("name") String name,
-                @JsonProperty("arguments") String arguments
-        ) {
+        public record Function(@JsonProperty("name") String name, @JsonProperty("arguments") String arguments) {
         }
 
         /**
         * Usage statistics for the completion request.
         *
         * @param completionTokens Number of tokens in the generated completion.
         * @param promptTokens Number of tokens in the prompt.
-         * @param totalTokens Total number of tokens used in the request (prompt + completion).
+         * @param totalTokens Total number of tokens used in the request (prompt +
+         * completion).
         */
         @JsonInclude(JsonInclude.Include.NON_NULL)
-        public record Usage(
-                @JsonProperty("completion_tokens") Integer completionTokens,
+        public record Usage(@JsonProperty("completion_tokens") Integer completionTokens,
                 @JsonProperty("prompt_tokens") Integer promptTokens,
-                @JsonProperty("total_tokens") Integer totalTokens
-        ) {
+                @JsonProperty("total_tokens") Integer totalTokens) {
         }
     }

@@ -407,36 +395,29 @@ public record Usage(
     *
     * @param id A unique identifier for the chat completion. Each chunk has the same ID.
     * @param object The object type, which is always 'chat.completion.chunk'.
-     * @param created The Unix timestamp (in seconds) of when the chat completion was created.
-     * Each chunk has the same timestamp.
+     * @param created The Unix timestamp (in seconds) of when the chat completion was
+     * created. Each chunk has the same timestamp.
     * @param model A string representing the version of the model being used.
     * @param systemFingerprint This field is not yet available.
     * @param choices A list of chat completion choices.
     */
     @JsonInclude(JsonInclude.Include.NON_NULL)
-    public record ChatCompletionChunk(
-            @JsonProperty("id") String id,
-            @JsonProperty("object") String object,
-            @JsonProperty("created") Long created,
-            @JsonProperty("model") String model,
-            @JsonProperty("system_fingerprint") Object systemFingerprint,
-            @JsonProperty("choices") List<Choice> choices
-    ) {
+    public record ChatCompletionChunk(@JsonProperty("id") String id, @JsonProperty("object") String object,
+            @JsonProperty("created") Long created, @JsonProperty("model") String model,
+            @JsonProperty("system_fingerprint") Object systemFingerprint,
+            @JsonProperty("choices") List<Choice> choices) {
        /**
         * A list of chat completion choices.
         *
-         * @param finishReason The reason the model stopped generating tokens.
-         * This will be stop if the model hit a natural stop point or a provided stop sequence,
+         * @param finishReason The reason the model stopped generating tokens. This will
+         * be stop if the model hit a natural stop point or a provided stop sequence,
         * length if the maximum number of tokens specified in the request was reached.
         * @param index The index of the choice in the list of choices.
         * @param delta A chat completion message generated by the model.
         * @param logprobs This field is not yet available.
         */
-        public record Choice(
-                @JsonProperty("finish_reason") String finishReason,
-                @JsonProperty("index") int index,
-                @JsonProperty("delta") Delta delta,
-                @JsonProperty("logprobs") Object logprobs) {
+        public record Choice(@JsonProperty("finish_reason") String finishReason, @JsonProperty("index") int index,
+                @JsonProperty("delta") Delta delta, @JsonProperty("logprobs") Object logprobs) {
         }
 
         /**
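On the streaming side, each `ChatCompletionChunk` carries `choices` whose `delta` holds the incremental `content`. A minimal sketch that prints a streamed answer as it arrives (blocking only to keep the sketch runnable):

```java
import reactor.core.publisher.Flux;

import org.springframework.ai.solar.api.SolarApi.ChatCompletionChunk;

public final class StreamConsumingSketch {

    static void printStream(Flux<ChatCompletionChunk> chunks) {
        chunks
            // Defensive: some chunks may carry no choice or no delta content.
            .mapNotNull(chunk -> (chunk.choices() == null || chunk.choices().isEmpty())
                    ? null : chunk.choices().get(0).delta().content())
            .doOnNext(System.out::print)
            // Blocking here is only for the sake of a self-contained example.
            .blockLast();
    }

}
```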
@@ -446,11 +427,8 @@ public record Choice(
         * @param role The role of the author of this message.
         * @param toolCalls A list of tools selected by model to call.
         */
-        public record Delta(
-                @JsonProperty("content") String content,
-                @JsonProperty("role") String role,
-                @JsonProperty("tool_calls") ToolCalls toolCalls
-        ) {
+        public record Delta(@JsonProperty("content") String content, @JsonProperty("role") String role,
+                @JsonProperty("tool_calls") ToolCalls toolCalls) {
         }
 
         /**
@@ -460,11 +438,8 @@ public record Delta(
         * @param type The type of tool.
         * @param function A function object to call.
         */
-        public record ToolCalls(
-                @JsonProperty("id") String id,
-                @JsonProperty("type") String type,
-                @JsonProperty("function") Function function
-        ) {
+        public record ToolCalls(@JsonProperty("id") String id, @JsonProperty("type") String type,
+                @JsonProperty("function") Function function) {
         }
 
         /**
@@ -473,10 +448,7 @@ public record ToolCalls(
         * @param name The name of function to call.
         * @param arguments A JSON input to function.
         */
-        public record Function(
-                @JsonProperty("name") String name,
-                @JsonProperty("arguments") String arguments
-        ) {
+        public record Function(@JsonProperty("name") String name, @JsonProperty("arguments") String arguments) {
         }
 
     }
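Both `Function` records carry the tool-call arguments as a raw JSON string, and the same `ModelOptionsUtils.jsonToObject` helper already used in `chatCompletionStream` can map that string onto a typed value. A hedged sketch, with a hypothetical argument record:

```java
import org.springframework.ai.model.ModelOptionsUtils;
import org.springframework.ai.solar.api.SolarApi.ChatCompletionChunk;

public final class ToolCallArgumentsSketch {

    // Hypothetical shape of one tool's arguments; define one per registered tool.
    record WeatherArguments(String location, String unit) {
    }

    static WeatherArguments parse(ChatCompletionChunk.ToolCalls toolCall) {
        // Function.arguments is documented above as "A JSON input to function".
        return ModelOptionsUtils.jsonToObject(toolCall.function().arguments(), WeatherArguments.class);
    }

}
```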

0 commit comments
