
Commit 049ff57

Add documentation on how to use the OpenAI client to access DeepSeek
Signed-off-by: Alexandros Pappas <[email protected]>
1 parent 2f14597 commit 049ff57

File tree

5 files changed: +560 -0 lines changed

Lines changed: 343 additions & 0 deletions
@@ -0,0 +1,343 @@
/*
 * Copyright 2024-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.ai.openai.chat.proxy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Flux;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.converter.BeanOutputConverter;
import org.springframework.ai.converter.ListOutputConverter;
import org.springframework.ai.converter.MapOutputConverter;
import org.springframework.ai.model.function.FunctionCallback;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.ai.openai.api.tool.MockWeatherService;
import org.springframework.ai.openai.chat.ActorsFilms;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.io.Resource;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * @author Alexandros Pappas
 *
 * The DeepSeek API uses an API format compatible with OpenAI, allowing developers to
 * easily integrate it into existing systems that use the OpenAI SDK.
 *
 * For more information on DeepSeek behavior, refer to its API documentation:
 * <a href="https://api-docs.deepseek.com/">DeepSeek API</a>
 */
@SpringBootTest(classes = DeepSeekWithOpenAiChatModelIT.Config.class)
@EnabledIfEnvironmentVariable(named = "DEEPSEEK_API_KEY", matches = ".+")
@Disabled("Requires DeepSeek credits")
class DeepSeekWithOpenAiChatModelIT {

	private static final Logger logger = LoggerFactory.getLogger(DeepSeekWithOpenAiChatModelIT.class);

	private static final String DEEPSEEK_BASE_URL = "https://api.deepseek.com";

	private static final String DEFAULT_DEEPSEEK_MODEL = "deepseek-chat";

	@Value("classpath:/prompts/system-message.st")
	private Resource systemResource;

	@Autowired
	private OpenAiChatModel chatModel;

	@Test
	void roleTest() {
		UserMessage userMessage = new UserMessage(
				"Tell me about 3 famous pirates from the Golden Age of Piracy and what they did.");
		SystemPromptTemplate systemPromptTemplate = new SystemPromptTemplate(this.systemResource);
		Message systemMessage = systemPromptTemplate.createMessage(Map.of("name", "Bob", "voice", "pirate"));
		Prompt prompt = new Prompt(List.of(userMessage, systemMessage));
		ChatResponse response = this.chatModel.call(prompt);
		assertThat(response.getResults()).hasSize(1);
		assertThat(response.getResults().get(0).getOutput().getText()).contains("Blackbeard");
	}

	@Test
	void streamRoleTest() {
		UserMessage userMessage = new UserMessage(
				"Tell me about 3 famous pirates from the Golden Age of Piracy and what they did.");
		SystemPromptTemplate systemPromptTemplate = new SystemPromptTemplate(this.systemResource);
		Message systemMessage = systemPromptTemplate.createMessage(Map.of("name", "Bob", "voice", "pirate"));
		Prompt prompt = new Prompt(List.of(userMessage, systemMessage));
		Flux<ChatResponse> flux = this.chatModel.stream(prompt);

		List<ChatResponse> responses = flux.collectList().block();
		assertThat(responses.size()).isGreaterThan(1);

		String stitchedResponseContent = responses.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());

		assertThat(stitchedResponseContent).contains("Blackbeard");
	}

	@Test
	void streamingWithTokenUsage() {
		var promptOptions = OpenAiChatOptions.builder().streamUsage(true).seed(1).build();

		var prompt = new Prompt("List two colors of the Polish flag. Be brief.", promptOptions);

		var streamingTokenUsage = this.chatModel.stream(prompt).blockLast().getMetadata().getUsage();
		var referenceTokenUsage = this.chatModel.call(prompt).getMetadata().getUsage();

		assertThat(streamingTokenUsage.getPromptTokens()).isGreaterThan(0);
		assertThat(streamingTokenUsage.getGenerationTokens()).isGreaterThan(0);
		assertThat(streamingTokenUsage.getTotalTokens()).isGreaterThan(0);

		assertThat(streamingTokenUsage.getPromptTokens()).isEqualTo(referenceTokenUsage.getPromptTokens());
		assertThat(streamingTokenUsage.getGenerationTokens()).isEqualTo(referenceTokenUsage.getGenerationTokens());
		assertThat(streamingTokenUsage.getTotalTokens()).isEqualTo(referenceTokenUsage.getTotalTokens());

	}

	@Test
	void listOutputConverter() {
		DefaultConversionService conversionService = new DefaultConversionService();
		ListOutputConverter outputConverter = new ListOutputConverter(conversionService);

		String format = outputConverter.getFormat();
		String template = """
				List five {subject}
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template,
				Map.of("subject", "ice cream flavors", "format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		List<String> list = outputConverter.convert(generation.getOutput().getText());
		assertThat(list).hasSize(5);

	}

	@Test
	void mapOutputConverter() {
		MapOutputConverter outputConverter = new MapOutputConverter();

		String format = outputConverter.getFormat();
		String template = """
				Provide me a List of {subject}
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template,
				Map.of("subject", "numbers from 1 to 9 under the key name 'numbers'", "format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		Map<String, Object> result = outputConverter.convert(generation.getOutput().getText());
		assertThat(result.get("numbers")).isEqualTo(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));

	}

	@Test
	void beanOutputConverter() {

		BeanOutputConverter<ActorsFilms> outputConverter = new BeanOutputConverter<>(ActorsFilms.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography for a random actor.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		ActorsFilms actorsFilms = outputConverter.convert(generation.getOutput().getText());
		assertThat(actorsFilms.getActor()).isNotEmpty();
	}

	@Test
	void beanOutputConverterRecords() {

		BeanOutputConverter<DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord> outputConverter = new BeanOutputConverter<>(
				DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography of 5 movies for Tom Hanks.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());
		Generation generation = this.chatModel.call(prompt).getResult();

		DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord actorsFilms = outputConverter
			.convert(generation.getOutput().getText());
		logger.info("" + actorsFilms);
		assertThat(actorsFilms.actor()).isEqualTo("Tom Hanks");
		assertThat(actorsFilms.movies()).hasSize(5);
	}

	@Test
	void beanStreamOutputConverterRecords() {

		BeanOutputConverter<DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord> outputConverter = new BeanOutputConverter<>(
				DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord.class);

		String format = outputConverter.getFormat();
		String template = """
				Generate the filmography of 5 movies for Tom Hanks.
				{format}
				""";
		PromptTemplate promptTemplate = new PromptTemplate(template, Map.of("format", format));
		Prompt prompt = new Prompt(promptTemplate.createMessage());

		String generationTextFromStream = this.chatModel.stream(prompt)
			.collectList()
			.block()
			.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());

		DeepSeekWithOpenAiChatModelIT.ActorsFilmsRecord actorsFilms = outputConverter.convert(generationTextFromStream);
		logger.info("" + actorsFilms);
		assertThat(actorsFilms.actor()).isEqualTo("Tom Hanks");
		assertThat(actorsFilms.movies()).hasSize(5);
	}

	@Test
	@Disabled("The current version of the deepseek-chat model's Function Calling capability is unstable, which may result in looped calls or empty responses.")
	void functionCallTest() {

		UserMessage userMessage = new UserMessage("What's the weather like in San Francisco, Tokyo, and Paris?");

		List<Message> messages = new ArrayList<>(List.of(userMessage));

		var promptOptions = OpenAiChatOptions.builder()
			.functionCallbacks(List.of(FunctionCallback.builder()
				.function("getCurrentWeather", new MockWeatherService())
				.description("Get the weather in location")
				.inputType(MockWeatherService.Request.class)
				.build()))
			.build();

		ChatResponse response = this.chatModel.call(new Prompt(messages, promptOptions));

		logger.info("Response: {}", response);

		assertThat(response.getResult().getOutput().getText()).contains("30", "10", "15");
	}

	@Test
	@Disabled("The current version of the deepseek-chat model's Function Calling capability is unstable, which may result in looped calls or empty responses.")
	void streamFunctionCallTest() {

		UserMessage userMessage = new UserMessage(
				"What's the weather like in San Francisco, Tokyo, and Paris? Return the temperature in Celsius.");

		List<Message> messages = new ArrayList<>(List.of(userMessage));

		var promptOptions = OpenAiChatOptions.builder()
			.functionCallbacks(List.of(FunctionCallback.builder()
				.function("getCurrentWeather", new MockWeatherService())
				.description("Get the weather in location")
				.inputType(MockWeatherService.Request.class)
				.build()))
			.build();

		Flux<ChatResponse> response = this.chatModel.stream(new Prompt(messages, promptOptions));

		String content = response.collectList()
			.block()
			.stream()
			.map(ChatResponse::getResults)
			.flatMap(List::stream)
			.map(Generation::getOutput)
			.map(AssistantMessage::getText)
			.collect(Collectors.joining());
		logger.info("Response: {}", content);

		assertThat(content).contains("30", "10", "15");
	}

	@ParameterizedTest(name = "{0} : {displayName} ")
	@ValueSource(strings = { "deepseek-chat", "deepseek-reasoner" })
	void validateCallResponseMetadata(String model) {
		// @formatter:off
		ChatResponse response = ChatClient.create(this.chatModel).prompt()
				.options(OpenAiChatOptions.builder().model(model).build())
				.user("Tell me about 3 famous pirates from the Golden Age of Piracy and what they did")
				.call()
				.chatResponse();
		// @formatter:on

		logger.info(response.toString());
		assertThat(response.getMetadata().getId()).isNotEmpty();
		assertThat(response.getMetadata().getModel()).containsIgnoringCase(model);
		assertThat(response.getMetadata().getUsage().getPromptTokens()).isPositive();
		assertThat(response.getMetadata().getUsage().getGenerationTokens()).isPositive();
		assertThat(response.getMetadata().getUsage().getTotalTokens()).isPositive();
	}

	record ActorsFilmsRecord(String actor, List<String> movies) {

	}

	@SpringBootConfiguration
	static class Config {

		@Bean
		public OpenAiApi chatCompletionApi() {
			return new OpenAiApi(DEEPSEEK_BASE_URL, System.getenv("DEEPSEEK_API_KEY"));
		}

		@Bean
		public OpenAiChatModel openAiClient(OpenAiApi openAiApi) {
			return new OpenAiChatModel(openAiApi, OpenAiChatOptions.builder().model(DEFAULT_DEEPSEEK_MODEL).build());
		}

	}

}
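The test above exercises the key point of the commit: because DeepSeek exposes an OpenAI-compatible API, the existing Spring AI OpenAI client can be pointed at the DeepSeek endpoint with no new client code. The snippet below is a minimal sketch of that wiring, assuming the same OpenAiApi, OpenAiChatModel, and ChatClient classes used in the test; the class name DeepSeekQuickStart and the prompt text are illustrative, and .content() is the ChatClient convenience accessor for the response text.

// Minimal sketch: point the Spring AI OpenAI client at DeepSeek's
// OpenAI-compatible endpoint, mirroring the Config class in the test above.
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;

public class DeepSeekQuickStart {

	public static void main(String[] args) {
		// Same wiring as the test's Config class: DeepSeek base URL plus an API key from the environment.
		OpenAiApi deepSeekApi = new OpenAiApi("https://api.deepseek.com", System.getenv("DEEPSEEK_API_KEY"));
		OpenAiChatModel chatModel = new OpenAiChatModel(deepSeekApi,
				OpenAiChatOptions.builder().model("deepseek-chat").build());

		// Fluent ChatClient call, as in validateCallResponseMetadata().
		String answer = ChatClient.create(chatModel)
			.prompt()
			.user("Tell me about 3 famous pirates from the Golden Age of Piracy.")
			.call()
			.content();

		System.out.println(answer);
	}

}

In a Spring Boot application the same effect can typically be achieved through the OpenAI starter's configuration properties (base URL, API key, and model) instead of declaring the beans explicitly.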

spring-ai-docs/src/main/antora/modules/ROOT/nav.adoc

Lines changed: 1 addition & 0 deletions
@@ -18,6 +18,7 @@
 **** xref:api/chat/functions/anthropic-chat-functions.adoc[Anthropic Function Calling]
 *** xref:api/chat/azure-openai-chat.adoc[Azure OpenAI]
 **** xref:api/chat/functions/azure-open-ai-chat-functions.adoc[Azure OpenAI Function Calling]
+*** xref:api/chat/deepseek-chat.adoc[DeepSeek AI]
 *** xref:api/chat/google-vertexai.adoc[Google VertexAI]
 **** xref:api/chat/vertexai-gemini-chat.adoc[VertexAI Gemini]
 ***** xref:api/chat/functions/vertexai-gemini-chat-functions.adoc[Gemini Function Calling]

spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/comparison.adoc

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@ This table compares various Chat Models supported by Spring AI, detailing their
 
 | xref::api/chat/anthropic-chat.adoc[Anthropic Claude] | text, pdf, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12]
 | xref::api/chat/azure-openai-chat.adoc[Azure OpenAI] | text, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
+| xref::api/chat/deepseek-chat.adoc[DeepSeek (OpenAI-proxy)] | text ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16]
 | xref::api/chat/vertexai-gemini-chat.adoc[Google VertexAI Gemini] | text, pdf, image, audio, video ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
 | xref::api/chat/groq-chat.adoc[Groq (OpenAI-proxy)] | text, image ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::yes.svg[width=16] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::yes.svg[width=16]
 | xref::api/chat/huggingface.adoc[HuggingFace] | text ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12] ^a| image::no.svg[width=12]
