@@ -62,7 +62,7 @@ First we need a Google Gemini API key. You can get one for free, see more detail
 
 [source,bash]
 ----
-export GOOGLE_AI_GEMINI_API_KEY=<your-google-ai-gemini-api-key>
+export QUARKUS_LANGCHAIN4J_AI_GEMINI_API_KEY=<your-google-ai-gemini-api-key>
 ----
 
 Next we need to install the LangChain4j dependencies:
@@ -71,13 +71,14 @@ Next we need to install the LangChain4j dependencies:
 ----
 <dependency>
     <groupId>io.quarkiverse.langchain4j</groupId>
-    <artifactId>quarkus-langchain4j-core</artifactId>
-    <version>0.24.0</version>
+    <artifactId>quarkus-langchain4j-ai-gemini</artifactId>
+    <version>0.25.0</version>
 </dependency>
 <dependency>
-    <groupId>dev.langchain4j</groupId>
-    <artifactId>langchain4j-google-ai-gemini</artifactId>
-    <version>1.0.0-beta1</version>
+    <groupId>io.quarkiverse.langchain4j</groupId>
+    <artifactId>quarkus-langchain4j-core-deployment</artifactId>
+    <version>0.25.0</version>
+    <scope>provided</scope>
 </dependency>
 ----
 
@@ -87,24 +88,31 @@ Next we need to wire up the Gemini LLM to the application (using your Google AI
 
 [source,java]
 ----
-@ApplicationScoped
-public class GoogleGeminiConfig {
-
-    @Produces
-    @ApplicationScoped
-    ChatLanguageModel model() {
-        return GoogleAiGeminiChatModel.builder()
-            .apiKey(System.getenv("GOOGLE_AI_GEMINI_API_KEY"))
-            .modelName("gemini-2.0-flash")
-            .build();
-    }
-}
+quarkus.langchain4j.ai.gemini.chat-model.model-id=gemini-2.0-flash
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
+----
+
+Logging the requests and responses is optional but can be helpful for debugging.
+
+=== Register the AI service
+
+Next, we register the `LeaseAnalyzer` interface as an AI service.
+
+[source,java]
 ----
+import dev.langchain4j.data.pdf.PdfFile;
+import dev.langchain4j.service.UserMessage;
+import io.quarkiverse.langchain4j.PdfUrl;
+import io.quarkiverse.langchain4j.RegisterAiService;
+
+@RegisterAiService(chatMemoryProviderSupplier = RegisterAiService.NoChatMemoryProviderSupplier.class)
+public interface LeaseAnalyzer {
 
-[NOTE]
-====
-Quarkus LangChain4j will provide autoconfiguration for Gemini in a future release. Currently, manual configuration is required since the Gemini integration is still evolving, with upstream LangChain4j offering three different modules for Google's AI APIs.
-====
+    @UserMessage("Analyze the given document")
+    LeaseReport analyze(@PdfUrl PdfFile pdfFile);
+}
+----
 
 === Define your data structure
 
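The `LeaseReport` type returned by the analyzer is not part of this diff. Assuming it is a plain Java record whose components the structured response is mapped onto, a minimal sketch could look like the following (the field names are illustrative guesses, not taken from the article):

[source,java]
----
import java.time.LocalDate;

// Hypothetical sketch only: the article's actual LeaseReport fields are not shown in this diff.
// A record works well here because the AI service's return type drives the structured (JSON)
// response mapping, so each component becomes a field the model is asked to fill in.
public record LeaseReport(
        String landlordName,      // assumed field
        String tenantName,        // assumed field
        LocalDate leaseStartDate, // assumed field
        LocalDate leaseEndDate,   // assumed field
        String monthlyRent) {     // assumed field
}
----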
@@ -132,7 +140,7 @@ Lastly, we need to create a `LeaseAnalyzerResource` class that will use the LLM
 [source,java]
 ----
 @Inject
-ChatLanguageModel model;
+LeaseAnalyzer analyzer;
 
 @PUT
 @Consumes(MediaType.MULTIPART_FORM_DATA)
@@ -148,27 +156,15 @@ public String upload(@RestForm("file") FileUpload fileUploadRequest) {
         // Encode PDF content to base64 for transmission
         String documentEncoded = Base64.getEncoder().encodeToString(fileBytes);
 
-        // Create user message with PDF content for analysis
-        UserMessage userMessage = UserMessage.from(
-                TextContent.from("Analyze the given document"),
-                PdfFileContent.from(documentEncoded, "application/pdf"));
-
-        // Build chat request with JSON response format
-        ChatRequest chatRequest = ChatRequest.builder()
-                .messages(userMessage)
-                .parameters(ChatRequestParameters.builder()
-                        .responseFormat(responseFormatFrom(LeaseReport.class))
-                        .build())
-                .build();
-
         log.info("Google Gemini analyzing....");
         long startTime = System.nanoTime();
-        ChatResponse chatResponse = model.chat(chatRequest);
+
+        LeaseReport result = analyzer.analyze(PdfFile.builder().base64Data(documentEncoded).build());
+
         long endTime = System.nanoTime();
-        String response = chatResponse.aiMessage().text();
-        log.infof("Google Gemini analyzed in %.2f seconds: %s", (endTime - startTime) / 1_000_000_000.0, response);
+        log.infof("Google Gemini analyzed in %.2f seconds: %s", (endTime - startTime) / 1_000_000_000.0, result);
 
-        return response;
+        return result;
     } catch (IOException e) {
         throw new RuntimeException(e);
     }
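For readability, here is the updated handler reassembled from the added and context lines of the hunk above. The parts not visible in this diff are assumptions: the hunk context still shows a `String` return type, so it is assumed here to change to `LeaseReport`, and the way `fileBytes` is read from the upload is a guess.

[source,java]
----
@PUT
@Consumes(MediaType.MULTIPART_FORM_DATA)
public LeaseReport upload(@RestForm("file") FileUpload fileUploadRequest) { // return type assumed
    try {
        // Assumption: the bytes are read from the uploaded temporary file (not shown in the diff)
        byte[] fileBytes = Files.readAllBytes(fileUploadRequest.uploadedFile());

        // Encode PDF content to base64 for transmission
        String documentEncoded = Base64.getEncoder().encodeToString(fileBytes);

        log.info("Google Gemini analyzing....");
        long startTime = System.nanoTime();

        // The AI service call replaces the hand-built ChatRequest from the previous version
        LeaseReport result = analyzer.analyze(PdfFile.builder().base64Data(documentEncoded).build());

        long endTime = System.nanoTime();
        log.infof("Google Gemini analyzed in %.2f seconds: %s", (endTime - startTime) / 1_000_000_000.0, result);

        return result;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
----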