Commit 1f6c786

chore(deps): bump dev.langchain4j:langchain4j-bom from 0.36.2 to 1.0.0
Add the dev.langchain4j:langchain4j-community-bom dependency, because dev.langchain4j:langchain4j-qianfan was moved to dev.langchain4j:langchain4j-community-qianfan. Remove the proxy configuration from the OpenAI client, because proxy support was removed from langchain4j.
1 parent 09d05fa commit 1f6c786
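
For orientation before the per-file diffs: langchain4j 1.0.0 renames ChatLanguageModel to ChatModel and StreamingChatLanguageModel to StreamingChatModel, and generate() becomes chat(), which now returns a ChatResponse. A minimal Kotlin sketch of the new call shape; the OpenAI provider, model id, and env-var key below are illustrative assumptions, not part of this commit:

import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.ChatModel
import dev.langchain4j.model.openai.OpenAiChatModel

fun main() {
    // 0.36.x: ChatLanguageModel.generate(...).content().text()
    // 1.0.0:  ChatModel.chat(...).aiMessage().text()
    val model: ChatModel = OpenAiChatModel.builder()
        .apiKey(System.getenv("OPENAI_API_KEY")) // assumption: key supplied via env var
        .modelName("gpt-4o-mini")                // illustrative model id
        .build()

    val response = model.chat(listOf(UserMessage.from("user", "Hello")))
    println(response.aiMessage().text())
}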

File tree: 17 files changed, +66 -97 lines

build.gradle.kts

Lines changed: 6 additions & 3 deletions
@@ -100,11 +100,9 @@ dependencies {
     implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.8.1")

     // langchain4j integrations
-    implementation(platform("dev.langchain4j:langchain4j-bom:0.36.2"))
+    implementation(platform("dev.langchain4j:langchain4j-bom:1.0.0"))
     implementation("dev.langchain4j:langchain4j-open-ai")
     implementation("dev.langchain4j:langchain4j-ollama")
-    // The Baidu Qianfan Large Model Platform, including the ERNIE series, can be accessed at https://docs.langchain4j.dev/integrations/language-models/qianfan/.
-    implementation("dev.langchain4j:langchain4j-qianfan")
     implementation("dev.langchain4j:langchain4j-vertex-ai-gemini")
     implementation("dev.langchain4j:langchain4j-anthropic")
     implementation("dev.langchain4j:langchain4j-azure-open-ai")
@@ -113,6 +111,11 @@ dependencies {
     implementation("dev.langchain4j:langchain4j-google-ai-gemini")
     implementation("dev.langchain4j:langchain4j-github-models")
     implementation("dev.langchain4j:langchain4j-mistral-ai")
+
+    implementation(platform("dev.langchain4j:langchain4j-community-bom:1.0.0-beta5"))
+    // The Baidu Qianfan Large Model Platform, including the ERNIE series, can be accessed at https://docs.langchain4j.dev/integrations/language-models/qianfan/.
+    implementation("dev.langchain4j:langchain4j-community-qianfan")
+
     // tests
     testImplementation("org.junit.jupiter:junit-jupiter-params:5.12.2")
     testImplementation("org.junit.jupiter:junit-jupiter:5.12.2")

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 0 additions & 1 deletion
@@ -114,7 +114,6 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
         openAiLlmClientConfiguration?.apply {
             host = appSettings.openAIHost
             appSettings.openAISocketTimeout.toIntOrNull()?.let { timeout = it }
-            proxyUrl = appSettings.proxyUrl
             modelId = appSettings.openAIModelId
             temperature = appSettings.openAITemperature

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientService.kt

Lines changed: 17 additions & 19 deletions
@@ -24,12 +24,11 @@ import com.intellij.platform.ide.progress.withBackgroundProgress
 import com.intellij.ui.components.JBLabel
 import com.intellij.vcs.commit.AbstractCommitWorkflowHandler
 import com.intellij.vcs.commit.isAmendCommitMode
-import dev.langchain4j.data.message.AiMessage
 import dev.langchain4j.data.message.UserMessage
-import dev.langchain4j.model.StreamingResponseHandler
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
-import dev.langchain4j.model.output.Response
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
+import dev.langchain4j.model.chat.response.ChatResponse
+import dev.langchain4j.model.chat.response.StreamingChatResponseHandler
 import git4idea.GitCommit
 import git4idea.history.GitHistoryUtils
 import git4idea.repo.GitRepositoryManager
@@ -45,9 +44,9 @@ abstract class LLMClientService<C : LLMClientConfiguration>(private val cs: Coro
         return listOf()
     }

-    abstract suspend fun buildChatModel(client: C): ChatLanguageModel
+    abstract suspend fun buildChatModel(client: C): ChatModel

-    abstract suspend fun buildStreamingChatModel(client: C): StreamingChatLanguageModel?
+    abstract suspend fun buildStreamingChatModel(client: C): StreamingChatModel?

     fun refreshModels(client: C, comboBox: ComboBox<String>, label: JBLabel) {
         label.text = message("settings.refreshModels.running")
@@ -155,33 +154,32 @@ abstract class LLMClientService<C : LLMClientConfiguration>(private val cs: Coro
         }, onError = onError)
     }

-    private suspend fun sendStreamingRequest(streamingModel: StreamingChatLanguageModel, text: String, onSuccess: suspend (r: String) -> Unit) {
+    private suspend fun sendStreamingRequest(streamingModel: StreamingChatModel, text: String, onSuccess: suspend (r: String) -> Unit) {
         var response = ""
         val completionDeferred = CompletableDeferred<String>()

         withContext(Dispatchers.IO) {
-            streamingModel.generate(
+            streamingModel.chat(
                 listOf(
                     UserMessage.from(
                         "user",
                         text
                     )
                 ),
-                object : StreamingResponseHandler<AiMessage> {
-                    override fun onNext(token: String?) {
-                        response += token
+                object : StreamingChatResponseHandler {
+                    override fun onPartialResponse(partialResponse: String?) {
+                        response += partialResponse
                         cs.launch {
                             onSuccess(response)
                         }
                     }

-                    override fun onError(error: Throwable) {
-                        completionDeferred.completeExceptionally(error)
+                    override fun onCompleteResponse(completeResponse: ChatResponse) {
+                        completionDeferred.complete(completeResponse.aiMessage().text())
                     }

-                    override fun onComplete(response: Response<AiMessage>) {
-                        super.onComplete(response)
-                        completionDeferred.complete(response.content().text())
+                    override fun onError(error: Throwable) {
+                        completionDeferred.completeExceptionally(error)
                     }
                 }
             )
@@ -194,14 +192,14 @@ abstract class LLMClientService<C : LLMClientConfiguration>(private val cs: Coro
     private suspend fun sendRequest(client: C, text: String, onSuccess: suspend (r: String) -> Unit) {
         val model = buildChatModel(client)
         val response = withContext(Dispatchers.IO) {
-            model.generate(
+            model.chat(
                 listOf(
                     UserMessage.from(
                         "user",
                         text
                     )
                 )
-            ).content().text()
+            ).aiMessage().text()
         }
         onSuccess(response)
     }
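
The streaming side of the migration follows the same pattern: StreamingResponseHandler<AiMessage> with onNext/onComplete becomes StreamingChatResponseHandler with onPartialResponse/onCompleteResponse, and the complete callback now receives a ChatResponse. A self-contained sketch mirroring the diff above; streamOnce is a hypothetical helper, and the model can be any configured StreamingChatModel (builders shown in the files below):

import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.StreamingChatModel
import dev.langchain4j.model.chat.response.ChatResponse
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler

// Streams a single completion, printing partial chunks as they arrive.
fun streamOnce(model: StreamingChatModel, prompt: String) {
    model.chat(
        listOf(UserMessage.from("user", prompt)),
        object : StreamingChatResponseHandler {
            override fun onPartialResponse(partialResponse: String?) {
                print(partialResponse) // incremental chunk of the reply
            }

            override fun onCompleteResponse(completeResponse: ChatResponse) {
                // The full reply is available once streaming finishes.
                println("\n[complete] ${completeResponse.aiMessage().text().length} chars")
            }

            override fun onError(error: Throwable) {
                error.printStackTrace()
            }
        }
    )
}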

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/anthropic/AnthropicClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -11,8 +11,8 @@ import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
 import dev.langchain4j.model.anthropic.AnthropicChatModel
 import dev.langchain4j.model.anthropic.AnthropicStreamingChatModel
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
@@ -26,7 +26,7 @@ class AnthropicClientService(private val cs: CoroutineScope) : LLMClientService<
         fun getInstance(): AnthropicClientService = service()
     }

-    override suspend fun buildChatModel(client: AnthropicClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: AnthropicClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         val builder = AnthropicChatModel.builder()
             .modelName(client.modelId)
@@ -49,7 +49,7 @@ class AnthropicClientService(private val cs: CoroutineScope) : LLMClientService<

     }

-    override suspend fun buildStreamingChatModel(client: AnthropicClientConfiguration): StreamingChatLanguageModel {
+    override suspend fun buildStreamingChatModel(client: AnthropicClientConfiguration): StreamingChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         val builder = AnthropicStreamingChatModel.builder()
             .modelName(client.modelId)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/azureOpenAi/AzureOpenAiClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -11,8 +11,8 @@ import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
 import dev.langchain4j.model.azure.AzureOpenAiChatModel
 import dev.langchain4j.model.azure.AzureOpenAiStreamingChatModel
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
@@ -27,7 +27,7 @@ class AzureOpenAiClientService(private val cs: CoroutineScope) : LLMClientServic
         fun getInstance(): AzureOpenAiClientService = service()
     }

-    override suspend fun buildChatModel(client: AzureOpenAiClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: AzureOpenAiClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return AzureOpenAiChatModel.builder()
             .deploymentName(client.modelId)
@@ -39,7 +39,7 @@ class AzureOpenAiClientService(private val cs: CoroutineScope) : LLMClientServic
             .build()
     }

-    override suspend fun buildStreamingChatModel(client: AzureOpenAiClientConfiguration): StreamingChatLanguageModel {
+    override suspend fun buildStreamingChatModel(client: AzureOpenAiClientConfiguration): StreamingChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return AzureOpenAiStreamingChatModel.builder()
             .deploymentName(client.modelId)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/geminiGoogle/GeminiGoogleClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -9,8 +9,8 @@ import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import dev.langchain4j.model.googleai.GoogleAiGeminiChatModel
 import dev.langchain4j.model.googleai.GoogleAiGeminiStreamingChatModel
 import kotlinx.coroutines.CoroutineScope
@@ -25,7 +25,7 @@ class GeminiGoogleClientService(private val cs: CoroutineScope) : LLMClientServi
         fun getInstance(): GeminiGoogleClientService = service()
     }

-    override suspend fun buildChatModel(client: GeminiGoogleClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: GeminiGoogleClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return GoogleAiGeminiChatModel.builder()
             .apiKey(token)
@@ -36,7 +36,7 @@ class GeminiGoogleClientService(private val cs: CoroutineScope) : LLMClientServi
             .build()
     }

-    override suspend fun buildStreamingChatModel(client: GeminiGoogleClientConfiguration) : StreamingChatLanguageModel {
+    override suspend fun buildStreamingChatModel(client: GeminiGoogleClientConfiguration) : StreamingChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return GoogleAiGeminiStreamingChatModel.builder()
             .apiKey(token)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/geminiVertex/GeminiVertexClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -3,8 +3,8 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiVerte
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientService
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel
 import dev.langchain4j.model.vertexai.VertexAiGeminiStreamingChatModel
 import kotlinx.coroutines.CoroutineScope
@@ -17,7 +17,7 @@ class GeminiVertexClientService(private val cs: CoroutineScope): LLMClientServic
         fun getInstance(): GeminiVertexClientService = service()
     }

-    override suspend fun buildChatModel(client: GeminiClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: GeminiClientConfiguration): ChatModel {
         return VertexAiGeminiChatModel.builder()
             .project(client.projectId)
             .location(client.location)
@@ -28,7 +28,7 @@ class GeminiVertexClientService(private val cs: CoroutineScope): LLMClientServic
             .build()
     }

-    override suspend fun buildStreamingChatModel(client: GeminiClientConfiguration): StreamingChatLanguageModel {
+    override suspend fun buildStreamingChatModel(client: GeminiClientConfiguration): StreamingChatModel {
         return VertexAiGeminiStreamingChatModel.builder()
             .project(client.projectId)
             .location(client.location)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/githubModels/GitHubModelsClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -9,8 +9,8 @@ import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import dev.langchain4j.model.github.GitHubModelsChatModel
 import dev.langchain4j.model.github.GitHubModelsStreamingChatModel
 import kotlinx.coroutines.CoroutineScope
@@ -26,7 +26,7 @@ class GitHubModelsClientService(private val cs: CoroutineScope) : LLMClientServi
         fun getInstance(): GitHubModelsClientService = service()
     }

-    override suspend fun buildChatModel(client: GitHubModelsClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: GitHubModelsClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return GitHubModelsChatModel.builder()
             .gitHubToken(token)
@@ -37,7 +37,7 @@ class GitHubModelsClientService(private val cs: CoroutineScope) : LLMClientServi
             .build()
     }

-    override suspend fun buildStreamingChatModel(client: GitHubModelsClientConfiguration): StreamingChatLanguageModel? {
+    override suspend fun buildStreamingChatModel(client: GitHubModelsClientConfiguration): StreamingChatModel? {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return GitHubModelsStreamingChatModel.builder()
             .gitHubToken(token)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/huggingface/HuggingFaceClientService.kt

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@ import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
-import dev.langchain4j.model.chat.ChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
 import dev.langchain4j.model.huggingface.HuggingFaceChatModel
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
@@ -24,7 +24,7 @@ class HuggingFaceClientService(private val cs: CoroutineScope) : LLMClientServic
         fun getInstance(): HuggingFaceClientService = service()
     }

-    override suspend fun buildChatModel(client: HuggingFaceClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: HuggingFaceClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)

         return HuggingFaceChatModel.builder()

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/mistral/MistralAIClientService.kt

Lines changed: 4 additions & 4 deletions
@@ -9,8 +9,8 @@ import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
 import com.intellij.util.text.nullize
-import dev.langchain4j.model.chat.ChatLanguageModel
-import dev.langchain4j.model.chat.StreamingChatLanguageModel
+import dev.langchain4j.model.chat.ChatModel
+import dev.langchain4j.model.chat.StreamingChatModel
 import dev.langchain4j.model.mistralai.MistralAiChatModel
 import dev.langchain4j.model.mistralai.MistralAiModels
 import dev.langchain4j.model.mistralai.MistralAiStreamingChatModel
@@ -39,7 +39,7 @@ class MistralAIClientService(private val cs: CoroutineScope) : LLMClientService<
         return availableModels.map { it.id }
     }

-    override suspend fun buildChatModel(client: MistralAIClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: MistralAIClientConfiguration): ChatModel {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return MistralAiChatModel.builder()
             .modelName(client.modelId)
@@ -50,7 +50,7 @@ class MistralAIClientService(private val cs: CoroutineScope) : LLMClientService<
             .build()
     }

-    override suspend fun buildStreamingChatModel(client: MistralAIClientConfiguration): StreamingChatLanguageModel? {
+    override suspend fun buildStreamingChatModel(client: MistralAIClientConfiguration): StreamingChatModel? {
         val token = client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)
         return MistralAiStreamingChatModel.builder()
             .modelName(client.modelId)
