Skip to content

Commit 63308e4

Browse files
committed
feat(mistralai): support Mistral AI client
1 parent f103ffc commit 63308e4

File tree

13 files changed

+290
-3
lines changed

13 files changed

+290
-3
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
- Option to stop the commit message generation by clicking the action icon again.
1010
- Setting for HuggingFace client to automatically remove prompt from the generated commit message.
1111
- Show progress and result when refreshing models via API.
12+
- Support for Mistral AI.
1213

1314
### Fixed
1415

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ plugin and configure a LLM API client in plugin's settings: <kbd>Settings</kbd>
4242
- Gemini Vertex AI
4343
- GitHub Models
4444
- Hugging Face
45+
- Mistral AI
4546
- Open AI
4647
- Ollama
4748
- Qianfan (Ernie)

build.gradle.kts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -112,6 +112,7 @@ dependencies {
112112
implementation("dev.langchain4j:langchain4j-google-ai-gemini")
113113
implementation("dev.langchain4j:langchain4j-google-ai-gemini")
114114
implementation("dev.langchain4j:langchain4j-github-models")
115+
implementation("dev.langchain4j:langchain4j-mistral-ai")
115116
// tests
116117
testImplementation("org.junit.jupiter:junit-jupiter-params:5.11.3")
117118
testImplementation("org.junit.jupiter:junit-jupiter:5.11.3")

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/Icons.kt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ object Icons {
2727
val AZURE_OPEN_AI = AICommitsIcon("/icons/azureOpenAi.svg", null)
2828
val HUGGING_FACE = AICommitsIcon("/icons/huggingface.svg", null)
2929
val GITHUB = AICommitsIcon("/icons/github15bright.svg", "/icons/github15dark.svg")
30+
val MISTRAL = AICommitsIcon("/icons/mistral.svg", null)
3031

3132
object Process {
3233
val STOP = AICommitsIcon("/icons/stop.svg", "/icons/stop_dark.svg")

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ import com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiGoogle
1111
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiVertex.GeminiClientConfiguration
1212
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.githubModels.GitHubModelsClientConfiguration
1313
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.huggingface.HuggingFaceClientConfiguration
14+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral.MistralAIClientConfiguration
1415
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientConfiguration
1516
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientConfiguration
1617
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientSharedState
@@ -63,7 +64,8 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
6364
AnthropicClientConfiguration::class,
6465
AzureOpenAiClientConfiguration::class,
6566
HuggingFaceClientConfiguration::class,
66-
GitHubModelsClientConfiguration::class
67+
GitHubModelsClientConfiguration::class,
68+
MistralAIClientConfiguration::class
6769
],
6870
style = XCollection.Style.v2
6971
)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientTable.kt

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiGoogle
99
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiVertex.GeminiClientConfiguration
1010
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.githubModels.GitHubModelsClientConfiguration
1111
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.huggingface.HuggingFaceClientConfiguration
12+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral.MistralAIClientConfiguration
1213
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientConfiguration
1314
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientConfiguration
1415
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.qianfan.QianfanClientConfiguration
@@ -155,8 +156,9 @@ class LLMClientTable {
155156
AnthropicClientConfiguration(),
156157
AzureOpenAiClientConfiguration(),
157158
HuggingFaceClientConfiguration(),
158-
GitHubModelsClientConfiguration()
159-
)
159+
GitHubModelsClientConfiguration(),
160+
MistralAIClientConfiguration()
161+
).sortedBy { it.getClientName() }
160162
} else {
161163
listOf(newLLMClientConfiguration)
162164
}
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
package com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral;
2+
3+
import com.github.blarc.ai.commits.intellij.plugin.Icons
4+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
5+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
6+
import com.intellij.openapi.project.Project
7+
import com.intellij.openapi.vcs.ui.CommitMessage
8+
import com.intellij.util.xmlb.annotations.Attribute
9+
import com.intellij.util.xmlb.annotations.Transient
10+
import com.intellij.vcs.commit.AbstractCommitWorkflowHandler
11+
import dev.langchain4j.model.mistralai.MistralAiChatModelName
12+
import kotlinx.coroutines.Job
13+
import javax.swing.Icon
14+
15+
/**
 * Persisted configuration for the Mistral AI client.
 *
 * Defaults to the `open-mistral-7b` model with a temperature of 0.7.
 * The API token is deliberately excluded from XML serialization; only the
 * [tokenIsStored] flag is persisted, the secret itself lives in PasswordSafe.
 */
class MistralAIClientConfiguration : LLMClientConfiguration(
    CLIENT_NAME,
    MistralAiChatModelName.OPEN_MISTRAL_7B.toString(),
    "0.7"
) {

    // True once a token has been saved to PasswordSafe for this configuration's id.
    @Attribute
    var tokenIsStored: Boolean = false

    // Transient so the raw secret is never written into the settings XML.
    @Transient
    var token: String? = null

    // Optional nucleus-sampling parameter; null means "use provider default".
    @Attribute
    var topP: Double? = null

    // Optional cap on generated tokens; null means "use provider default".
    @Attribute
    var maxTokens: Int? = null

    override fun getClientName(): String = CLIENT_NAME

    override fun getClientIcon(): Icon = Icons.MISTRAL.getThemeBasedIcon()

    override fun getSharedState(): LLMClientSharedState = MistralAIClientSharedState.getInstance()

    override fun generateCommitMessage(commitWorkflowHandler: AbstractCommitWorkflowHandler<*, *>, commitMessage: CommitMessage, project: Project) {
        return MistralAIClientService.getInstance().generateCommitMessage(this, commitWorkflowHandler, commitMessage, project)
    }

    override fun getGenerateCommitMessageJob(): Job? = MistralAIClientService.getInstance().generateCommitMessageJob

    /** Field-by-field copy so the settings dialog can edit a draft and discard it on cancel. */
    override fun clone(): LLMClientConfiguration {
        val copy = MistralAIClientConfiguration()
        copy.id = id
        copy.name = name
        copy.modelId = modelId
        copy.temperature = temperature
        copy.tokenIsStored = tokenIsStored
        copy.token = token
        copy.topP = topP
        copy.maxTokens = maxTokens
        return copy
    }

    override fun panel() = MistralAIClientPanel(this)

    companion object {
        const val CLIENT_NAME = "MistralAI"
    }
}
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
package com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral;
2+
3+
import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
4+
import com.github.blarc.ai.commits.intellij.plugin.emptyText
5+
import com.github.blarc.ai.commits.intellij.plugin.isInt
6+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientPanel
7+
import com.intellij.ui.components.JBPasswordField
8+
import com.intellij.ui.components.JBTextField
9+
import com.intellij.ui.dsl.builder.*
10+
11+
/**
 * Settings panel for a [MistralAIClientConfiguration].
 *
 * The private primary constructor takes the service as a parameter for
 * testability; production code uses the secondary constructor, which wires
 * in the application-level [MistralAIClientService].
 */
class MistralAIClientPanel private constructor(
    private val clientConfiguration: MistralAIClientConfiguration,
    val service: MistralAIClientService
) : LLMClientPanel(clientConfiguration) {

    private val tokenPasswordField = JBPasswordField()
    private val topPTextField = JBTextField()
    private val maxTokensTextField = JBTextField()

    constructor(configuration: MistralAIClientConfiguration) : this(configuration, MistralAIClientService.getInstance())

    override fun create() = panel {
        nameRow()
        modelIdRow()
        tokenRow()
        maxTokens()
        temperatureRow()
        topPDoubleRow(topPTextField, clientConfiguration::topP.toNullableProperty())
        verifyRow()
    }

    override fun verifyConfiguration() {
        // Configuration passed to panel is already a copy of the original or a new configuration
        clientConfiguration.modelId = modelComboBox.item
        clientConfiguration.temperature = temperatureTextField.text
        clientConfiguration.token = String(tokenPasswordField.password)
        clientConfiguration.topP = topPTextField.text.toDoubleOrNull()
        clientConfiguration.maxTokens = maxTokensTextField.text.toIntOrNull()
        service.verifyConfiguration(clientConfiguration, verifyLabel)
    }

    override fun getRefreshModelsFunction() = fun() {
        service.refreshModels(clientConfiguration, modelComboBox, verifyLabel)
    }

    // Row with a password field whose setter persists the token to PasswordSafe
    // instead of binding it to the configuration object.
    private fun Panel.tokenRow() {
        row {
            label(message("settings.llmClient.token"))
                .widthGroup("label")
            cell(tokenPasswordField)
                .bindText(getter = { "" }, setter = {
                    MistralAIClientService.getInstance().saveToken(clientConfiguration, it)
                })
                // NOTE(review): the example placeholder reuses the OpenAI bundle key —
                // confirm whether a Mistral-specific key should exist instead.
                .emptyText(if (clientConfiguration.tokenIsStored) message("settings.llmClient.token.stored") else message("settings.openAI.token.example"))
                .resizableColumn()
                .align(Align.FILL)
                // maxLineLength was eye-balled, but prevents the dialog getting wider
                .comment(message("settings.mistral.token.comment"), 50)
        }
    }

    // Row for the optional max-tokens limit.
    private fun Panel.maxTokens() {
        row {
            label(message("settings.mistral.maxTokens"))
                .widthGroup("label")
            cell(maxTokensTextField)
                // toIntOrNull(): the original used toInt(), which throws
                // NumberFormatException when the field is blank or cleared.
                .bindText({ clientConfiguration.maxTokens?.toString() ?: "" }, { s -> clientConfiguration.maxTokens = s.toIntOrNull() })
                .align(Align.FILL)
                .validationOnInput { isInt(it.text) }
                .resizableColumn()
        }
    }
}
Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
package com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral;
2+
3+
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.getCredentialAttributes
4+
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
5+
import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
6+
import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
7+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientService
8+
import com.intellij.ide.passwordSafe.PasswordSafe
9+
import com.intellij.openapi.components.Service
10+
import com.intellij.openapi.components.service
11+
import com.intellij.util.text.nullize
12+
import dev.langchain4j.model.chat.ChatLanguageModel
13+
import dev.langchain4j.model.chat.StreamingChatLanguageModel
14+
import dev.langchain4j.model.mistralai.MistralAiChatModel
15+
import dev.langchain4j.model.mistralai.MistralAiModels
16+
import dev.langchain4j.model.mistralai.MistralAiStreamingChatModel
17+
import kotlinx.coroutines.CoroutineScope
18+
import kotlinx.coroutines.Dispatchers
19+
import kotlinx.coroutines.launch
20+
import kotlinx.coroutines.withContext
21+
22+
/**
 * Application-level service that builds langchain4j Mistral AI models and
 * performs token persistence for [MistralAIClientConfiguration]s.
 */
@Service(Service.Level.APP)
class MistralAIClientService(private val cs: CoroutineScope) : LLMClientService<MistralAIClientConfiguration>(cs) {

    companion object {
        @JvmStatic
        fun getInstance(): MistralAIClientService = service()
    }

    // Prefer the in-memory (not yet saved) token; fall back to the one persisted
    // in PasswordSafe under the configuration's id. Extracted because the original
    // repeated this expression verbatim in all three builder methods.
    private suspend fun resolveToken(client: MistralAIClientConfiguration): String? =
        client.token.nullize(true) ?: retrieveToken(client.id)?.toString(true)

    override suspend fun getAvailableModels(client: MistralAIClientConfiguration): List<String> {
        val mistralAiModels = MistralAiModels.builder()
            .apiKey(resolveToken(client))
            .build()

        // Network call — keep it on the IO dispatcher.
        val availableModels = withContext(Dispatchers.IO) {
            mistralAiModels.availableModels().content()
        }
        return availableModels.map { it.id }
    }

    override suspend fun buildChatModel(client: MistralAIClientConfiguration): ChatLanguageModel {
        return MistralAiChatModel.builder()
            .modelName(client.modelId)
            .temperature(client.temperature.toDouble())
            .maxTokens(client.maxTokens)
            .topP(client.topP)
            .apiKey(resolveToken(client))
            .build()
    }

    override suspend fun buildStreamingChatModel(client: MistralAIClientConfiguration): StreamingChatLanguageModel? {
        return MistralAiStreamingChatModel.builder()
            .modelName(client.modelId)
            .temperature(client.temperature.toDouble())
            .maxTokens(client.maxTokens)
            .topP(client.topP)
            .apiKey(resolveToken(client))
            .build()
    }

    /**
     * Persists [token] to PasswordSafe asynchronously and marks the
     * configuration accordingly; failures surface as a notification
     * rather than an exception.
     */
    fun saveToken(client: MistralAIClientConfiguration, token: String) {
        cs.launch(Dispatchers.Default) {
            try {
                PasswordSafe.instance.setPassword(getCredentialAttributes(client.id), token)
                client.tokenIsStored = true
            } catch (e: Exception) {
                sendNotification(Notification.unableToSaveToken(e.message))
            }
        }
    }
}
Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
package com.github.blarc.ai.commits.intellij.plugin.settings.clients.mistral;
2+
3+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
4+
import com.intellij.openapi.components.*
5+
import com.intellij.util.xmlb.annotations.XCollection
6+
import dev.langchain4j.model.mistralai.MistralAiChatModelName
7+
8+
/**
 * State shared across all Mistral AI client configurations (known hosts and
 * model ids), persisted to AICommitsMistralAI.xml.
 */
@Service(Service.Level.APP)
@State(name = "MistralAIClientSharedState", storages = [Storage("AICommitsMistralAI.xml")])
class MistralAIClientSharedState : PersistentStateComponent<MistralAIClientSharedState>, LLMClientSharedState {

    companion object {
        @JvmStatic
        fun getInstance(): MistralAIClientSharedState = service()
    }

    // Mistral AI is a hosted API, so there is no sensible default host. The
    // original seeded this with Ollama's localhost URL ("http://localhost:11434/"),
    // a copy-paste leftover from the Ollama client.
    @XCollection(style = XCollection.Style.v2)
    override val hosts = mutableSetOf<String>()

    // Seed with every model name known to langchain4j; refreshes via the API
    // are merged in through loadState.
    @XCollection(style = XCollection.Style.v2)
    override val modelIds: MutableSet<String> = MistralAiChatModelName.entries
        .map { it.toString() }
        .toMutableSet()

    override fun getState(): MistralAIClientSharedState = this

    // Merge (rather than replace) so defaults and runtime-discovered values
    // both survive a settings reload.
    override fun loadState(state: MistralAIClientSharedState) {
        modelIds += state.modelIds
        hosts += state.hosts
    }
}

0 commit comments

Comments
 (0)