Skip to content

Commit 12c57a6

Browse files
committed
refactor(clients): use llm client services to make suspended http requests
Serialization and deserialization don't work, because the coroutine scope gets deserialized instead of being injected.
1 parent 3d7883f commit 12c57a6

17 files changed

+503
-502
lines changed

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/AICommitAction.kt

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -55,10 +55,9 @@ class AICommitAction : AnAction(), DumbAware {
5555
val llmClient = AppSettings2.instance.getActiveLLMClient()
5656
runBlocking(Dispatchers.Main) {
5757
try {
58-
val generatedCommitMessage = llmClient.generateCommitMessage(prompt)
59-
commitMessage.setCommitMessage(generatedCommitMessage)
60-
AppSettings2.instance.recordHit()
58+
llmClient.generateCommitMessage(prompt, commitMessage)
6159
} catch (e: Exception) {
60+
// TODO @Blarc: This will never happen, commit message generating is called in a suspended function
6261
commitMessage.setCommitMessage(e.message ?: message("action.error"))
6362
sendNotification(Notification.unsuccessfulRequest(e.message ?: message("action.unknown-error")))
6463
}

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AICommitsListCellRenderer.kt

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
package com.github.blarc.ai.commits.intellij.plugin.settings
22

3-
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClient
3+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
44
import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.Prompt
55
import java.awt.Component
66
import java.util.*
@@ -25,7 +25,8 @@ class AICommitsListCellRenderer : DefaultListCellRenderer() {
2525
text = value.name
2626
}
2727

28-
is LLMClient -> {
28+
is LLMClientConfiguration -> {
29+
icon = value.getIcon()
2930
text = value.displayName
3031
}
3132
}

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@ package com.github.blarc.ai.commits.intellij.plugin.settings
33
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils
44
import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
55
import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
6-
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClient
7-
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClient
8-
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClient
6+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
7+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientConfiguration
8+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientConfiguration
99
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientService
1010
import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.DefaultPrompts
1111
import com.intellij.openapi.application.ApplicationManager
@@ -42,13 +42,13 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
4242

4343
@XCollection(
4444
elementTypes = [
45-
OpenAiClient::class,
46-
OllamaClient::class,
45+
OpenAiClientConfiguration::class,
46+
OllamaClientConfiguration::class,
4747
],
4848
style = XCollection.Style.v2
4949
)
50-
var llmClients = setOf<LLMClient>(
51-
OpenAiClient()
50+
var llmClientConfigurations = setOf<LLMClientConfiguration>(
51+
OpenAiClientConfiguration()
5252
)
5353

5454
private var activeLlmClient = "OpenAI"
@@ -68,7 +68,7 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
6868
override fun noStateLoaded() {
6969
val appSettings = AppSettings.instance
7070
migrateSettingsFromVersion1(appSettings)
71-
val openAiLlmClient = llmClients.find { it.displayName == "OpenAI" }
71+
val openAiLlmClient = llmClientConfigurations.find { it.displayName == "OpenAI" }
7272
migrateOpenAiClientFromVersion1(openAiLlmClient, appSettings)
7373
}
7474

@@ -82,8 +82,8 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
8282
appExclusions = appSettings.appExclusions
8383
}
8484

85-
private fun migrateOpenAiClientFromVersion1(openAiLlmClient: LLMClient?, appSettings: AppSettings) {
86-
openAiLlmClient?.apply {
85+
private fun migrateOpenAiClientFromVersion1(openAiLlmClientConfiguration: LLMClientConfiguration?, appSettings: AppSettings) {
86+
openAiLlmClientConfiguration?.apply {
8787
host = appSettings.openAIHost
8888
appSettings.openAISocketTimeout.toIntOrNull()?.let { timeout = it }
8989
proxyUrl = appSettings.proxyUrl
@@ -107,14 +107,14 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
107107
return AICommitsUtils.matchesGlobs(path, appExclusions)
108108
}
109109

110-
fun getActiveLLMClient(): LLMClient {
111-
return llmClients.find { it.displayName == activeLlmClient }!!
110+
fun getActiveLLMClient(): LLMClientConfiguration {
111+
return llmClientConfigurations.find { it.displayName == activeLlmClient }!!
112112
}
113113

114-
fun setActiveLlmClient(llmClient: LLMClient) {
114+
fun setActiveLlmClient(llmClientConfiguration: LLMClientConfiguration) {
115115
// TODO @Blarc: Throw exception if llm client name is not valid
116-
llmClients.find { it.displayName == llmClient.displayName }?.let {
117-
activeLlmClient = llmClient.displayName
116+
llmClientConfigurations.find { it.displayName == llmClientConfiguration.displayName }?.let {
117+
activeLlmClient = llmClientConfiguration.displayName
118118
}
119119
}
120120

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettingsConfigurable.kt

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ package com.github.blarc.ai.commits.intellij.plugin.settings
22

33
import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle
44
import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
5-
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClient
5+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
66
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientTable
77
import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.Prompt
88
import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.PromptTable
@@ -17,7 +17,7 @@ class AppSettingsConfigurable : BoundConfigurable(message("settings.general.grou
1717

1818
private val llmClientTable = LLMClientTable()
1919
private lateinit var llmClientToolbarDecorator: ToolbarDecorator
20-
private lateinit var llmClientComboBox: ComboBox<LLMClient>
20+
private lateinit var llmClientConfigurationComboBox: ComboBox<LLMClientConfiguration>
2121
private val promptTable = PromptTable()
2222
private lateinit var toolbarDecorator: ToolbarDecorator
2323
private lateinit var promptComboBox: ComboBox<Prompt>
@@ -26,7 +26,8 @@ class AppSettingsConfigurable : BoundConfigurable(message("settings.general.grou
2626

2727
row {
2828
label(message("settings.llmClient")).widthGroup("labelPrompt")
29-
llmClientComboBox = comboBox(AppSettings2.instance.llmClients, AICommitsListCellRenderer())
29+
// TODO @Blarc: add icon next to LLMClient name
30+
llmClientConfigurationComboBox = comboBox(AppSettings2.instance.llmClientConfigurations, AICommitsListCellRenderer())
3031
.bindItem(getter = AppSettings2.instance::getActiveLLMClient) {
3132
it?.let {
3233
AppSettings2.instance.setActiveLlmClient(it)
@@ -41,18 +42,18 @@ class AppSettingsConfigurable : BoundConfigurable(message("settings.general.grou
4142
}
4243
.setEditAction {
4344
llmClientTable.editLlmClient()?.let {
44-
val editingActive = llmClientComboBox.selectedItem == it.first
45-
llmClientComboBox.removeItem(it.first)
46-
llmClientComboBox.addItem(it.second)
45+
val editingActive = llmClientConfigurationComboBox.selectedItem == it.first
46+
llmClientConfigurationComboBox.removeItem(it.first)
47+
llmClientConfigurationComboBox.addItem(it.second)
4748

4849
if (editingActive) {
49-
llmClientComboBox.selectedItem = it.second
50+
llmClientConfigurationComboBox.selectedItem = it.second
5051
}
5152
}
5253
}
5354
.setRemoveAction {
5455
llmClientTable.removeLlmClient()?.let {
55-
llmClientComboBox.removeItem(it)
56+
llmClientConfigurationComboBox.removeItem(it)
5657
}
5758
}
5859
.disableUpDownActions()

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClient.kt renamed to src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientConfiguration.kt

Lines changed: 8 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -5,18 +5,20 @@ import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
55
import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
66
import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
77
import com.intellij.ide.passwordSafe.PasswordSafe
8+
import com.intellij.openapi.ui.ComboBox
9+
import com.intellij.openapi.vcs.ui.CommitMessage
810
import com.intellij.util.xmlb.annotations.Attribute
911
import com.intellij.util.xmlb.annotations.Transient
1012
import javax.swing.Icon
1113

12-
abstract class LLMClient(
14+
abstract class LLMClientConfiguration(
1315
@Attribute var displayName: String,
1416
@Attribute var host: String,
1517
@Attribute var proxyUrl: String?,
1618
@Attribute var timeout: Int,
1719
@Attribute var modelId: String,
1820
@Attribute var temperature: String,
19-
) : Cloneable, Comparable<LLMClient> {
21+
) : Cloneable, Comparable<LLMClientConfiguration> {
2022
@get:Transient
2123
var token: String
2224
get() = retrieveToken(displayName) ?: ""
@@ -28,20 +30,11 @@ abstract class LLMClient(
2830

2931
abstract fun getModelIds(): Set<String>
3032

31-
abstract suspend fun generateCommitMessage(prompt: String): String
33+
abstract fun generateCommitMessage(prompt: String, commitMessage: CommitMessage)
3234

33-
abstract fun getRefreshModelFunction(): (suspend () -> Unit)?
35+
abstract fun getRefreshModelsFunction(): ((ComboBox<String>) -> Unit)?
3436

35-
public abstract override fun clone(): LLMClient
36-
37-
@Throws(Exception::class)
38-
abstract suspend fun verifyConfiguration(
39-
newHost: String,
40-
newProxy: String?,
41-
newTimeout: String,
42-
newModelId: String,
43-
newToken: String
44-
)
37+
public abstract override fun clone(): LLMClientConfiguration
4538

4639
abstract fun panel(): LLMClientPanel
4740

@@ -53,7 +46,7 @@ abstract class LLMClient(
5346
}
5447
}
5548

56-
override fun compareTo(other: LLMClient): Int {
49+
override fun compareTo(other: LLMClientConfiguration): Int {
5750
return displayName.compareTo(other.displayName)
5851
}
5952
}
Lines changed: 125 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,131 @@
11
package com.github.blarc.ai.commits.intellij.plugin.settings.clients
22

3-
import com.intellij.openapi.ui.DialogPanel
3+
import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
4+
import com.github.blarc.ai.commits.intellij.plugin.isInt
5+
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientService
6+
import com.github.blarc.ai.commits.intellij.plugin.temperatureValid
7+
import com.intellij.openapi.components.service
8+
import com.intellij.openapi.ui.ComboBox
9+
import com.intellij.ui.components.JBLabel
10+
import com.intellij.ui.components.JBTextField
11+
import com.intellij.ui.dsl.builder.*
12+
import com.intellij.ui.util.minimumWidth
413

5-
interface LLMClientPanel {
614

7-
fun create(): DialogPanel
15+
abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
816

17+
val hostComboBox = ComboBox(client.getHosts().toTypedArray())
18+
val proxyTextField = JBTextField()
19+
val socketTimeoutTextField = JBTextField()
20+
val modelComboBox = ComboBox(client.getModelIds().toTypedArray())
21+
val temperatureTextField = JBTextField()
22+
val verifyLabel = JBLabel()
23+
24+
open fun create() = panel {
25+
hostRow()
26+
timeoutRow()
27+
modelIdRow()
28+
temperatureRow()
29+
verifyRow()
30+
}
31+
32+
open fun Panel.hostRow() {
33+
row {
34+
label(message("settings.llmClient.host"))
35+
.widthGroup("label")
36+
cell(hostComboBox)
37+
.applyToComponent {
38+
isEditable = true
39+
}
40+
.bindItem(client::host.toNullableProperty())
41+
.widthGroup("input")
42+
}
43+
}
44+
45+
open fun Panel.proxyRow() {
46+
row {
47+
label(message("settings.llmClient.proxy"))
48+
.widthGroup("label")
49+
cell(proxyTextField)
50+
.applyToComponent { minimumWidth = 400 }
51+
.bindText(client::proxyUrl.toNonNullableProperty(""))
52+
.resizableColumn()
53+
.widthGroup("input")
54+
}
55+
row {
56+
comment(message("settings.llmClient.proxy.comment"))
57+
}
58+
}
59+
60+
open fun Panel.timeoutRow() {
61+
row {
62+
label(message("settings.llmClient.timeout")).widthGroup("label")
63+
cell(socketTimeoutTextField)
64+
.applyToComponent { minimumWidth = 400 }
65+
.bindIntText(client::timeout)
66+
.resizableColumn()
67+
.widthGroup("input")
68+
.validationOnInput { isInt(it.text) }
69+
}
70+
}
71+
72+
open fun Panel.modelIdRow() {
73+
row {
74+
label(message("settings.llmClient.modelId"))
75+
.widthGroup("label")
76+
77+
cell(modelComboBox)
78+
.applyToComponent {
79+
isEditable = true
80+
}
81+
.bindItem({ client.modelId }, {
82+
if (it != null) {
83+
client.modelId = it
84+
}
85+
})
86+
.widthGroup("input")
87+
.resizableColumn()
88+
.onApply { service<OllamaClientService>().modelIds.add(modelComboBox.item) }
89+
90+
client.getRefreshModelsFunction()?.let { f ->
91+
button(message("settings.refreshModels")) {
92+
f.invoke(modelComboBox)
93+
}
94+
.align(AlignX.RIGHT)
95+
.widthGroup("button")
96+
}
97+
}
98+
}
99+
100+
open fun Panel.temperatureRow() {
101+
row {
102+
label(message("settings.llmClient.temperature"))
103+
.widthGroup("label")
104+
105+
cell(temperatureTextField)
106+
.bindText(client::temperature)
107+
.applyToComponent { minimumWidth = 400 }
108+
.resizableColumn()
109+
.widthGroup("input")
110+
.validationOnInput { temperatureValid(it.text) }
111+
112+
contextHelp(message("settings.llmClient.temperature.comment"))
113+
.resizableColumn()
114+
.align(AlignX.LEFT)
115+
}
116+
}
117+
118+
open fun Panel.verifyRow() {
119+
row {
120+
cell(verifyLabel)
121+
.applyToComponent { setAllowAutoWrapping(true) }
122+
.align(AlignX.LEFT)
123+
124+
button(message("settings.verifyToken")) { verifyConfiguration() }
125+
.align(AlignX.RIGHT)
126+
.widthGroup("button")
127+
}
128+
}
129+
130+
abstract fun verifyConfiguration()
9131
}

0 commit comments

Comments
 (0)