
Commit 6ab4c11

refactor(clients): split llm client service to state and behaviour services
1 parent 12c57a6 commit 6ab4c11
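
In outline: per-client data that must survive IDE restarts (known hosts and model IDs) moves out of the LLMClientService hierarchy into dedicated PersistentStateComponent classes behind a new LLMClientSharedState interface, while the service classes keep only behaviour (building chat models, issuing requests) and now take a non-null, constructor-injected CoroutineScope instead of a nullable one dereferenced with `!!`. A minimal sketch of the resulting shape (the "Acme" names are hypothetical, for illustration only; the real Ollama and OpenAI classes appear in the diff below):

import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
import com.intellij.openapi.components.*
import com.intellij.util.xmlb.XmlSerializerUtil
import com.intellij.util.xmlb.annotations.XCollection
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch

// State half: persisted to an XML storage file across IDE restarts.
@Service(Service.Level.APP)
@State(name = "AcmeClientSharedState", storages = [Storage("AICommitsAcme.xml")])
class AcmeClientSharedState : PersistentStateComponent<AcmeClientSharedState>, LLMClientSharedState {

    @XCollection(style = XCollection.Style.v2)
    override val hosts: MutableSet<String> = mutableSetOf()

    @XCollection(style = XCollection.Style.v2)
    override val modelIds: MutableSet<String> = mutableSetOf()

    override fun getState(): AcmeClientSharedState = this

    override fun loadState(state: AcmeClientSharedState) {
        XmlSerializerUtil.copyBean(state, this)
    }

    companion object {
        fun getInstance(): AcmeClientSharedState = service()
    }
}

// Behaviour half: no @State, so nothing is persisted; the platform injects the CoroutineScope,
// which removes the nullable scope and the cs!! calls dropped in this commit.
@Service(Service.Level.APP)
class AcmeClientService(private val cs: CoroutineScope) {
    fun refreshModels() {
        cs.launch(Dispatchers.IO) {
            // Fetch available models from the provider, then cache them in the shared state:
            // AcmeClientSharedState.getInstance().modelIds.addAll(fetchedIds)
        }
    }
}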

12 files changed (+132 -84 lines)

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 7 additions & 4 deletions

@@ -6,10 +6,13 @@ import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotificatio
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientConfiguration
-import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientService
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientSharedState
 import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.DefaultPrompts
 import com.intellij.openapi.application.ApplicationManager
-import com.intellij.openapi.components.*
+import com.intellij.openapi.components.PersistentStateComponent
+import com.intellij.openapi.components.Service
+import com.intellij.openapi.components.State
+import com.intellij.openapi.components.Storage
 import com.intellij.util.xmlb.Converter
 import com.intellij.util.xmlb.XmlSerializerUtil
 import com.intellij.util.xmlb.annotations.OptionTag

@@ -92,8 +95,8 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
             AICommitsUtils.retrieveToken(appSettings.openAITokenTitle)?.let { token = it }
         }

-        service<OpenAiClientService>().hosts.addAll(appSettings.openAIHosts)
-        service<OpenAiClientService>().modelIds.addAll(appSettings.openAIModelIds)
+        OpenAiClientSharedState.getInstance().hosts.addAll(appSettings.openAIHosts)
+        OpenAiClientSharedState.getInstance().modelIds.addAll(appSettings.openAIModelIds)
     }

     fun recordHit() {

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientConfiguration.kt

Lines changed: 17 additions & 2 deletions

@@ -26,9 +26,23 @@ abstract class LLMClientConfiguration(

     abstract fun getIcon(): Icon

-    abstract fun getHosts(): Set<String>
+    abstract fun getSharedState(): LLMClientSharedState

-    abstract fun getModelIds(): Set<String>
+    fun getHosts(): Set<String> {
+        return getSharedState().hosts
+    }
+
+    fun getModelIds(): Set<String> {
+        return getSharedState().modelIds
+    }
+
+    fun addHost(host: String) {
+        getSharedState().hosts.add(host)
+    }
+
+    fun addModelId(modelId: String) {
+        getSharedState().modelIds.add(modelId)
+    }

     abstract fun generateCommitMessage(prompt: String, commitMessage: CommitMessage)

@@ -49,4 +63,5 @@ abstract class LLMClientConfiguration(
     override fun compareTo(other: LLMClientConfiguration): Int {
         return displayName.compareTo(other.displayName)
     }
+
 }

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientPanel.kt

Lines changed: 14 additions & 13 deletions

@@ -2,22 +2,22 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients

 import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
 import com.github.blarc.ai.commits.intellij.plugin.isInt
-import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientService
 import com.github.blarc.ai.commits.intellij.plugin.temperatureValid
-import com.intellij.openapi.components.service
 import com.intellij.openapi.ui.ComboBox
 import com.intellij.ui.components.JBLabel
 import com.intellij.ui.components.JBTextField
 import com.intellij.ui.dsl.builder.*
 import com.intellij.ui.util.minimumWidth


-abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
+abstract class LLMClientPanel(
+    private val clientConfiguration: LLMClientConfiguration,
+) {

-    val hostComboBox = ComboBox(client.getHosts().toTypedArray())
+    val hostComboBox = ComboBox(clientConfiguration.getHosts().toTypedArray())
     val proxyTextField = JBTextField()
     val socketTimeoutTextField = JBTextField()
-    val modelComboBox = ComboBox(client.getModelIds().toTypedArray())
+    val modelComboBox = ComboBox(clientConfiguration.getModelIds().toTypedArray())
     val temperatureTextField = JBTextField()
     val verifyLabel = JBLabel()

@@ -37,8 +37,9 @@ abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
                 .applyToComponent {
                     isEditable = true
                 }
-                .bindItem(client::host.toNullableProperty())
+                .bindItem(clientConfiguration::host.toNullableProperty())
                 .widthGroup("input")
+                .onApply { clientConfiguration.addHost(hostComboBox.item) }
         }
     }

@@ -48,7 +49,7 @@ abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
                 .widthGroup("label")
             cell(proxyTextField)
                 .applyToComponent { minimumWidth = 400 }
-                .bindText(client::proxyUrl.toNonNullableProperty(""))
+                .bindText(clientConfiguration::proxyUrl.toNonNullableProperty(""))
                 .resizableColumn()
                 .widthGroup("input")
         }

@@ -62,7 +63,7 @@ abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
             label(message("settings.llmClient.timeout")).widthGroup("label")
             cell(socketTimeoutTextField)
                 .applyToComponent { minimumWidth = 400 }
-                .bindIntText(client::timeout)
+                .bindIntText(clientConfiguration::timeout)
                 .resizableColumn()
                 .widthGroup("input")
                 .validationOnInput { isInt(it.text) }

@@ -78,16 +79,16 @@ abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
                 .applyToComponent {
                     isEditable = true
                 }
-                .bindItem({ client.modelId }, {
+                .bindItem({ clientConfiguration.modelId }, {
                     if (it != null) {
-                        client.modelId = it
+                        clientConfiguration.modelId = it
                     }
                 })
                 .widthGroup("input")
                 .resizableColumn()
-                .onApply { service<OllamaClientService>().modelIds.add(modelComboBox.item) }
+                .onApply { clientConfiguration.addModelId(modelComboBox.item) }

-            client.getRefreshModelsFunction()?.let { f ->
+            clientConfiguration.getRefreshModelsFunction()?.let { f ->
                 button(message("settings.refreshModels")) {
                     f.invoke(modelComboBox)
                 }

@@ -103,7 +104,7 @@ abstract class LLMClientPanel(private val client: LLMClientConfiguration) {
                 .widthGroup("label")

             cell(temperatureTextField)
-                .bindText(client::temperature)
+                .bindText(clientConfiguration::temperature)
                 .applyToComponent { minimumWidth = 400 }
                 .resizableColumn()
                 .widthGroup("input")

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientService.kt

Lines changed: 3 additions & 2 deletions

@@ -15,7 +15,8 @@ import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.withContext

-abstract class LLMClientService<T : LLMClientConfiguration>(private val cs: CoroutineScope?) {
+abstract class LLMClientService <T: LLMClientConfiguration>(private val cs: CoroutineScope) {
+
     abstract fun buildChatModel(client: T): ChatLanguageModel

     fun generateCommitMessage(client: T, prompt: String, commitMessage: CommitMessage) {

@@ -63,7 +64,7 @@ abstract class LLMClientService<T : LLMClientConfiguration>(private val cs: Coro
     // data class OpenAiErrorWrapper(val error: OpenAiError)

     private fun sendRequest(model: ChatLanguageModel, text: String, onResponse: suspend (r: Response<AiMessage>) -> Unit ) {
-        cs!!.launch(Dispatchers.Default) {
+        cs.launch(Dispatchers.Default) {
             val response = withContext(Dispatchers.IO) {
                 model.generate(
                     listOf(
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientSharedState.kt

Lines changed: 8 additions & 0 deletions

@@ -0,0 +1,8 @@
+package com.github.blarc.ai.commits.intellij.plugin.settings.clients
+
+interface LLMClientSharedState {
+
+    val hosts: MutableSet<String>
+
+    val modelIds: MutableSet<String>
+}

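For orientation: this small interface is the seam the rest of the settings code now depends on. The new getHosts()/getModelIds()/addHost()/addModelId() helpers in LLMClientConfiguration (see the diff above) forward to whatever LLMClientSharedState implementation a concrete client returns from getSharedState(), so UI code never touches a client-specific service directly. An illustrative usage sketch (the helper function below is hypothetical, not repo code):

// Illustrative only: the settings UI records the user's choices through the configuration,
// which delegates to the client-specific shared state returned by getSharedState().
fun rememberSelection(configuration: LLMClientConfiguration, host: String, modelId: String) {
    configuration.addHost(host)       // -> getSharedState().hosts.add(host)
    configuration.addModelId(modelId) // -> getSharedState().modelIds.add(modelId)
}
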
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientConfiguration.kt

Lines changed: 3 additions & 7 deletions

@@ -2,6 +2,7 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama

 import com.github.blarc.ai.commits.intellij.plugin.Icons
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
 import com.intellij.openapi.ui.ComboBox
 import com.intellij.openapi.vcs.ui.CommitMessage
 import javax.swing.Icon

@@ -18,13 +19,8 @@ class OllamaClientConfiguration(displayName: String = "Ollama") : LLMClientConfi
         return Icons.OLLAMA
     }

-    override fun getHosts(): Set<String> {
-        return OllamaClientService.getInstance().hosts
-    }
-
-    override fun getModelIds(): Set<String> {
-        return OllamaClientService.getInstance().modelIds
-
+    override fun getSharedState(): LLMClientSharedState {
+        return OllamaClientSharedState.getInstance()
     }

     override fun generateCommitMessage(prompt: String, commitMessage: CommitMessage) {
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientPanel.kt

Lines changed: 8 additions & 3 deletions

@@ -1,9 +1,14 @@
 package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama

-import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientPanel

-class OllamaClientPanel(client: LLMClientConfiguration) : LLMClientPanel(client) {
+class OllamaClientPanel private constructor(
+    configuration: OllamaClientConfiguration,
+    val service: OllamaClientService
+): LLMClientPanel(configuration) {
+
+    constructor(configuration: OllamaClientConfiguration): this(configuration, OllamaClientService.getInstance())
+
     override fun verifyConfiguration() {

         val newConfiguration = OllamaClientConfiguration()

@@ -12,6 +17,6 @@ class OllamaClientPanel(client: LLMClientConfiguration) : LLMClientPanel(client)
         newConfiguration.modelId = modelComboBox.item
         newConfiguration.temperature = temperatureTextField.text

-        OllamaClientService.getInstance().verifyConfiguration(newConfiguration, verifyLabel)
+        service.verifyConfiguration(newConfiguration, verifyLabel)
     }
 }

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientService.kt

Lines changed: 6 additions & 23 deletions

@@ -2,11 +2,10 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama

 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientService
 import com.intellij.openapi.application.EDT
-import com.intellij.openapi.components.*
+import com.intellij.openapi.components.Service
+import com.intellij.openapi.components.service
 import com.intellij.openapi.ui.ComboBox
 import com.intellij.openapi.ui.naturalSorted
-import com.intellij.util.xmlb.XmlSerializerUtil
-import com.intellij.util.xmlb.annotations.XCollection
 import dev.langchain4j.model.chat.ChatLanguageModel
 import dev.langchain4j.model.ollama.OllamaChatModel
 import dev.langchain4j.model.ollama.OllamaModels

@@ -18,41 +17,25 @@ import java.time.Duration
 import javax.swing.DefaultComboBoxModel

 @Service(Service.Level.APP)
-@State(name = "OllamaClientService", storages = [Storage("AICommitsOllama.xml")])
-class OllamaClientService(
-    @Transient private val cs: CoroutineScope?
-):
-    PersistentStateComponent<OllamaClientService>,
-    LLMClientService<OllamaClientConfiguration>(cs) {
+class OllamaClientService(private val cs: CoroutineScope) : LLMClientService<OllamaClientConfiguration>(cs) {

     companion object {
         @JvmStatic
         fun getInstance(): OllamaClientService = service()
     }

-    @XCollection(style = XCollection.Style.v2)
-    val hosts = mutableSetOf("http://localhost:11434/")
-
-    @XCollection(style = XCollection.Style.v2)
-    val modelIds: MutableSet<String> = mutableSetOf("llama3")
-
-    override fun getState(): OllamaClientService = this
-
-    override fun loadState(state: OllamaClientService) {
-        XmlSerializerUtil.copyBean(state, this)
-    }
-
     fun refreshModels(client: OllamaClientConfiguration, comboBox: ComboBox<String>) {
         val ollamaModels = OllamaModels.builder()
             .timeout(Duration.ofSeconds(client.timeout.toLong()))
             .baseUrl(client.host)
             .build()

-        cs!!.launch(Dispatchers.Default) {
+        cs.launch(Dispatchers.Default) {
             val availableModels = withContext(Dispatchers.IO) {
                 ollamaModels.availableModels()
             }
-            modelIds.addAll(availableModels.content()
+
+            OllamaClientSharedState.getInstance().modelIds.addAll(availableModels.content()
                 .map { it.name }
             )
             withContext(Dispatchers.EDT) {
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientSharedState.kt

Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
+package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama
+
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
+import com.intellij.openapi.components.*
+import com.intellij.util.xmlb.XmlSerializerUtil
+import com.intellij.util.xmlb.annotations.XCollection
+
+@Service(Service.Level.APP)
+@State(name = "OllamaClientSharedState", storages = [Storage("AICommitsOllama.xml")])
+class OllamaClientSharedState : PersistentStateComponent<OllamaClientSharedState>, LLMClientSharedState {
+
+    companion object {
+        @JvmStatic
+        fun getInstance(): OllamaClientSharedState = service()
+    }
+
+    @XCollection(style = XCollection.Style.v2)
+    override val hosts = mutableSetOf("http://localhost:11434/")
+
+    @XCollection(style = XCollection.Style.v2)
+    override val modelIds: MutableSet<String> = mutableSetOf("llama3")
+
+    override fun getState(): OllamaClientSharedState = this
+
+    override fun loadState(state: OllamaClientSharedState) {
+        XmlSerializerUtil.copyBean(state, this)
+    }
+}

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/openAi/OpenAiClientConfiguration.kt

Lines changed: 3 additions & 7 deletions

@@ -2,7 +2,7 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi

 import com.github.blarc.ai.commits.intellij.plugin.Icons
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
-import com.intellij.openapi.components.service
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientSharedState
 import com.intellij.openapi.vcs.ui.CommitMessage
 import javax.swing.Icon

@@ -18,12 +18,8 @@ class OpenAiClientConfiguration(displayName: String = "OpenAI") : LLMClientConfi
         return Icons.OPEN_AI
     }

-    override fun getHosts(): Set<String> {
-        return service<OpenAiClientService>().hosts
-    }
-
-    override fun getModelIds(): Set<String> {
-        return service<OpenAiClientService>().modelIds
+    override fun getSharedState(): LLMClientSharedState {
+        return OpenAiClientSharedState.getInstance()
     }

     override fun generateCommitMessage(prompt: String, commitMessage: CommitMessage) {

0 commit comments