Commit 3d7883f

feat(ollama): add ollama llm client

Refs: #132
1 parent 7d76111 commit 3d7883f

File tree: 8 files changed, +220 -2 lines changed

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/Icons.kt

Lines changed: 1 addition & 0 deletions

@@ -5,4 +5,5 @@ import com.intellij.openapi.util.IconLoader
 object Icons {
     val AI_COMMITS = IconLoader.getIcon("/icons/aiCommits15.svg", javaClass)
     val OPEN_AI = IconLoader.getIcon("/icons/openai.svg", javaClass)
+    val OLLAMA = IconLoader.getIcon("/icons/ollama15.svg", javaClass)
 }

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 3 additions & 1 deletion

@@ -4,6 +4,7 @@ import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils
 import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
 import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClient
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClient
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClient
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientService
 import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.DefaultPrompts
@@ -41,7 +42,8 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {

     @XCollection(
         elementTypes = [
-            OpenAiClient::class
+            OpenAiClient::class,
+            OllamaClient::class,
         ],
         style = XCollection.Style.v2
     )
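For context, IntelliJ's XML serializer cannot discover `LLMClient` subclasses on its own, so every concrete client has to be listed in `elementTypes` for the persisted collection to round-trip; that is why the Ollama client needs a registration here at all. A minimal sketch of the full annotated property, assuming a holder field (the name `llmClients` is illustrative and not shown in this diff):

// Hypothetical surrounding declaration in AppSettings2; only the
// annotation itself is visible in the diff above.
@XCollection(
    elementTypes = [
        OpenAiClient::class,
        OllamaClient::class,
    ],
    style = XCollection.Style.v2
)
var llmClients: MutableList<LLMClient> = mutableListOf(OpenAiClient())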

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientTable.kt

Lines changed: 3 additions & 1 deletion

@@ -3,6 +3,7 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients
 import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
 import com.github.blarc.ai.commits.intellij.plugin.createColumn
 import com.github.blarc.ai.commits.intellij.plugin.settings.AppSettings2
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClient
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClient
 import com.intellij.openapi.ui.DialogPanel
 import com.intellij.openapi.ui.DialogWrapper
@@ -132,7 +133,8 @@ class LLMClientTable {
         return if (newLLMClient == null) {
             // TODO(@Blarc): Is there a better way to create the list of all possible LLM Clients that implement LLMClient abstract class
             listOf(
-                OpenAiClient()
+                OpenAiClient(),
+                OllamaClient()
             )
         } else {
             listOf(newLLMClient)
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClient.kt

Lines changed: 86 additions & 0 deletions

@@ -0,0 +1,86 @@
+package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama
+
+import com.github.blarc.ai.commits.intellij.plugin.Icons
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClient
+import com.intellij.openapi.components.service
+import dev.langchain4j.data.message.UserMessage
+import dev.langchain4j.model.ollama.OllamaChatModel
+import dev.langchain4j.model.ollama.OllamaModels
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import java.time.Duration
+import javax.swing.Icon
+
+class OllamaClient(displayName: String = "Ollama") : LLMClient(
+    displayName,
+    "http://localhost:11434/",
+    null,
+    30,
+    "llama3",
+    "0.7"
+) {
+    override fun getIcon(): Icon {
+        return Icons.OLLAMA
+    }
+
+    override fun getHosts(): Set<String> {
+        return service<OllamaClientService>().hosts
+    }
+
+    override fun getModelIds(): Set<String> {
+        return service<OllamaClientService>().modelIds
+    }
+
+    override suspend fun generateCommitMessage(prompt: String): String {
+        val ollama = OllamaChatModel.builder()
+            .modelName(modelId)
+            .temperature(temperature.toDouble())
+            .timeout(Duration.ofSeconds(timeout.toLong()))
+            .baseUrl(host)
+            .build()
+
+        val response = ollama.generate(
+            listOf(
+                UserMessage.from(
+                    "user",
+                    prompt
+                )
+            )
+        )
+        return response.content().text()
+    }
+
+    override fun getRefreshModelFunction(): (suspend () -> Unit) = {
+        // Model names are set by the user.
+        val ollamaModels = OllamaModels.builder()
+            .timeout(Duration.ofSeconds(timeout.toLong()))
+            .baseUrl(host)
+            .build()
+
+        val availableModels = withContext(Dispatchers.IO) {
+            ollamaModels.availableModels()
+        }
+
+        service<OllamaClientService>().modelIds.addAll(availableModels.content()
+            .map { it.name }
+        )
+    }
+
+    override fun clone(): LLMClient {
+        val copy = OllamaClient(displayName)
+        copy.host = host
+        copy.proxyUrl = proxyUrl
+        copy.timeout = timeout
+        copy.modelId = modelId
+        copy.temperature = temperature
+        return copy
+    }
+
+    override suspend fun verifyConfiguration(newHost: String, newProxy: String?, newTimeout: String, newModelId: String, newToken: String) {
+        TODO("Not yet implemented")
+    }
+
+    override fun panel() = OllamaClientPanel(this)
+}
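For a sense of the flow: `generateCommitMessage` builds a fresh langchain4j `OllamaChatModel` from the client's configured host, model id, temperature, and timeout on every call, sends the prompt as a single user message, and returns the response text. A minimal sketch of driving it directly, assuming a local Ollama server with `llama3` pulled and assuming the `LLMClient` base constructor has no IDE-only dependencies:

import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    // Constructor defaults: http://localhost:11434/, llama3, temperature 0.7, 30 s timeout.
    val client = OllamaClient()
    val commitMessage = client.generateCommitMessage("Write a conventional commit message for this diff: ...")
    println(commitMessage)
}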
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientPanel.kt

Lines changed: 89 additions & 0 deletions

@@ -0,0 +1,89 @@
+package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama
+
+import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
+import com.github.blarc.ai.commits.intellij.plugin.isInt
+import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientPanel
+import com.github.blarc.ai.commits.intellij.plugin.temperatureValid
+import com.intellij.openapi.components.service
+import com.intellij.openapi.ui.ComboBox
+import com.intellij.openapi.ui.naturalSorted
+import com.intellij.ui.components.JBTextField
+import com.intellij.ui.dsl.builder.*
+import com.intellij.ui.util.minimumWidth
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+import javax.swing.DefaultComboBoxModel
+
+class OllamaClientPanel(private val client: OllamaClient) : LLMClientPanel {
+
+    private val hostComboBox = ComboBox(client.getHosts().toTypedArray())
+    private val socketTimeoutTextField = JBTextField()
+    private var modelComboBox = ComboBox(client.getModelIds().toTypedArray())
+
+    override fun create() = panel {
+        row {
+            label(message("settings.llmClient.host"))
+                .widthGroup("label")
+            cell(hostComboBox)
+                .applyToComponent {
+                    isEditable = true
+                }
+                .bindItem(client::host.toNullableProperty())
+                .widthGroup("input")
+        }
+        row {
+            label(message("settings.llmClient.timeout")).widthGroup("label")
+            cell(socketTimeoutTextField)
+                .applyToComponent { minimumWidth = 400 }
+                .bindIntText(client::timeout)
+                .resizableColumn()
+                .widthGroup("input")
+                .validationOnInput { isInt(it.text) }
+        }
+        row {
+            label(message("settings.llmClient.modelId"))
+                .widthGroup("label")
+
+            cell(modelComboBox)
+                .applyToComponent {
+                    isEditable = true
+                }
+                .bindItem({ client.modelId }, {
+                    if (it != null) {
+                        client.modelId = it
+                    }
+                })
+                .widthGroup("input")
+                .resizableColumn()
+                .onApply { service<OllamaClientService>().modelIds.add(modelComboBox.item) }
+
+            client.getRefreshModelFunction().let { f ->
+                button(message("settings.refreshModels")) {
+                    CoroutineScope(Dispatchers.Default).launch {
+                        f.invoke()
+                        modelComboBox.model = DefaultComboBoxModel(client.getModelIds().naturalSorted().toTypedArray())
+                        modelComboBox.item = client.modelId
+                    }
+                }
+                    .align(AlignX.RIGHT)
+                    .widthGroup("button")
+            }
+        }
+        row {
+            label(message("settings.llmClient.temperature"))
+                .widthGroup("label")
+
+            textField()
+                .bindText(client::temperature)
+                .applyToComponent { minimumWidth = 400 }
+                .resizableColumn()
+                .widthGroup("input")
+                .validationOnInput { temperatureValid(it.text) }
+
+            contextHelp(message("settings.llmClient.temperature.comment"))
+                .resizableColumn()
+                .align(AlignX.LEFT)
+        }
+    }
+}
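One caveat worth flagging in the refresh handler above: the network call inside `f.invoke()` already runs off the UI thread (it wraps the request in `Dispatchers.IO`), but the follow-up `modelComboBox.model = ...` assignment also executes on `Dispatchers.Default`, and Swing components should only be mutated on the event dispatch thread. A hedged sketch of one way to marshal the UI update back, assuming the platform's `Dispatchers.EDT` extension (from `com.intellij.openapi.application`) is available in the project's target IDE version:

import com.intellij.openapi.application.EDT
import kotlinx.coroutines.withContext

button(message("settings.refreshModels")) {
    CoroutineScope(Dispatchers.Default).launch {
        f.invoke() // network call stays off the EDT
        withContext(Dispatchers.EDT) {
            // Swing mutations happen on the event dispatch thread.
            modelComboBox.model = DefaultComboBoxModel(client.getModelIds().naturalSorted().toTypedArray())
            modelComboBox.item = client.modelId
        }
    }
}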
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientService.kt

Lines changed: 24 additions & 0 deletions

@@ -0,0 +1,24 @@
+package com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama
+
+import com.intellij.openapi.components.PersistentStateComponent
+import com.intellij.openapi.components.Service
+import com.intellij.openapi.components.State
+import com.intellij.openapi.components.Storage
+import com.intellij.util.xmlb.XmlSerializerUtil
+import com.intellij.util.xmlb.annotations.XCollection
+
+@Service(Service.Level.APP)
+@State(name = "OllamaClientService", storages = [Storage("AICommitsOllama.xml")])
+class OllamaClientService : PersistentStateComponent<OllamaClientService> {
+    @XCollection(style = XCollection.Style.v2)
+    val hosts = mutableSetOf("http://localhost:11434/")
+
+    @XCollection(style = XCollection.Style.v2)
+    val modelIds: MutableSet<String> = mutableSetOf("llama3")
+
+    override fun getState(): OllamaClientService = this
+
+    override fun loadState(state: OllamaClientService) {
+        XmlSerializerUtil.copyBean(state, this)
+    }
+}
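For orientation, this application-level service is the shared store the other new files lean on: `OllamaClient.getHosts()` and `getModelIds()` read from it, the panel's `onApply` and the refresh function write to it, and both `@XCollection` sets are persisted to `AICommitsOllama.xml` across IDE restarts. A minimal sketch of that access pattern (the model id "mistral" is an illustrative value):

import com.intellij.openapi.components.service

// Read the persisted hosts (application-level singleton).
val knownHosts: Set<String> = service<OllamaClientService>().hosts

// Persist a user-entered model id, as OllamaClientPanel's onApply does.
service<OllamaClientService>().modelIds.add("mistral") // illustrative value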
Lines changed: 10 additions & 0 deletions (diff content not rendered)
