Commit 2bc989e

feat(openAi): retrieve token async
1 parent b768c09 commit 2bc989e

9 files changed, +88 -63 lines changed

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/AICommitsUtils.kt

Lines changed: 18 additions & 4 deletions
@@ -5,7 +5,7 @@ import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotificatio
 import com.github.blarc.ai.commits.intellij.plugin.settings.AppSettings2
 import com.github.blarc.ai.commits.intellij.plugin.settings.ProjectSettings
 import com.intellij.credentialStore.CredentialAttributes
-import com.intellij.credentialStore.Credentials
+import com.intellij.credentialStore.OneTimeString
 import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.components.service
 import com.intellij.openapi.diff.impl.patch.IdeaTextPatchBuilder
@@ -15,6 +15,8 @@ import com.intellij.openapi.vcs.changes.Change
 import com.knuddels.jtokkit.Encodings
 import com.knuddels.jtokkit.api.ModelType
 import git4idea.repo.GitRepositoryManager
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
 import java.io.StringWriter
 import java.nio.file.FileSystems
@@ -138,9 +140,21 @@ object AICommitsUtils {
         return encoding.countTokens(prompt) > modelType.maxContextLength
     }
 
-    fun retrieveToken(title: String): String? {
-        val credentials: Credentials? = PasswordSafe.instance.get(getCredentialAttributes(title))
-        return credentials?.getPasswordAsString()
+    // TODO @Blarc: Slow operations are prohibited on EDT
+    fun saveToken(title: String, token: String) {
+        try {
+            PasswordSafe.instance.setPassword(getCredentialAttributes(title), token)
+        } catch (e: Exception) {
+            sendNotification(Notification.unableToSaveToken(e.message))
+        }
+    }
+
+    suspend fun retrieveToken(title: String): OneTimeString? {
+        val credentialAttributes = getCredentialAttributes(title)
+        val credentials = withContext(Dispatchers.IO) {
+            PasswordSafe.instance.get(credentialAttributes)
+        }
+        return credentials?.password
     }
 
     fun getCredentialAttributes(title: String): CredentialAttributes {
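
Since `retrieveToken` is now a suspend function that shifts the PasswordSafe lookup onto `Dispatchers.IO`, callers have to invoke it from a coroutine rather than directly on the EDT. A minimal sketch of such a caller, assuming a coroutine scope; the `cs` parameter and the surrounding function are hypothetical, not part of this commit:

```kotlin
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch

// Hypothetical caller: read the stored token without blocking the EDT.
fun fetchTokenInBackground(cs: CoroutineScope) {
    cs.launch(Dispatchers.Default) {
        // Suspends while PasswordSafe is queried on Dispatchers.IO.
        val token = retrieveToken("OpenAI")
        // OneTimeString -> String; passing false avoids clearing the value on this read.
        val apiKey = token?.toString(false) ?: ""
        // ... use apiKey to build the chat model ...
    }
}
```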

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/AppSettings2.kt

Lines changed: 14 additions & 3 deletions
@@ -1,13 +1,16 @@
 package com.github.blarc.ai.commits.intellij.plugin.settings
 
 import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils
+import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.getCredentialAttributes
+import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.saveToken
 import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
 import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.ollama.OllamaClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientConfiguration
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi.OpenAiClientSharedState
 import com.github.blarc.ai.commits.intellij.plugin.settings.prompts.DefaultPrompts
+import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.application.ApplicationManager
 import com.intellij.openapi.components.PersistentStateComponent
 import com.intellij.openapi.components.Service
@@ -72,7 +75,7 @@ class AppSettings2 : PersistentStateComponent<AppSettings2> {
         val appSettings = AppSettings.instance
         migrateSettingsFromVersion1(appSettings)
         val openAiLlmClient = llmClientConfigurations.find { it.displayName == "OpenAI" }
-        migrateOpenAiClientFromVersion1(openAiLlmClient, appSettings)
+        migrateOpenAiClientFromVersion1(openAiLlmClient as OpenAiClientConfiguration, appSettings)
     }
 
     private fun migrateSettingsFromVersion1(appSettings: AppSettings) {
@@ -85,14 +88,22 @@
         appExclusions = appSettings.appExclusions
     }
 
-    private fun migrateOpenAiClientFromVersion1(openAiLlmClientConfiguration: LLMClientConfiguration?, appSettings: AppSettings) {
+    private fun migrateOpenAiClientFromVersion1(openAiLlmClientConfiguration: OpenAiClientConfiguration?, appSettings: AppSettings) {
         openAiLlmClientConfiguration?.apply {
             host = appSettings.openAIHost
             appSettings.openAISocketTimeout.toIntOrNull()?.let { timeout = it }
             proxyUrl = appSettings.proxyUrl
             modelId = appSettings.openAIModelId
             temperature = appSettings.openAITemperature
-            AICommitsUtils.retrieveToken(appSettings.openAITokenTitle)?.let { token = it }
+
+            val credentialAttributes = getCredentialAttributes(appSettings.openAITokenTitle)
+            PasswordSafe.instance.getAsync(credentialAttributes)
+                .onSuccess {
+                    it?.password?.let { token ->
+                        saveToken(displayName, token.toString(false))
+                        tokenIsStored = true
+                    }
+                }
         }
 
         OpenAiClientSharedState.getInstance().hosts.addAll(appSettings.openAIHosts)
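
For the one-time migration the commit uses `PasswordSafe.getAsync`, which returns a promise and runs `onSuccess` once the credential store responds, so the settings component never blocks while waiting for the old token. A standalone sketch of the same read-then-resave pattern, assuming only the helpers shown in this diff; the title arguments and function name are placeholders:

```kotlin
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.getCredentialAttributes
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.saveToken
import com.intellij.ide.passwordSafe.PasswordSafe

// Hypothetical illustration of the non-blocking read used by the migration.
fun migrateTokenAsync(oldTitle: String, newTitle: String) {
    PasswordSafe.instance.getAsync(getCredentialAttributes(oldTitle))
        .onSuccess { credentials ->
            // Runs when the credential store responds; null if nothing was stored.
            credentials?.password?.let { token ->
                saveToken(newTitle, token.toString(false))
            }
        }
}
```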

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientConfiguration.kt

Lines changed: 0 additions & 18 deletions
@@ -1,14 +1,8 @@
 package com.github.blarc.ai.commits.intellij.plugin.settings.clients
 
-import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.getCredentialAttributes
-import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
-import com.github.blarc.ai.commits.intellij.plugin.notifications.Notification
-import com.github.blarc.ai.commits.intellij.plugin.notifications.sendNotification
-import com.intellij.ide.passwordSafe.PasswordSafe
 import com.intellij.openapi.ui.ComboBox
 import com.intellij.openapi.vcs.ui.CommitMessage
 import com.intellij.util.xmlb.annotations.Attribute
-import com.intellij.util.xmlb.annotations.Transient
 import javax.swing.Icon
 
 abstract class LLMClientConfiguration(
@@ -19,10 +13,6 @@ abstract class LLMClientConfiguration(
     @Attribute var modelId: String,
     @Attribute var temperature: String,
 ) : Cloneable, Comparable<LLMClientConfiguration> {
-    @get:Transient
-    var token: String
-        get() = retrieveToken(displayName) ?: ""
-        set(token) = saveToken(token)
 
     abstract fun getIcon(): Icon
 
@@ -52,14 +42,6 @@
 
     abstract fun panel(): LLMClientPanel
 
-    private fun saveToken(token: String) {
-        try {
-            PasswordSafe.instance.setPassword(getCredentialAttributes(displayName), token)
-        } catch (e: Exception) {
-            sendNotification(Notification.unableToSaveToken(e.message))
-        }
-    }
-
    override fun compareTo(other: LLMClientConfiguration): Int {
        return displayName.compareTo(other.displayName)
    }

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/LLMClientService.kt

Lines changed: 36 additions & 31 deletions
@@ -15,46 +15,51 @@ import kotlinx.coroutines.withContext
 
 abstract class LLMClientService<T : LLMClientConfiguration>(private val cs: CoroutineScope) {
 
-    abstract fun buildChatModel(client: T): ChatLanguageModel
+    abstract suspend fun buildChatModel(client: T): ChatLanguageModel
 
     fun generateCommitMessage(client: T, prompt: String, commitMessage: CommitMessage) {
-        val model = buildChatModel(client)
-        sendRequest(model, prompt, onSuccess = {
-            commitMessage.setCommitMessage(it)
-            AppSettings2.instance.recordHit()
-        }, onError = {
-            commitMessage.setCommitMessage(it)
-        })
+        cs.launch(Dispatchers.Default) {
+            sendRequest(client, prompt, onSuccess = {
+                commitMessage.setCommitMessage(it)
+                AppSettings2.instance.recordHit()
+            }, onError = {
+                commitMessage.setCommitMessage(it)
+            })
+        }
     }
 
     fun verifyConfiguration(client: T, label: JBLabel) {
-        val model = buildChatModel(client)
-        sendRequest(model, "test", onSuccess = {
-            label.text = message("settings.verify.valid")
-            label.icon = AllIcons.General.InspectionsOK
-        }, onError = {
-            label.text = it.wrap(80)
-            label.icon = AllIcons.General.InspectionsError
-        })
+        // TODO @Blarc: Can you make this better?
+        label.text = "Verifying configuration..."
+        cs.launch(Dispatchers.Default) {
+            sendRequest(client, "test", onSuccess = {
+                label.text = message("settings.verify.valid")
+                label.icon = AllIcons.General.InspectionsOK
+            }, onError = {
+                label.text = it.wrap(80)
+                label.icon = AllIcons.General.InspectionsError
+            })
+        }
     }
 
-    private fun sendRequest(model: ChatLanguageModel, text: String, onSuccess: suspend (r: String) -> Unit, onError: suspend (r: String) -> Unit) {
-        cs.launch(Dispatchers.Default) {
-            try {
-                val response = withContext(Dispatchers.IO) {
-                    model.generate(
-                        listOf(
-                            UserMessage.from(
-                                "user",
-                                text
-                            )
+    private suspend fun sendRequest(client: T, text: String, onSuccess: suspend (r: String) -> Unit, onError: suspend (r: String) -> Unit) {
+        try {
+            val model = buildChatModel(client)
+            val response = withContext(Dispatchers.IO) {
+                model.generate(
+                    listOf(
+                        UserMessage.from(
+                            "user",
+                            text
                         )
-                    ).content().text()
-                }
-                onSuccess(response)
-            } catch (e: Exception) {
-                onError(e.message ?: "Unknown error.")
+                    )
+                ).content().text()
             }
+            onSuccess(response)
+        } catch (e: IllegalArgumentException) {
+            onError("Invalid configuration: ${e.message ?: "unknown"}.")
+        } catch (e: Exception) {
+            onError(e.message ?: "Unknown error.")
        }
    }
}
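
With `sendRequest` now suspending and `buildChatModel` invoked inside its try block, a model that needs the stored token can fetch it off the EDT, and a failure while building the model is routed to `onError` instead of being thrown on the calling thread. A hypothetical minimal subclass sketch, assuming only the members visible in this diff; the class name is illustrative:

```kotlin
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
import dev.langchain4j.model.chat.ChatLanguageModel
import dev.langchain4j.model.openai.OpenAiChatModel
import kotlinx.coroutines.CoroutineScope

// Hypothetical subclass: buildChatModel may suspend (e.g. for the PasswordSafe read)
// because it now runs inside the coroutine launched by generateCommitMessage.
class ExampleClientService(cs: CoroutineScope) : LLMClientService<OpenAiClientConfiguration>(cs) {
    override suspend fun buildChatModel(client: OpenAiClientConfiguration): ChatLanguageModel {
        val token = retrieveToken(client.displayName)?.toString(true) ?: ""
        return OpenAiChatModel.builder()
            .apiKey(token)
            .modelName(client.modelId)
            .build()
    }
}
```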

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/ollama/OllamaClientService.kt

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ class OllamaClientService(private val cs: CoroutineScope) : LLMClientService<Oll
         }
     }
 
-    override fun buildChatModel(client: OllamaClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: OllamaClientConfiguration): ChatLanguageModel {
         return OllamaChatModel.builder()
             .modelName(client.modelId)
             .temperature(client.temperature.toDouble())

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/openAi/OpenAiClientConfiguration.kt

Lines changed: 8 additions & 2 deletions
@@ -3,11 +3,11 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi
 import com.github.blarc.ai.commits.intellij.plugin.Icons
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientConfiguration
 import com.intellij.openapi.vcs.ui.CommitMessage
+import com.intellij.util.xmlb.annotations.Transient
 import javax.swing.Icon
 
 class OpenAiClientConfiguration(
-    displayName: String = "OpenAI",
-    var organizationId: String? = null
+    displayName: String = "OpenAI"
 ) : LLMClientConfiguration(
     displayName,
     "https://api.openai.com/v1",
@@ -16,6 +16,11 @@
     "gpt-3.5-turbo",
     "0.7"
 ) {
+    var organizationId: String? = null
+    var tokenIsStored: Boolean = false
+    @Transient
+    var token: String? = null
+
     override fun getIcon(): Icon {
         return Icons.OPEN_AI
     }
@@ -39,6 +44,7 @@
         copy.modelId = modelId
         copy.organizationId = organizationId
         copy.temperature = temperature
+        copy.tokenIsStored = tokenIsStored
         return copy
     }
 
src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/openAi/OpenAiClientPanel.kt

Lines changed: 5 additions & 2 deletions
@@ -1,6 +1,7 @@
 package com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi
 
 import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
+import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.saveToken
 import com.github.blarc.ai.commits.intellij.plugin.emptyText
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientPanel
 import com.intellij.ui.components.JBPasswordField
@@ -22,8 +23,10 @@ class OpenAiClientPanel(private val clientConfiguration: OpenAiClientConfigurati
             label(message("settings.llmClient.token"))
                 .widthGroup("label")
             cell(tokenPasswordField)
-                .bindText(clientConfiguration::token)
-                .emptyText(message("settings.openAI.token.example"))
+                .bindText(getter = {""}, setter = {
+                    saveToken(clientConfiguration.displayName, it)
+                })
+                .emptyText(if (clientConfiguration.tokenIsStored) message("settings.openAI.token.stored") else message("settings.openAI.token.example"))
                 .resizableColumn()
                 .focused()
                 .widthGroup("input")
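
The password field is no longer bound to an in-memory `token` property: the getter always returns an empty string so the stored token never loads into the UI, and the setter persists whatever the user typed when the settings are applied. The same getter/setter form of `bindText` from the Kotlin UI DSL shown in isolation; the row label, extension function name, and `displayName` parameter are illustrative:

```kotlin
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.saveToken
import com.intellij.ui.components.JBPasswordField
import com.intellij.ui.dsl.builder.Panel
import com.intellij.ui.dsl.builder.bindText

// Hypothetical row: the field starts empty and only writes to PasswordSafe on apply.
fun Panel.tokenRow(displayName: String) {
    row("Token:") {
        cell(JBPasswordField())
            .bindText(getter = { "" }, setter = { typed ->
                saveToken(displayName, typed)
            })
    }
}
```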

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/openAi/OpenAiClientService.kt

Lines changed: 5 additions & 2 deletions
@@ -1,8 +1,10 @@
 package com.github.blarc.ai.commits.intellij.plugin.settings.clients.openAi
 
+import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
 import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientService
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
+import com.intellij.util.text.nullize
 import dev.langchain4j.model.chat.ChatLanguageModel
 import dev.langchain4j.model.openai.OpenAiChatModel
 import kotlinx.coroutines.CoroutineScope
@@ -19,9 +21,10 @@ class OpenAiClientService(cs: CoroutineScope) : LLMClientService<OpenAiClientCon
         fun getInstance(): OpenAiClientService = service()
     }
 
-    override fun buildChatModel(client: OpenAiClientConfiguration): ChatLanguageModel {
+    override suspend fun buildChatModel(client: OpenAiClientConfiguration): ChatLanguageModel {
+        val token = client.token.nullize(true) ?: retrieveToken(client.displayName)?.toString(true)
         val builder = OpenAiChatModel.builder()
-            .apiKey(client.token)
+            .apiKey(token ?: "")
             .modelName(client.modelId)
             .temperature(client.temperature.toDouble())
             .timeout(Duration.ofSeconds(client.timeout.toLong()))
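
`buildChatModel` now resolves the API key in two steps: a token freshly typed in the settings panel (held in the transient `token` field) takes precedence, and otherwise the stored token is read through the suspending `retrieveToken`. A small sketch of that precedence on its own; the helper name is illustrative:

```kotlin
import com.github.blarc.ai.commits.intellij.plugin.AICommitsUtils.retrieveToken
import com.intellij.util.text.nullize

// Hypothetical helper mirroring the resolution order used in buildChatModel:
// a non-blank typed token wins, otherwise fall back to the stored one.
suspend fun resolveApiKey(typedToken: String?, displayName: String): String {
    val token = typedToken.nullize(nullizeSpaces = true)  // blank or whitespace -> null
        ?: retrieveToken(displayName)?.toString(true)      // clear the OneTimeString after reading
    return token ?: ""                                     // fall back to an empty string, as the diff does
}
```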

src/main/resources/messages/MyBundle.properties

Lines changed: 1 addition & 0 deletions
@@ -72,6 +72,7 @@ settings.llmClient.temperature.comment=What sampling temperature to use, between
 the output more random,while lower values like 0.2 will make it more focused and deterministic.
 
 settings.openAI.token.example=sk-ABCdefgHIjKlxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+settings.openAI.token.stored=<hidden>
 settings.openAi.token.comment=\
   <p>You can get your token <a href="https://platform.openai.com/account/api-keys">here.</a/></p>
 settings.openAi.organizationId=Organization ID
