Skip to content

Commit bfa3f7a

Browse files
GanymedeNilBlarc
authored and committed
fix(geminiGoogle): Unable to submit request because it has a topK value of 64 but the supported range is from 1 (inclusive) to 41 (exclusive).
Add option to set topK and topP in client settings.
1 parent 052884b commit bfa3f7a

File tree

7 files changed

+78
-5
lines changed

7 files changed

+78
-5
lines changed

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,14 @@
22

33
## [Unreleased]
44

5+
### Added
6+
7+
- Option to set top K and top P in Gemini Google client settings.
8+
9+
### Fixed
10+
11+
- Unable to submit request to Gemini Google because it has a topK value of 64 but the supported range is from 1 (inclusive) to 41 (exclusive).
12+
513
## [2.7.0] - 2024-11-01
614

715
### Added

gradle.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ pluginGroup = com.github.blarc
33
pluginName = AICommits
44
pluginRepositoryUrl = https://github.com/Blarc/ai-commits-intellij-plugin
55
# SemVer format -> https://semver.org
6-
pluginVersion = 2.7.0
6+
pluginVersion = 2.7.1
77

88
# https://plugins.jetbrains.com/docs/intellij/build-number-ranges.html
99
pluginSinceBuild = 233

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/AICommitsExtensions.kt

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,20 @@ fun ValidationInfoBuilder.isInt(value: String): ValidationInfo? {
5050
}
5151
}
5252

53+
/**
 * Validates that [value] is parseable as a [Double].
 *
 * Blank input is accepted (treated as "not set") and yields no error,
 * mirroring the behavior of the sibling `isInt` validator.
 *
 * @param value raw text from the settings field
 * @return null when the value is blank or a valid double,
 *         otherwise a validation error with the `validation.double` message
 */
fun ValidationInfoBuilder.isDouble(value: String): ValidationInfo? {
    if (value.isBlank()) {
        return null
    }
    // toDoubleOrNull avoids exception-based control flow; the original
    // `.let { … return … }` wrapper added no value over a direct check.
    return if (value.toDoubleOrNull() == null) {
        error(message("validation.double"))
    } else {
        null
    }
}
66+
5367
// Adds emptyText method to all cells that contain a component that implements ComponentWithEmptyText class
5468
fun <T>Cell<T>.emptyText(emptyText: String) : Cell<T> where T : JComponent, T : ComponentWithEmptyText {
5569
this.component.emptyText.text = emptyText

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/geminiGoogle/GeminiGoogleClientConfiguration.kt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ class GeminiGoogleClientConfiguration : LLMClientConfiguration(
2020
var tokenIsStored: Boolean = false
2121
@Transient
2222
var token: String? = null
23+
var topK = 40
24+
var topP = 0.95
2325

2426
companion object {
2527
const val CLIENT_NAME = "Gemini Google"
@@ -52,6 +54,8 @@ class GeminiGoogleClientConfiguration : LLMClientConfiguration(
5254
copy.temperature = temperature
5355
copy.tokenIsStored = tokenIsStored
5456
copy.token = token
57+
copy.topK = topK
58+
copy.topP = topP
5559
return copy
5660
}
5761

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/geminiGoogle/GeminiGoogleClientPanel.kt

Lines changed: 36 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,19 +3,21 @@ package com.github.blarc.ai.commits.intellij.plugin.settings.clients.geminiGoogl
33
import GeminiGoogleClientService
44
import com.github.blarc.ai.commits.intellij.plugin.AICommitsBundle.message
55
import com.github.blarc.ai.commits.intellij.plugin.emptyText
6+
import com.github.blarc.ai.commits.intellij.plugin.isDouble
7+
import com.github.blarc.ai.commits.intellij.plugin.isInt
68
import com.github.blarc.ai.commits.intellij.plugin.settings.clients.LLMClientPanel
79
import com.intellij.ui.components.JBPasswordField
8-
import com.intellij.ui.dsl.builder.Align
9-
import com.intellij.ui.dsl.builder.Panel
10-
import com.intellij.ui.dsl.builder.bindText
11-
import com.intellij.ui.dsl.builder.panel
10+
import com.intellij.ui.components.JBTextField
11+
import com.intellij.ui.dsl.builder.*
1212

1313
class GeminiGoogleClientPanel private constructor(
1414
private val clientConfiguration: GeminiGoogleClientConfiguration,
1515
val service: GeminiGoogleClientService
1616
) : LLMClientPanel(clientConfiguration) {
1717

1818
private val tokenPasswordField = JBPasswordField()
19+
private val topKTextField = JBTextField()
20+
private val topPTextField = JBTextField()
1921

2022
constructor(configuration: GeminiGoogleClientConfiguration): this(configuration, GeminiGoogleClientService.getInstance())
2123

@@ -24,9 +26,39 @@ class GeminiGoogleClientPanel private constructor(
2426
tokenRow()
2527
modelIdRow()
2628
temperatureRow()
29+
topKRow()
30+
topPRow()
2731
verifyRow()
2832
}
2933

34+
// Renders the "Top K" settings row: a width-grouped label, an integer text
// field bound to clientConfiguration.topK with inline int validation, and a
// right-aligned context-help icon.
private fun Panel.topKRow() {
    row {
        label(message("settings.geminiGoogle.topK"))
            .widthGroup("label")

        // Cell configuration calls return the cell itself (as the chained
        // form elsewhere in this panel shows), so statement form is
        // behaviorally identical to one long chain.
        val topKCell = cell(topKTextField)
        topKCell.bindIntText(clientConfiguration::topK)
        topKCell.align(Align.FILL)
        topKCell.validationOnInput { isInt(it.text) }
        topKCell.resizableColumn()

        contextHelp(message("settings.geminiGoogle.topK.comment"))
            .align(AlignX.RIGHT)
    }
}
47+
48+
// Renders the "Top P" settings row: a width-grouped label, a text field bound
// to clientConfiguration.topP with inline double validation, and a
// right-aligned context-help icon.
private fun Panel.topPRow() {
    row {
        label(message("settings.geminiGoogle.topP"))
            .widthGroup("label")
        cell(topPTextField)
            // Fix: the original setter used s.toDouble(), which throws
            // NumberFormatException when the field holds unparseable text.
            // NOTE(review): validationOnInput is assumed not to hard-block
            // applying the form — confirm; the null-safe fallback to the
            // previous value is harmless either way.
            .bindText(
                { clientConfiguration.topP.toString() },
                { s -> clientConfiguration.topP = s.toDoubleOrNull() ?: clientConfiguration.topP }
            )
            .align(Align.FILL)
            .validationOnInput { isDouble(it.text) }
            .resizableColumn()
        contextHelp(message("settings.geminiGoogle.topP.comment"))
            .align(AlignX.RIGHT)
    }
}
61+
3062
private fun Panel.tokenRow() {
3163
row {
3264
label(message("settings.geminiGoogle.token"))

src/main/kotlin/com/github/blarc/ai/commits/intellij/plugin/settings/clients/geminiGoogle/GeminiGoogleClientService.kt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,8 @@ class GeminiGoogleClientService(private val cs: CoroutineScope) : LLMClientServi
2929
.apiKey(token)
3030
.modelName(client.modelId)
3131
.temperature(client.temperature.toDouble())
32+
.topK(client.topK)
33+
.topP(client.topP)
3234
.build()
3335
}
3436

src/main/resources/messages/AiCommitsBundle.properties

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ settings.prompt.name=Name
3838
settings.prompt.content=Content
3939
validation.required=This value is required.
4040
validation.integer=Value should be an integer.
41+
validation.double=Value should be a double.
4142
validation.temperature=Temperature should be between 0 and 2.
4243
settings.prompt.comment=<ul>\
4344
<li>Customize your prompt with variables: {locale}, {diff}, {branch}, {hint}, {taskId}, {taskSummary} and {taskDescription}.</li>\
@@ -94,6 +95,17 @@ settings.gemini.location=Location
9495
settings.geminiGoogle.token=API key
9596
settings.geminiGoogle.token.comment=You can get your token <a href="https://aistudio.google.com/app/u/2/apikey">here</a>.
9697
settings.geminiGoogle.token.example=BZcxxx-xxxx-xxxxx-xxxxxxxxxxxxxxxxxxxxx
98+
settings.geminiGoogle.topK=Top K
99+
settings.geminiGoogle.topK.comment=The Top K parameter changes how the model selects tokens for output.\
100+
A Top K of 1 means the selected token is the most probable among all the tokens in the model's vocabulary (also called greedy decoding),\
101+
while a Top K of 3 means that the next token is selected from among the 3 most probable using the temperature.\
102+
For each token selection step, the Top K tokens with the highest probabilities are sampled.\
103+
Tokens are then further filtered based on topP with the final token selected using temperature sampling.
104+
settings.geminiGoogle.topP=Top P
105+
settings.geminiGoogle.topP.comment=The Top P parameter changes how the model selects tokens for output.\
106+
Tokens are selected from the most to least probable until the sum of their probabilities equals the Top P value.\
107+
For example, if tokens A, B, and C have a probability of 0.3, 0.2, and 0.1 and the Top P value is 0.5,\
108+
then the model will select either A or B as the next token by using the temperature and exclude C as a candidate.
97109

98110
settings.anthropic.token.example=sk-ant-api03-TTz_qsxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
99111
settings.anthropic.token.comment=You can get your token <a href="https://console.anthropic.com/settings/keys">here.</a>
@@ -113,3 +125,4 @@ settings.huggingface.token.example=hf_fKASPPYLkasgjasKwpSnAASRdasdCdAsddsASSDF
113125
settings.huggingface.maxNewTokens=Max new tokens
114126
settings.huggingface.waitForModel=Wait for model
115127
settings.huggingface.waitModel.comment=When a model is warm, it is ready to be used, and you will get a response relatively quickly. However, some models are cold and need to be loaded before they can be used. In that case, you will get a 503 error. Rather than doing many requests until it is loaded, you can wait for the model to be loaded.
128+

0 commit comments

Comments
 (0)