diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/AIProvider.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIProvider.kt
index c035e909..575aa5cb 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/ai/AIProvider.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIProvider.kt
@@ -27,6 +27,13 @@ enum class AIProvider(val displayName: String, val models: List<AIModel>) {
             AIModel("deepseek-chat", "DeepSeek-V3"),
             AIModel("deepseek-reasoner", "DeepSeek-R1")
         )
+    ),
+
+    /**
+     * Local LLM service
+     */
+    LOCALLM(
+        "LocalLLM", emptyList()
     );
 
     companion object {
@@ -34,7 +41,7 @@ enum class AIProvider(val displayName: String, val models: List<AIModel>) {
          * Get AIProvider by its display name (case-insensitive)
          */
         fun fromDisplayName(name: String?): AIProvider? {
-            return values().find { it.displayName.equals(name, ignoreCase = true) }
+            return entries.find { it.displayName.equals(name, ignoreCase = true) }
         }
 
         /**
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/AIService.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIService.kt
index a6bf9b30..3e0af6bb 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/ai/AIService.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIService.kt
@@ -9,6 +9,7 @@ import com.itangcent.spi.SpiSingleBeanProvider
  */
 @ProvidedBy(AIServiceProvider::class)
 interface AIService {
+
     /**
      * Sends a prompt to the AI service and returns the response
     * @param prompt The user prompt to send to the AI service
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/AIServiceChecker.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIServiceChecker.kt
new file mode 100644
index 00000000..4474d325
--- /dev/null
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/AIServiceChecker.kt
@@ -0,0 +1,47 @@
+package com.itangcent.ai
+
+import com.itangcent.common.logger.Log
+import com.itangcent.common.logger.traceError
+
+/**
+ * A utility object for checking the health and availability of AI services.
+ * It provides methods to verify that AI services are operational and can handle requests.
+ */
+object AIServiceHealthChecker : Log() {
+    /**
+     * Checks if the AI service is available and can handle requests.
+     * For regular AI services, it verifies by sending a simple test prompt.
+     * For Local LLM clients, it checks if there are any available models.
+     *
+     * @return true if the service is available and can handle requests, false otherwise
+     */
+    fun AIService.isAvailable(): Boolean {
+        if (this is LocalLLMClient) {
+            return this.hasAvailableModels()
+        }
+        return try {
+            val response =
+                sendPrompt(systemMessage = "Answer Question", userPrompt = "Please respond with exactly 'YES'")
+            response.contains("YES", ignoreCase = true)
+        } catch (e: Exception) {
+            LOG.traceError("Failed to check AI service", e)
+            false
+        }
+    }
+
+    /**
+     * Checks if the Local LLM client has any available models.
+     * This is used to verify that the local LLM service is properly configured and ready to use.
+     *
+     * @return true if there are available models, false otherwise
+     */
+    fun LocalLLMClient.hasAvailableModels(): Boolean {
+        try {
+            val availableModels = this.getAvailableModels()
+            return availableModels.isNotEmpty()
+        } catch (e: Exception) {
+            LOG.traceError("Failed to check AI service", e)
+            return false
+        }
+    }
+}
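For reviewers unfamiliar with the extension-function style used here, a minimal sketch of a call site follows. The `reportAvailability` function and its `println` reporting are illustrative only; in the plugin, the caller would be whatever component resolves an `AIService` through SPI and injection.

```kotlin
import com.itangcent.ai.AIService
import com.itangcent.ai.AIServiceHealthChecker.isAvailable

// Hypothetical call site: `service` stands in for however the plugin
// actually obtains its AIService instance.
fun reportAvailability(service: AIService) {
    // isAvailable() never throws: for LocalLLMClient it lists models,
    // for other providers it sends a cheap "respond with YES" probe,
    // and any exception is logged and mapped to `false`.
    if (service.isAvailable()) {
        println("AI service is reachable")
    } else {
        println("AI service is not reachable; check provider settings")
    }
}
```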
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMClient.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMClient.kt
new file mode 100644
index 00000000..bf92e303
--- /dev/null
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMClient.kt
@@ -0,0 +1,101 @@
+package com.itangcent.ai
+
+import com.itangcent.common.utils.GsonUtils
+import com.itangcent.http.HttpClient
+import com.itangcent.http.RawContentType
+import com.itangcent.http.contentType
+import com.itangcent.idea.plugin.utils.AIUtils
+import com.itangcent.intellij.extend.sub
+
+/**
+ * Client implementation for interacting with a local LLM server.
+ * This class handles the direct communication with the LLM server API.
+ */
+class LocalLLMClient(
+    private val serverUrl: String,
+    private val modelName: String,
+    private val httpClient: HttpClient
+) : AIService {
+
+    companion object {
+        private const val CHAT_COMPLETIONS_ENDPOINT = "/chat/completions"
+        private const val MODELS_ENDPOINT = "/models"
+    }
+
+    /**
+     * Sends a prompt to the local LLM service with a custom system message.
+     *
+     * @param systemMessage The system message that sets the context for the LLM
+     * @param userPrompt The user's input prompt to be processed
+     * @return The LLM's response as a string
+     * @throws AIConfigurationException if the local LLM server URL is not configured
+     * @throws AIApiException if there's an error in the API response or communication
+     */
+    override fun sendPrompt(systemMessage: String, userPrompt: String): String {
+        val fullUrl = "$serverUrl$CHAT_COMPLETIONS_ENDPOINT"
+        try {
+            val requestBodyMap = mapOf(
+                "messages" to listOf(
+                    mapOf("role" to "system", "content" to systemMessage),
+                    mapOf("role" to "user", "content" to userPrompt)
+                ),
+                "model" to modelName,
+                "stream" to false
+            )
+
+            val requestBody = GsonUtils.toJson(requestBodyMap)
+            val httpRequest = httpClient.post(fullUrl)
+                .contentType(RawContentType.APPLICATION_JSON)
+                .body(requestBody)
+
+            val httpResponse = httpRequest.call()
+
+            if (httpResponse.code() != 200) {
+                val errorMessage =
+                    "Local LLM server returned status code ${httpResponse.code()}: ${httpResponse.string()}"
+                throw AIApiException(errorMessage)
+            }
+
+            val responseBody = httpResponse.string() ?: throw AIApiException("Empty response from Local LLM server")
+            val jsonElement = GsonUtils.parseToJsonTree(responseBody)
+            val content = jsonElement.sub("choices")?.asJsonArray?.firstOrNull()
+                ?.asJsonObject?.sub("message")?.sub("content")?.asString
+            val errMsg = jsonElement.sub("error")?.asString
+            return content?.let { AIUtils.cleanMarkdownCodeBlocks(it) }
+                ?: throw AIApiException(errMsg ?: "Could not parse response from Local LLM server")
+        } catch (e: AIException) {
+            throw e
+        } catch (e: Exception) {
+            throw AIApiException("Error calling Local LLM server: ${e.message}", e)
+        }
+    }
+
+    /**
+     * Retrieves the list of available models from the local LLM server.
+     *
+     * @return List of model IDs available on the server
+     * @throws AIApiException if there's an error in the API response or communication
+     */
+    fun getAvailableModels(): List<String> {
+        val url = "$serverUrl$MODELS_ENDPOINT"
+
+        try {
+            val response = httpClient.get(url).call()
+
+            if (response.code() != 200) {
+                throw AIApiException("Failed to get models: ${response.code()}")
+            }
+
+            val responseBody = response.string() ?: throw AIApiException("Empty response from server")
+            val jsonElement = GsonUtils.parseToJsonTree(responseBody)
+            val dataArray = jsonElement.sub("data")?.asJsonArray
+                ?: throw AIApiException("Invalid response format: missing 'data' array")
+
+            return dataArray.mapNotNull { modelObj ->
+                modelObj.asJsonObject.sub("id")?.asString
+            }
+        } catch (e: Exception) {
+            throw AIApiException("Error getting models: ${e.message}", e)
+        }
+    }
+}
\ No newline at end of file
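The client speaks the OpenAI-compatible REST shape (`POST {serverUrl}/chat/completions`, `GET {serverUrl}/models`), so a sanity check against a local server is straightforward. A minimal sketch, assuming an LM Studio/Ollama-style endpoint and a pre-built `HttpClient`; the URL and model id below are placeholders, not defaults shipped with the plugin:

```kotlin
// Placeholders: URL and model id are examples; `httpClient` is assumed to
// come from HttpClientProvider, as it does elsewhere in the plugin.
val client = LocalLLMClient(
    serverUrl = "http://localhost:1234/v1",
    modelName = "qwen2.5-7b-instruct",
    httpClient = httpClient
)

// getAvailableModels() collects the data[*].id fields of GET /models.
println(client.getAvailableModels())

// sendPrompt() posts an OpenAI-style chat body and returns
// choices[0].message.content with markdown code fences stripped.
val reply = client.sendPrompt(
    systemMessage = "You are an API documentation assistant.",
    userPrompt = "Describe this endpoint in one sentence."
)
println(reply)
```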
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMServerDiscoverer.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMServerDiscoverer.kt
new file mode 100644
index 00000000..f4fbf864
--- /dev/null
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMServerDiscoverer.kt
@@ -0,0 +1,78 @@
+package com.itangcent.ai
+
+import com.itangcent.ai.AIServiceHealthChecker.isAvailable
+import com.itangcent.common.logger.Log
+import com.itangcent.http.HttpClient
+
+/**
+ * Utility class for discovering and validating LocalLLM servers.
+ * This class attempts to find a working LocalLLM server by trying various common API endpoint suffixes.
+ *
+ * @property httpClient The HTTP client used for making requests
+ * @property possibleSuffixes List of possible API endpoint suffixes to try
+ */
+class LocalLLMServerDiscoverer(
+    private val httpClient: HttpClient,
+    private val possibleSuffixes: List<String> = DEFAULT_SUFFIXES
+) {
+    companion object : Log() {
+        private val DEFAULT_SUFFIXES = listOf(
+            "/v1",
+            "/api/v1",
+            "/api",
+            "/v1/api"
+        )
+    }
+
+    /**
+     * Attempts to discover a working LocalLLM server URL from a base URL.
+     * The method will try the base URL first, then attempt various API endpoint suffixes.
+     *
+     * @param baseUrl The base URL to start searching from (e.g., "http://localhost:8000")
+     * @return The working server URL if found, null otherwise
+     */
+    fun discoverServer(baseUrl: String): String? {
+        val trimmedUrl = baseUrl.trimEnd('/')
+        if (validateLocalLLMServer(trimmedUrl)) {
+            LOG.debug("Found working server at base URL: $trimmedUrl")
+            return trimmedUrl
+        }
+
+        // Try all possible suffixes
+        for (suffix in possibleSuffixes) {
+            if (baseUrl.endsWith(suffix)) {
+                LOG.debug("Skipping suffix $suffix as it's already in the base URL")
+                continue
+            }
+            val serverUrl = if (suffix.isEmpty()) trimmedUrl else "$trimmedUrl$suffix"
+            if (validateLocalLLMServer(serverUrl)) {
+                LOG.debug("Found working server at URL: $serverUrl")
+                return serverUrl
+            }
+        }
+
+        LOG.warn("No working LocalLLM server found for base URL: $baseUrl")
+        return null
+    }
+
+    /**
+     * Validates if a given URL points to a working LocalLLM server.
+     * A server is considered working if it responds to health checks and supports the required API endpoints.
+     *
+     * @param serverUrl The URL to validate
+     * @return true if the server is working, false otherwise
+     */
+    private fun validateLocalLLMServer(serverUrl: String): Boolean {
+        try {
+            val localLLMService = LocalLLMClient(
+                serverUrl = serverUrl,
+                modelName = "",
+                httpClient = httpClient
+            )
+            return localLLMService.isAvailable()
+        } catch (e: Exception) {
+            LOG.debug("Server validation failed for $serverUrl: ${e.message}")
+            return false
+        }
+    }
+}
\ No newline at end of file
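Typical use, sketched below: hand the discoverer a bare host/port and let it probe the base URL plus each suffix until a `/models` health check passes. The URL is illustrative, and `httpClient` is again assumed to come from `HttpClientProvider`.

```kotlin
// Tries http://localhost:1234 first, then .../v1, .../api/v1, .../api, .../v1/api.
val discoverer = LocalLLMServerDiscoverer(httpClient)
val serverUrl = discoverer.discoverServer("http://localhost:1234")
    ?: error("No OpenAI-compatible endpoint answered the /models probe")
```

Note that validation reuses `AIServiceHealthChecker.isAvailable()`, so "working" here means the `/models` listing is non-empty, not merely that the port is open.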
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMService.kt b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMService.kt
new file mode 100644
index 00000000..b803a5dc
--- /dev/null
+++ b/idea-plugin/src/main/kotlin/com/itangcent/ai/LocalLLMService.kt
@@ -0,0 +1,62 @@
+package com.itangcent.ai
+
+import com.google.inject.Inject
+import com.google.inject.Singleton
+import com.itangcent.idea.plugin.condition.ConditionOnSetting
+import com.itangcent.idea.plugin.settings.helper.AISettingsHelper
+import com.itangcent.suv.http.HttpClientProvider
+
+
+/**
+ * Implementation of AIService that interfaces with a local LLM server.
+ */
+@Singleton
+@ConditionOnSetting("aiProvider", havingValue = "LocalLLM")
+open class LocalLLMService : AIService {
+
+    companion object
+
+    @Inject
+    private lateinit var aiSettingsHelper: AISettingsHelper
+
+    @Inject
+    private lateinit var httpClientProvider: HttpClientProvider
+
+    private val rawLocalLLMService: LocalLLMClient by lazy {
+        LocalLLMClient(getServerUrl(), getModelName(), httpClientProvider.getHttpClient())
+    }
+
+    /**
+     * Sends a prompt to the local LLM service with a custom system message.
+     *
+     * @param systemMessage The system message that sets the context for the LLM
+     * @param userPrompt The user's input prompt to be processed
+     * @return The LLM's response as a string
+     * @throws AIConfigurationException if the local LLM server URL is not configured
+     * @throws AIApiException if there's an error in the API response or communication
+     */
+    override fun sendPrompt(systemMessage: String, userPrompt: String): String {
+        return rawLocalLLMService.sendPrompt(systemMessage, userPrompt)
+    }
+
+    /**
+     * Retrieves the configured local LLM server URL from settings.
+     *
+     * @return The configured server URL
+     * @throws AIConfigurationException if the URL is not configured
+     */
+    private fun getServerUrl(): String {
+        return aiSettingsHelper.aiLocalServerUrl
+            ?: throw AIConfigurationException("Local LLM server URL is not configured")
+    }
+
+    /**
+     * Retrieves the configured model name from settings or returns a default value.
+     *
+     * @return The configured model name or "local-model" as default
+     */
+    private fun getModelName(): String {
+        return aiSettingsHelper.aiModel ?: "local-model"
+    }
+}
+
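For context, the settings under which this binding becomes active look roughly like the sketch below; `@ConditionOnSetting` keeps `LocalLLMService` out of the injector unless `aiProvider` is exactly "LocalLLM". The URL is an example value, and constructing `Settings` directly is assumed to work from its property defaults.

```kotlin
// Example configuration under which LocalLLMService would be selected.
val settings = Settings().apply {
    aiEnable = true
    aiProvider = "LocalLLM"                        // AIProvider.LOCALLM.displayName
    aiLocalServerUrl = "http://localhost:1234/v1"  // example URL
    aiModel = null                                 // service falls back to "local-model"
}
```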
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.form b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.form
index 4a8fbeb9..e6ba7113 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.form
+++ b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.form
[The .form hunks (@@ -1,21 +1,20 @@ through @@ -160,8 +204,7 @@) are not reproduced here: the GUI-designer XML was stripped to bare +/- markers during extraction. Judging from the bindings in EasyApiSettingAIGUI.kt below, the layout changes add the server-URL panel with its load button, the API-key panel, and the model text field that alternates with the model combo box.]
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.kt b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.kt
index bf1478f2..0f11a185 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/dialog/EasyApiSettingAIGUI.kt
@@ -1,12 +1,17 @@
 package com.itangcent.idea.plugin.dialog
 
+import com.google.inject.Inject
 import com.itangcent.ai.AIModel
 import com.itangcent.ai.AIProvider
+import com.itangcent.ai.LocalLLMClient
+import com.itangcent.ai.LocalLLMServerDiscoverer
 import com.itangcent.common.constant.Language
 import com.itangcent.idea.plugin.configurable.AbstractEasyApiSettingGUI
 import com.itangcent.idea.plugin.settings.Settings
 import com.itangcent.idea.utils.SwingUtils
 import com.itangcent.idea.utils.SwingUtils.DisplayItem
+import com.itangcent.intellij.context.ActionContext
+import com.itangcent.suv.http.HttpClientProvider
 import javax.swing.*
 
 /**
@@ -17,18 +22,31 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
     private var rootPanel: JPanel? = null
     private var aiEnableCheckBox: JCheckBox? = null
     private var aiProviderComboBox: JComboBox<String>? = null
-    private var aiModelComboBox: JComboBox<DisplayItem<AIModel>>? = null
+    private lateinit var aiModelComboBox: JComboBox<DisplayItem<AIModel>>
+    private var aiModelTextField: JTextField? = null
     private var aiApiKeyField: JPasswordField? = null
+    private var apiKeyLabel: JLabel? = null
+    private var aiLocalServerUrlField: JTextField? = null
+    private var serverUrlLabel: JLabel? = null
+    private var loadServerButton: JButton? = null
+    private var serverUrlPanel: JPanel? = null
+    private var apiKeyPanel: JPanel? = null
     private var aiEnableCacheCheckBox: JCheckBox? = null
     private var aiTranslationEnabledCheckBox: JCheckBox? = null
     private var translationTargetLanguageComboBox: JComboBox<DisplayItem<Language>>? = null
     private var aiMethodInferEnabledCheckBox: JCheckBox? = null
 
+    // Flag to track whether to use combo box or text field for model selection
+    private var useModelComboBox: Boolean = false
+
     // Store the last selected AI Provider to detect changes
     private var lastSelectedAIProvider: AIProvider? = null
-
+
     // List of supported languages for translation
-    private val supportedLanguages = Language.values().toList()
+    private val supportedLanguages = Language.entries
+
+    @Inject
+    private lateinit var actionContext: ActionContext
 
     override fun getRootPanel(): JComponent? {
         return rootPanel
     }
 
@@ -38,11 +56,12 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
         super.onCreate()
         setupAIProviderComboBox()
         setupTranslationLanguageComboBox()
+        setupLocalServerUrlField()
     }
 
     private fun setupAIProviderComboBox() {
         // Initialize the AIProvider combo box with display names
-        val displayNames = AIProvider.values().map { it.displayName }
+        val displayNames = AIProvider.entries.map { it.displayName }
         aiProviderComboBox?.model = DefaultComboBoxModel(displayNames.toTypedArray())
 
         aiProviderComboBox?.addActionListener {
@@ -62,7 +81,7 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
             updateModelComboBox()
         }
     }
-
+
     private fun setupTranslationLanguageComboBox() {
         // Initialize the language combo box with supported languages using the display format "code(name)"
         translationTargetLanguageComboBox?.model = SwingUtils.createComboBoxModel(
             supportedLanguages
         ) { "${it.code}(${it.displayName})" }
     }
 
@@ -70,22 +89,125 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
+    private fun setupLocalServerUrlField() {
+        aiProviderComboBox?.addActionListener {
+            val selectedProvider = aiProviderComboBox?.selectedItem as? String
+            val isLocalLLM = selectedProvider == AIProvider.LOCALLM.displayName
+
+            serverUrlPanel?.isVisible = isLocalLLM
+            apiKeyPanel?.isVisible = !isLocalLLM
+
+            // For LocalLLM, we'll determine which input to show after checking available models
+            if (isLocalLLM && aiLocalServerUrlField?.text?.isNotBlank() == true) {
+                checkLocalLLMModels()
+            } else if (!isLocalLLM) {
+                updateModelComboBox()
+            }
+        }
+
+        loadServerButton?.addActionListener {
+            checkLocalLLMModels()
+        }
+    }
+
+    private fun checkLocalLLMModels() = actionContext.runAsync {
+        val serverUrl = actionContext.callInSwingUI { aiLocalServerUrlField?.text?.trim() } ?: return@runAsync
+        val httpClientProvider = actionContext.instance(HttpClientProvider::class)
+        val serverDiscoverer = LocalLLMServerDiscoverer(httpClientProvider.getHttpClient())
+
+        try {
+            val finalUrl = serverDiscoverer.discoverServer(serverUrl)
+            if (finalUrl != null) {
+                actionContext.runInSwingUI { aiLocalServerUrlField?.text = finalUrl }
+                // Try to get available models
+                try {
+                    val localLLMClient = LocalLLMClient(
+                        serverUrl = finalUrl,
+                        modelName = "",
+                        httpClient = httpClientProvider.getHttpClient()
+                    )
+                    val availableModels = localLLMClient.getAvailableModels()
+                    actionContext.runInSwingUI {
+                        if (availableModels.isNotEmpty()) {
+                            val inputModel = aiModelTextField?.text
+                            // Show model combo box and populate models
+                            useModelComboBox = true
+                            aiModelComboBox.isVisible = true
+                            aiModelTextField?.isVisible = false
+                            aiModelComboBox.removeAllItems()
+                            availableModels.forEach { model ->
+                                aiModelComboBox.addItem(
+                                    DisplayItem(item = AIModel(model, model), displayText = model)
+                                )
+                            }
+                            if (inputModel != null) {
+                                SwingUtils.setSelectedItem(
+                                    aiModelComboBox,
+                                    AIModel(inputModel, inputModel)
+                                ) { a, b -> a.id == b.id }
+                            }
+                        } else {
+                            // No models available, show text field
+                            useModelComboBox = false
+                            aiModelComboBox.isVisible = false
+                            aiModelTextField?.isVisible = true
+                            aiModelTextField?.text = "local-model"
+                        }
+                    }
+                } catch (_: Exception) {
+                    // If we can't get models, show text field
+                    actionContext.runInSwingUI {
+                        useModelComboBox = false
+                        aiModelComboBox.isVisible = false
+                        aiModelTextField?.isVisible = true
+                        aiModelTextField?.text = "local-model"
+                    }
+                }
+            } else {
+                JOptionPane.showMessageDialog(
+                    rootPanel,
+                    "Could not connect to the local LLM server. Please check the URL and try again.",
+                    "Connection Error",
+                    JOptionPane.ERROR_MESSAGE
+                )
+            }
+        } catch (e: Exception) {
+            JOptionPane.showMessageDialog(
+                rootPanel,
+                "Error connecting to server: ${e.message}",
+                "Error",
+                JOptionPane.ERROR_MESSAGE
+            )
+        }
+    }
+
     private fun updateModelComboBox() {
         val selectedDisplayName = aiProviderComboBox?.selectedItem as? String
         val aiProvider = findAIProviderByDisplayName(selectedDisplayName)
 
         if (aiProvider != null) {
-            // Create model with display format "$id($displayName)"
-            aiModelComboBox?.model = SwingUtils.createComboBoxModel(
-                aiProvider.models
-            ) { "${it.id}(${it.displayName})" }
-
-            // Select the first model by default
-            if (aiModelComboBox?.itemCount ?: 0 > 0) {
-                aiModelComboBox?.selectedIndex = 0
+            // For providers with models, show combo box
+            useModelComboBox = aiProvider.models.isNotEmpty()
+            aiModelComboBox.isVisible = useModelComboBox
+            aiModelTextField?.isVisible = !useModelComboBox
+            aiModelTextField?.text = settingsInstance?.aiModel ?: "local-model"
+
+            if (useModelComboBox) {
+                // Create model with display format "$id($displayName)"
+                aiModelComboBox.model = SwingUtils.createComboBoxModel(
+                    aiProvider.models
+                ) { "${it.id}(${it.displayName})" }
+
+                // Select the first model by default
+                if (aiModelComboBox.itemCount > 0) {
+                    aiModelComboBox.selectedIndex = 0
+                }
             }
         } else {
-            aiModelComboBox?.model = DefaultComboBoxModel(emptyArray<DisplayItem<AIModel>>())
+            useModelComboBox = false
+            aiModelComboBox.model = DefaultComboBoxModel(emptyArray<DisplayItem<AIModel>>())
+            aiModelComboBox.isVisible = false
+            aiModelTextField?.isVisible = true
         }
     }
 
@@ -93,7 +215,7 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
      * Find AIProvider by its display name
      */
     private fun findAIProviderByDisplayName(displayName: String?): AIProvider? {
-        return AIProvider.values().find { it.displayName == displayName }
+        return AIProvider.entries.find { it.displayName == displayName }
     }
 
     /**
@@ -102,21 +224,26 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
     override fun readSettings(settings: Settings) {
         settings.aiEnable = aiEnableCheckBox?.isSelected == true
         settings.aiProvider = aiProviderComboBox?.selectedItem as? String
-
-        // Get the model ID from the selected item
-        settings.aiModel = aiModelComboBox?.let { SwingUtils.getSelectedItem(it)?.id }
-
+        settings.aiLocalServerUrl = aiLocalServerUrlField?.text?.trim()
+
+        // Get the model ID based on the selected provider
+        settings.aiModel = if (useModelComboBox) {
+            SwingUtils.getSelectedItem(aiModelComboBox)?.id
+        } else {
+            aiModelTextField?.text?.trim()
+        }
+
         settings.aiToken = aiApiKeyField?.password?.let { String(it) }?.takeIf { it.isNotBlank() }
         settings.aiEnableCache = aiEnableCacheCheckBox?.isSelected == true
-
+
         // Translation settings
         settings.aiTranslationEnabled = aiTranslationEnabledCheckBox?.isSelected == true
-
+
         // Store the language code in settings
-        settings.aiTranslationTargetLanguage = translationTargetLanguageComboBox?.let {
-            SwingUtils.getSelectedItem(it)?.code
+        settings.aiTranslationTargetLanguage = translationTargetLanguageComboBox?.let {
+            SwingUtils.getSelectedItem(it)?.code
         }
-
+
         // Method inference settings
         settings.aiMethodInferEnabled = aiMethodInferEnabledCheckBox?.isSelected == true
     }
@@ -128,50 +255,59 @@ class EasyApiSettingAIGUI : AbstractEasyApiSettingGUI() {
         super.setSettings(settings)
 
         val aiProvider = AIProvider.fromDisplayName(settings.aiProvider) ?: AIProvider.OPENAI
+        val isLocalLLM = aiProvider == AIProvider.LOCALLM
+
+        // Show/hide appropriate fields based on provider
+
+        serverUrlPanel?.isVisible = isLocalLLM
+        apiKeyPanel?.isVisible = !isLocalLLM
 
         aiEnableCheckBox?.isSelected = settings.aiEnable
         aiProviderComboBox?.selectedItem = aiProvider.displayName
+        aiLocalServerUrlField?.text = settings.aiLocalServerUrl
         lastSelectedAIProvider = aiProvider
 
-        // Update model combo box
-        updateModelComboBox()
-
-        // Find and select the model with matching ID
-        val modelId = settings.aiModel
-        if (modelId != null && aiModelComboBox != null) {
-            // Find the model with the matching ID
-            val modelToSelect = aiProvider.models.find { it.id == modelId }
-
-            // Set the selected item
-            if (modelToSelect != null) {
-                SwingUtils.setSelectedItem(aiModelComboBox!!, modelToSelect) { a, b -> a.id == b.id }
-            }
+        // For LocalLLM with server URL, check available models
+        if (isLocalLLM && settings.aiLocalServerUrl?.isNotBlank() == true) {
+            checkLocalLLMModels()
+        } else {
+            // For other providers, update model combo box
+            updateModelComboBox()
         }
 
         aiApiKeyField?.text = settings.aiToken ?: ""
         aiEnableCacheCheckBox?.isSelected = settings.aiEnableCache
-
+
         // Translation settings
         aiTranslationEnabledCheckBox?.isSelected = settings.aiTranslationEnabled
-
+
        // Set the selected language by code or default to English if not set
         val targetLanguageCode = settings.aiTranslationTargetLanguage
         if (targetLanguageCode != null && translationTargetLanguageComboBox != null) {
             // Find the language with the matching code
             val languageToSelect = Language.fromCode(targetLanguageCode)
-
+
             // Set the selected item
             if (languageToSelect != null) {
-                SwingUtils.setSelectedItem(translationTargetLanguageComboBox!!, languageToSelect) { a, b -> a.code == b.code }
+                SwingUtils.setSelectedItem(
+                    translationTargetLanguageComboBox!!,
+                    languageToSelect
+                ) { a, b -> a.code == b.code }
             } else {
                 // Default to English if code not found
-                SwingUtils.setSelectedItem(translationTargetLanguageComboBox!!, Language.getDefault()) { a, b -> a.code == b.code }
+                SwingUtils.setSelectedItem(
+                    translationTargetLanguageComboBox!!,
+                    Language.getDefault()
+                ) { a, b -> a.code == b.code }
             }
         } else {
             // Default to English if no language code is set
             if (translationTargetLanguageComboBox != null) {
-                SwingUtils.setSelectedItem(translationTargetLanguageComboBox!!, Language.getDefault()) { a, b -> a.code == b.code }
+                SwingUtils.setSelectedItem(
+                    translationTargetLanguageComboBox!!,
+                    Language.getDefault()
+                ) { a, b -> a.code == b.code }
             }
         }
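One note on the threading in `checkLocalLLMModels()`: all network I/O is pushed off the Swing EDT with `runAsync`, and every widget read or write hops back through `callInSwingUI`/`runInSwingUI`. Distilled into a fragment (the Swing fields and `httpClient` are assumed to be in scope, as they are in `EasyApiSettingAIGUI`):

```kotlin
// Distilled from checkLocalLLMModels() above; illustrative fragment only.
private fun refreshModels() = actionContext.runAsync {    // background thread
    val url = actionContext.callInSwingUI {               // read widget on the EDT
        aiLocalServerUrlField?.text?.trim()
    } ?: return@runAsync

    val models = LocalLLMClient(url, "", httpClient)
        .getAvailableModels()                             // network call, off the EDT

    actionContext.runInSwingUI {                          // mutate widgets on the EDT
        // populate aiModelComboBox, or fall back to aiModelTextField
    }
}
```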
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/Settings.kt b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/Settings.kt
index 346dd62c..5cff7ffb 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/Settings.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/Settings.kt
@@ -129,6 +129,11 @@ class Settings : ProjectSettingsSupport, ApplicationSettingsSupport {
      */
     override var aiToken: String? = null
 
+    /**
+     * Local LLM server URL (for LocalLLM provider)
+     */
+    override var aiLocalServerUrl: String? = null
+
     /**
      * Enable AI integration
      */
@@ -218,6 +223,7 @@ class Settings : ProjectSettingsSupport, ApplicationSettingsSupport {
         if (!remoteConfig.contentEquals(other.remoteConfig)) return false
         if (aiProvider != other.aiProvider) return false
         if (aiToken != other.aiToken) return false
+        if (aiLocalServerUrl != other.aiLocalServerUrl) return false
         if (aiEnable != other.aiEnable) return false
         if (aiModel != other.aiModel) return false
         if (aiEnableCache != other.aiEnableCache) return false
@@ -273,6 +279,7 @@ class Settings : ProjectSettingsSupport, ApplicationSettingsSupport {
         result = 31 * result + remoteConfig.contentHashCode()
         result = 31 * result + (aiProvider?.hashCode() ?: 0)
         result = 31 * result + (aiToken?.hashCode() ?: 0)
+        result = 31 * result + (aiLocalServerUrl?.hashCode() ?: 0)
         result = 31 * result + aiEnable.hashCode()
         result = 31 * result + (aiModel?.hashCode() ?: 0)
         result = 31 * result + aiEnableCache.hashCode()
@@ -310,6 +317,7 @@ class Settings : ProjectSettingsSupport, ApplicationSettingsSupport {
                 "remoteConfig=${remoteConfig.contentToString()}, " +
                 "aiProvider=$aiProvider, " +
                 "aiToken=$aiToken, " +
+                "aiLocalServerUrl=$aiLocalServerUrl, " +
                 "aiEnable=$aiEnable, " +
                 "aiModel=$aiModel, " +
                 "aiEnableCache=$aiEnableCache, " +
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/helper/AISettingsHelper.kt b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/helper/AISettingsHelper.kt
index a93233a8..1feb0e63 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/helper/AISettingsHelper.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/helper/AISettingsHelper.kt
@@ -29,6 +29,11 @@ class AISettingsHelper {
      */
     val aiToken: String? get() = settingBinder.read().aiToken
 
+    /**
+     * Get the local LLM server URL from settings
+     */
+    val aiLocalServerUrl: String? get() = settingBinder.read().aiLocalServerUrl
+
     /**
      * Get the AI provider from settings
      */
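The `Settings.kt` hunks above illustrate the invariant for persisted fields: a new property must also appear in `equals`, `hashCode`, `toString`, and `copyTo` (below), or settings comparison and change detection silently drift. A minimal sketch of why, assuming `Settings` can be instantiated directly with its property defaults:

```kotlin
// Two Settings differing only in aiLocalServerUrl must not compare equal,
// otherwise a "did anything change?" check never notices an edited URL.
val a = Settings().apply { aiLocalServerUrl = "http://localhost:1234/v1" }
val b = Settings().apply { aiLocalServerUrl = "http://localhost:11434/v1" }
check(a != b) // holds only because equals() covers the new field
```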
diff --git a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/xml/ApplicationSettings.kt b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/xml/ApplicationSettings.kt
index cd987774..e8425ca0 100644
--- a/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/xml/ApplicationSettings.kt
+++ b/idea-plugin/src/main/kotlin/com/itangcent/idea/plugin/settings/xml/ApplicationSettings.kt
@@ -56,6 +56,7 @@ interface ApplicationSettingsSupport {
     // AI integration
     var aiProvider: String?
     var aiToken: String?
+    var aiLocalServerUrl: String?
     var aiEnable: Boolean
     var aiModel: String?
     var aiEnableCache: Boolean
@@ -108,6 +109,7 @@ interface ApplicationSettingsSupport {
         newSetting.remoteConfig = this.remoteConfig
         newSetting.aiProvider = this.aiProvider
         newSetting.aiToken = this.aiToken
+        newSetting.aiLocalServerUrl = this.aiLocalServerUrl
         newSetting.aiEnable = this.aiEnable
         newSetting.aiModel = this.aiModel
         newSetting.aiEnableCache = this.aiEnableCache
@@ -220,6 +222,8 @@ class ApplicationSettings : ApplicationSettingsSupport {
 
     override var aiToken: String? = null
 
+    override var aiLocalServerUrl: String? = null
+
     override var aiEnable: Boolean = false
 
     override var aiModel: String? = null
diff --git a/idea-plugin/src/main/resources/META-INF/services/com.itangcent.ai.AIService b/idea-plugin/src/main/resources/META-INF/services/com.itangcent.ai.AIService
index ef069804..312a409b 100644
--- a/idea-plugin/src/main/resources/META-INF/services/com.itangcent.ai.AIService
+++ b/idea-plugin/src/main/resources/META-INF/services/com.itangcent.ai.AIService
@@ -1,2 +1,3 @@
 com.itangcent.ai.OpenAIService
-com.itangcent.ai.DeepSeekService
\ No newline at end of file
+com.itangcent.ai.DeepSeekService
+com.itangcent.ai.LocalLLMService
\ No newline at end of file
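Finally, the registration change: the `META-INF/services` file is a standard `ServiceLoader` manifest, so all three implementations are discoverable, and the `@ConditionOnSetting` annotations decide which single one is actually bound for a given `aiProvider` value. A sketch of what the manifest exposes (illustrative; the plugin resolves the bean through its own SPI layer rather than raw `ServiceLoader`):

```kotlin
import java.util.ServiceLoader

// Enumerate the registered AIService implementations; with this patch the
// output should include com.itangcent.ai.LocalLLMService alongside the
// OpenAI and DeepSeek services.
fun main() {
    ServiceLoader.load(com.itangcent.ai.AIService::class.java)
        .forEach { println(it.javaClass.name) }
}
```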