diff --git a/data-agent-frontend/src/components/agent/DataSourceConfig.vue b/data-agent-frontend/src/components/agent/DataSourceConfig.vue index 6ed2edbc3..da448cde8 100644 --- a/data-agent-frontend/src/components/agent/DataSourceConfig.vue +++ b/data-agent-frontend/src/components/agent/DataSourceConfig.vue @@ -259,18 +259,18 @@
- - - - - - +
@@ -402,9 +402,12 @@ style="width: 100%" size="large" > - - - + @@ -804,7 +807,7 @@ Edit, } from '@element-plus/icons-vue'; import datasourceService from '@/services/datasource'; - import { Datasource, AgentDatasource } from '@/services/datasource'; + import { Datasource, AgentDatasource, DatasourceType } from '@/services/datasource'; import { ApiResponse } from '@/services/common'; import { ElMessage, ElMessageBox } from 'element-plus'; import agentDatasourceService from '@/services/agentDatasource'; @@ -860,14 +863,24 @@ const targetColumnList: Ref = ref([]); const savingForeignKeys: Ref = ref(false); + // 数据源类型列表 + const datasourceTypes: Ref = ref([]); + watch(dialogVisible, newValue => { if (newValue) { loadAllDatasource(); + loadDatasourceTypes(); newDatasource.value = { port: 3306 } as Datasource; schemaName.value = ''; } }); + watch(editDialogVisible, newValue => { + if (newValue) { + loadDatasourceTypes(); + } + }); + // 初始化Agent数据源列表 const loadAgentDatasource = async () => { selectedDatasourceId.value = null; @@ -910,6 +923,19 @@ } }; + // 加载数据源类型列表 + const loadDatasourceTypes = async () => { + try { + const response = await datasourceService.getDatasourceTypes(); + if (response.success && response.data) { + datasourceTypes.value = response.data; + } + } catch (error) { + ElMessage.error('加载数据源类型失败'); + console.error('Failed to load datasource types:', error); + } + }; + // 初始化Agent数据源 const initAgentDatasource = async () => { initStatus.value = true; @@ -1586,6 +1612,9 @@ // PostgreSQL/Oracle Schema字段 schemaName, schemaNameEdit, + // 数据源类型 + datasourceTypes, + loadDatasourceTypes, // 逻辑外键管理 Connection, Link, diff --git a/data-agent-frontend/src/services/datasource.ts b/data-agent-frontend/src/services/datasource.ts index 9e63dc73f..184858fd2 100644 --- a/data-agent-frontend/src/services/datasource.ts +++ b/data-agent-frontend/src/services/datasource.ts @@ -47,6 +47,15 @@ export interface AgentDatasource { selectTables?: string[]; } +// 定义数据源类型接口 +export 
interface DatasourceType { + code: number; + typeName: string; + dialect: string; + protocol: string; + displayName: string; +} + const API_BASE_URL = '/api/datasource'; class DatasourceService { @@ -111,6 +120,12 @@ class DatasourceService { const response = await axios.post>(`${API_BASE_URL}/${id}/test`); return response.data; } + + // 8. 获取所有可用的数据源类型 + async getDatasourceTypes(): Promise> { + const response = await axios.get>(`${API_BASE_URL}/types`); + return response.data; + } } export default new DatasourceService(); diff --git a/data-agent-management/pom.xml b/data-agent-management/pom.xml index d8eeb3c42..718394e9a 100644 --- a/data-agent-management/pom.xml +++ b/data-agent-management/pom.xml @@ -106,6 +106,62 @@ h2 runtime + + + + org.apache.hive + hive-jdbc + 3.1.3 + runtime + + + + org.eclipse.jetty.aggregate + * + + + org.eclipse.jetty + * + + + + org.apache.hadoop + * + + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + + org.apache.logging.log4j + * + + + + org.apache.tomcat + * + + + org.apache.tomcat.embed + * + + + + javax.servlet + * + + + jakarta.servlet + * + + + + org.springdoc diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveDBAccessor.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveDBAccessor.java new file mode 100644 index 000000000..6b12ba7e8 --- /dev/null +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveDBAccessor.java @@ -0,0 +1,46 @@ +/* + * Copyright 2024-2026 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.alibaba.cloud.ai.dataagent.connector.impls.hive; + +import com.alibaba.cloud.ai.dataagent.connector.accessor.AbstractAccessor; +import com.alibaba.cloud.ai.dataagent.connector.ddl.DdlFactory; +import com.alibaba.cloud.ai.dataagent.connector.pool.DBConnectionPoolFactory; +import com.alibaba.cloud.ai.dataagent.enums.BizDataSourceTypeEnum; +import org.springframework.stereotype.Service; + +/** + * Hive 数据源访问器实现 + */ +@Service("hiveAccessor") +public class HiveDBAccessor extends AbstractAccessor { + + private final static String ACCESSOR_TYPE = "Hive_Accessor"; + + protected HiveDBAccessor(DdlFactory ddlFactory, DBConnectionPoolFactory poolFactory) { + super(ddlFactory, poolFactory.getPoolByDbType(BizDataSourceTypeEnum.HIVE.getTypeName())); + } + + @Override + public String getAccessorType() { + return ACCESSOR_TYPE; + } + + @Override + public boolean supportedDataSourceType(String type) { + return BizDataSourceTypeEnum.HIVE.getTypeName().equalsIgnoreCase(type); + } + +} diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcConnectionPool.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcConnectionPool.java new file mode 100644 index 000000000..acd846b8f --- /dev/null +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcConnectionPool.java @@ -0,0 +1,166 @@ +/* + * Copyright 2026 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.alibaba.cloud.ai.dataagent.connector.impls.hive; + +import com.alibaba.cloud.ai.dataagent.bo.DbConfigBO; +import com.alibaba.cloud.ai.dataagent.connector.pool.AbstractDBConnectionPool; +import com.alibaba.cloud.ai.dataagent.enums.BizDataSourceTypeEnum; +import com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum; +import com.alibaba.druid.pool.DruidDataSourceFactory; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import javax.sql.DataSource; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; + +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.DATABASE_NOT_EXIST_42000; +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.DATASOURCE_CONNECTION_FAILURE_08001; +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.INSUFFICIENT_PRIVILEGE_42501; +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.OTHERS; +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.PASSWORD_ERROR_28000; +import static com.alibaba.cloud.ai.dataagent.enums.ErrorCodeEnum.SUCCESS; + +/** + * Hive JDBC connection pool implementation. 
+ */ +@Slf4j +@Service("hiveJdbcConnectionPool") +public class HiveJdbcConnectionPool extends AbstractDBConnectionPool { + + private static final String DRIVER = "org.apache.hive.jdbc.HiveDriver"; + + @Override + public String getDriver() { + return DRIVER; + } + + @Override + public ErrorCodeEnum errorMapping(String sqlState) { + if (sqlState == null) { + return OTHERS; + } + + ErrorCodeEnum ret = ErrorCodeEnum.fromCode(sqlState); + if (ret != OTHERS) { + return ret; + } + + switch (sqlState) { + case "08001": + case "08S01": + return DATASOURCE_CONNECTION_FAILURE_08001; + case "28000": + return PASSWORD_ERROR_28000; + case "42000": + return DATABASE_NOT_EXIST_42000; + case "42501": + return INSUFFICIENT_PRIVILEGE_42501; + default: + return OTHERS; + } + } + + @Override + public boolean supportedDataSourceType(String type) { + return BizDataSourceTypeEnum.HIVE.getTypeName().equals(type); + } + + @Override + public String getConnectionPoolType() { + return "Hive_JDBC_Pool"; + } + + @Override + public DataSource createdDataSource(String url, String username, String password) throws Exception { + log.info("Creating Hive DataSource with custom configuration"); + String driver = getDriver(); + Map props = new HiveDruidProperties(driver, url, username, password, "stat").toMap(); + return DruidDataSourceFactory.createDataSource(props); + } + + private static final class HiveDruidProperties { + + private final String driver; + + private final String url; + + private final String username; + + private final String password; + + private final String filters; + + private HiveDruidProperties(String driver, String url, String username, String password, String filters) { + this.driver = driver; + this.url = url; + this.username = username; + this.password = password; + this.filters = filters; + } + + private Map toMap() { + Map props = new HashMap<>(); + props.put(DruidDataSourceFactory.PROP_DRIVERCLASSNAME, this.driver); + props.put(DruidDataSourceFactory.PROP_URL, this.url); 
+ props.put(DruidDataSourceFactory.PROP_USERNAME, this.username); + props.put(DruidDataSourceFactory.PROP_PASSWORD, this.password); + props.put(DruidDataSourceFactory.PROP_FILTERS, this.filters); + props.put(DruidDataSourceFactory.PROP_INITIALSIZE, "5"); + props.put(DruidDataSourceFactory.PROP_MINIDLE, "5"); + props.put(DruidDataSourceFactory.PROP_MAXACTIVE, "20"); + props.put(DruidDataSourceFactory.PROP_MAXWAIT, "60000"); + props.put(DruidDataSourceFactory.PROP_TIMEBETWEENEVICTIONRUNSMILLIS, "60000"); + props.put(DruidDataSourceFactory.PROP_MINEVICTABLEIDLETIMEMILLIS, "300000"); + props.put(DruidDataSourceFactory.PROP_VALIDATIONQUERY, "SELECT 1"); + props.put(DruidDataSourceFactory.PROP_TESTWHILEIDLE, "true"); + props.put(DruidDataSourceFactory.PROP_TESTONBORROW, "false"); + props.put(DruidDataSourceFactory.PROP_TESTONRETURN, "false"); + return props; + } + + } + + @Override + public ErrorCodeEnum ping(DbConfigBO config) { + log.info("Hive ping method called, url: {}", config.getUrl()); + try (Connection connection = getConnection(config); Statement stmt = connection.createStatement()) { + log.info("Hive connection obtained, executing SELECT 1"); + ResultSet rs = stmt.executeQuery("SELECT 1"); + if (rs.next()) { + rs.close(); + return SUCCESS; + } + rs.close(); + return DATASOURCE_CONNECTION_FAILURE_08001; + } + catch (SQLException e) { + log.error("Hive connection test failed, url:{}, state:{}, message:{}", config.getUrl(), e.getSQLState(), + e.getMessage()); + return errorMapping(e.getSQLState()); + } + catch (Exception e) { + log.error("Hive connection test failed with unexpected error, url:{}, message:{}", config.getUrl(), + e.getMessage()); + return DATASOURCE_CONNECTION_FAILURE_08001; + } + } + +} diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcDdl.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcDdl.java new file mode 100644 index 
000000000..541818d42 --- /dev/null +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/impls/hive/HiveJdbcDdl.java @@ -0,0 +1,231 @@ +/* + * Copyright 2024-2026 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.alibaba.cloud.ai.dataagent.connector.impls.hive; + +import com.alibaba.cloud.ai.dataagent.bo.schema.ColumnInfoBO; +import com.alibaba.cloud.ai.dataagent.bo.schema.DatabaseInfoBO; +import com.alibaba.cloud.ai.dataagent.bo.schema.ForeignKeyInfoBO; +import com.alibaba.cloud.ai.dataagent.bo.schema.ResultSetBO; +import com.alibaba.cloud.ai.dataagent.bo.schema.SchemaInfoBO; +import com.alibaba.cloud.ai.dataagent.bo.schema.TableInfoBO; +import com.alibaba.cloud.ai.dataagent.connector.SqlExecutor; +import com.alibaba.cloud.ai.dataagent.connector.ddl.AbstractJdbcDdl; +import com.alibaba.cloud.ai.dataagent.enums.BizDataSourceTypeEnum; +import org.apache.commons.compress.utils.Lists; +import org.apache.commons.lang3.StringUtils; +import org.springframework.stereotype.Service; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static com.alibaba.cloud.ai.dataagent.util.ColumnTypeUtil.wrapType; + +/** + * Hive JDBC DDL 执行器实现 + */ +@Service +public class HiveJdbcDdl extends AbstractJdbcDdl { + + @Override + public List showDatabases(Connection 
connection) { + String sql = "SHOW DATABASES"; + List databaseInfoList = Lists.newArrayList(); + try { + String[][] resultArr = SqlExecutor.executeSqlAndReturnArr(connection, sql); + if (resultArr.length <= 1) { + return Lists.newArrayList(); + } + + for (int i = 1; i < resultArr.length; i++) { + if (resultArr[i].length == 0) { + continue; + } + String database = resultArr[i][0]; + databaseInfoList.add(DatabaseInfoBO.builder().name(database).build()); + } + } + catch (SQLException e) { + throw new RuntimeException(e); + } + + return databaseInfoList; + } + + @Override + public List showSchemas(Connection connection) { + return Collections.emptyList(); + } + + @Override + public List showTables(Connection connection, String schema, String tablePattern) { + StringBuilder sql = new StringBuilder("SHOW TABLES"); + + if (StringUtils.isNotBlank(schema)) { + sql.append(" IN ").append(schema); + } + + if (StringUtils.isNotBlank(tablePattern)) { + sql.append(" LIKE '").append(tablePattern).append("'"); + } + + List tableInfoList = Lists.newArrayList(); + try { + String[][] resultArr = SqlExecutor.executeSqlAndReturnArr(connection, sql.toString()); + if (resultArr.length <= 1) { + return Lists.newArrayList(); + } + + for (int i = 1; i < resultArr.length; i++) { + if (resultArr[i].length == 0) { + continue; + } + String tableName = resultArr[i][0]; + tableInfoList.add(TableInfoBO.builder().name(tableName).build()); + } + } + catch (SQLException e) { + throw new RuntimeException(e); + } + + return tableInfoList; + } + + @Override + public List fetchTables(Connection connection, String schema, List tables) { + List tableInfoList = Lists.newArrayList(); + + for (String tableName : tables) { + try { + String sql = "DESCRIBE FORMATTED " + (StringUtils.isNotBlank(schema) ? schema + "." 
: "") + tableName; + String[][] resultArr = SqlExecutor.executeSqlAndReturnArr(connection, sql); + + String tableComment = ""; + for (int i = 1; i < resultArr.length; i++) { + if (resultArr[i].length >= 2 && "comment".equalsIgnoreCase(resultArr[i][0].trim())) { + tableComment = resultArr[i][1]; + break; + } + } + + tableInfoList.add(TableInfoBO.builder().name(tableName).description(tableComment).build()); + } + catch (SQLException e) { + tableInfoList.add(TableInfoBO.builder().name(tableName).build()); + } + } + + return tableInfoList; + } + + @Override + public List showColumns(Connection connection, String schema, String table) { + String fullTableName = StringUtils.isNotBlank(schema) ? schema + "." + table : table; + String sql = "DESCRIBE " + fullTableName; + + List columnInfoList = Lists.newArrayList(); + try { + String[][] resultArr = SqlExecutor.executeSqlAndReturnArr(connection, sql); + if (resultArr.length <= 1) { + return Lists.newArrayList(); + } + + for (int i = 1; i < resultArr.length; i++) { + if (resultArr[i].length < 2) { + continue; + } + + String colName = resultArr[i][0]; + String dataType = resultArr[i][1]; + String comment = resultArr[i].length >= 3 ? resultArr[i][2] : ""; + + if (StringUtils.isBlank(colName) || colName.startsWith("#")) { + continue; + } + + columnInfoList.add(ColumnInfoBO.builder() + .name(colName) + .description(comment) + .type(wrapType(dataType)) + .primary(false) // Hive 不支持主键 + .notnull(false) // Hive 不强制非空约束 + .build()); + } + } + catch (SQLException e) { + throw new RuntimeException(e); + } + + return columnInfoList; + } + + @Override + public List showForeignKeys(Connection connection, String schema, List tables) { + return Collections.emptyList(); + } + + @Override + public List sampleColumn(Connection connection, String schema, String table, String column) { + String fullTableName = StringUtils.isNotBlank(schema) ? schema + "." 
+ table : table; + String sql = String.format("SELECT `%s` FROM %s LIMIT 99", column, fullTableName); + + List sampleInfo = Lists.newArrayList(); + try { + String[][] resultArr = SqlExecutor.executeSqlAndReturnArr(connection, null, sql); + if (resultArr.length <= 1) { + return Lists.newArrayList(); + } + + for (int i = 1; i < resultArr.length; i++) { + if (resultArr[i].length == 0 || column.equalsIgnoreCase(resultArr[i][0])) { + continue; + } + sampleInfo.add(resultArr[i][0]); + } + } + catch (SQLException e) { + } + + // 去重 + Set siSet = sampleInfo.stream().collect(Collectors.toSet()); + sampleInfo = siSet.stream().collect(Collectors.toList()); + return sampleInfo; + } + + @Override + public ResultSetBO scanTable(Connection connection, String schema, String table) { + String fullTableName = StringUtils.isNotBlank(schema) ? schema + "." + table : table; + String sql = String.format("SELECT * FROM %s LIMIT 20", fullTableName); + + ResultSetBO resultSet = ResultSetBO.builder().build(); + try { + resultSet = SqlExecutor.executeSqlAndReturnObject(connection, schema, sql); + } + catch (SQLException e) { + throw new RuntimeException(e); + } + return resultSet; + } + + @Override + public BizDataSourceTypeEnum getDataSourceType() { + return BizDataSourceTypeEnum.HIVE; + } + +} diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/pool/DBConnectionPoolFactory.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/pool/DBConnectionPoolFactory.java index ddc4afb77..eb066d38b 100644 --- a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/pool/DBConnectionPoolFactory.java +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/connector/pool/DBConnectionPoolFactory.java @@ -47,18 +47,11 @@ public boolean isRegistered(String type) { * @return DB connection pool */ public DBConnectionPool getPoolByType(String type) { - // if (type == null || type.trim().isEmpty()) { - // 
return null; - // } - // return switch (type.toLowerCase()) { - // case "mysql", "mysqljdbcconnectionpool" -> - // poolMap.get("mysqlJdbcConnectionPool"); - // case "postgresql", "postgres", "postgresqljdbcconnectionpool" -> - // poolMap.get("postgreSqlJdbcConnectionPool"); - // case "h2", "h2jdbcconnectionpool" -> poolMap.get("h2JdbcConnectionPool"); - // default -> null; - // }; - return poolMap.get(type); + DBConnectionPool direct = poolMap.get(type); + if (direct != null) { + return direct; + } + return poolMap.values().stream().filter(p -> p.supportedDataSourceType(type)).findFirst().orElse(null); } // todo: 写一层缓存 diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/controller/DatasourceController.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/controller/DatasourceController.java index 90c028551..cc19ca59c 100644 --- a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/controller/DatasourceController.java +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/controller/DatasourceController.java @@ -15,16 +15,20 @@ */ package com.alibaba.cloud.ai.dataagent.controller; +import com.alibaba.cloud.ai.dataagent.dto.datasource.DatasourceTypeDTO; import com.alibaba.cloud.ai.dataagent.dto.schema.CreateLogicalRelationDTO; import com.alibaba.cloud.ai.dataagent.dto.schema.UpdateLogicalRelationDTO; import com.alibaba.cloud.ai.dataagent.entity.Datasource; import com.alibaba.cloud.ai.dataagent.entity.LogicalRelation; +import com.alibaba.cloud.ai.dataagent.enums.BizDataSourceTypeEnum; import com.alibaba.cloud.ai.dataagent.exception.InternalServerException; import com.alibaba.cloud.ai.dataagent.exception.InvalidInputException; import com.alibaba.cloud.ai.dataagent.service.datasource.DatasourceService; import com.alibaba.cloud.ai.dataagent.vo.ApiResponse; import jakarta.validation.Valid; +import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; import 
lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; @@ -51,6 +55,29 @@ public class DatasourceController { private final DatasourceService datasourceService; + /** + * Get all available datasource types + */ + @GetMapping("/types") + public ApiResponse<List<DatasourceTypeDTO>> getDatasourceTypes() { + // 定义标准的 JDBC 数据源类型 + List<BizDataSourceTypeEnum> standardTypes = Arrays.asList(BizDataSourceTypeEnum.MYSQL, + BizDataSourceTypeEnum.POSTGRESQL, BizDataSourceTypeEnum.DAMENG, BizDataSourceTypeEnum.SQL_SERVER, + BizDataSourceTypeEnum.ORACLE, BizDataSourceTypeEnum.HIVE); + + List<DatasourceTypeDTO> types = standardTypes.stream() + .map(type -> DatasourceTypeDTO.builder() + .code(type.getCode()) + .typeName(type.getTypeName()) + .dialect(type.getDialect()) + .protocol(type.getProtocol()) + .displayName(type.getDialect()) // 使用 dialect 作为显示名称 + .build()) + .collect(Collectors.toList()); + + return ApiResponse.success("获取数据源类型成功", types); + } + /** * Get all data source list */ diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/dto/datasource/DatasourceTypeDTO.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/dto/datasource/DatasourceTypeDTO.java new file mode 100644 index 000000000..6cbb77a70 --- /dev/null +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/dto/datasource/DatasourceTypeDTO.java @@ -0,0 +1,57 @@ +/* + * Copyright 2024-2026 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.alibaba.cloud.ai.dataagent.dto.datasource; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * 数据源类型 DTO + */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class DatasourceTypeDTO { + + /** + * 数据源类型代码 + */ + private Integer code; + + /** + * 数据源类型名称(用于后端识别) + */ + private String typeName; + + /** + * 数据库方言类型 + */ + private String dialect; + + /** + * 连接协议类型 + */ + private String protocol; + + /** + * 显示名称(用于前端展示) + */ + private String displayName; + +} diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/BizDataSourceTypeEnum.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/BizDataSourceTypeEnum.java index cc96af621..914b07746 100644 --- a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/BizDataSourceTypeEnum.java +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/BizDataSourceTypeEnum.java @@ -40,6 +40,11 @@ public enum BizDataSourceTypeEnum { */ ORACLE(7, "oracle", DatabaseDialectEnum.ORACLE.getCode(), DbAccessTypeEnum.JDBC.getCode()), + /** + * Hive database + */ + HIVE(8, "hive", DatabaseDialectEnum.HIVE.getCode(), DbAccessTypeEnum.JDBC.getCode()), + HOLOGRESS(10, "hologress", DatabaseDialectEnum.POSTGRESQL.getCode(), DbAccessTypeEnum.JDBC.getCode()), MYSQL_VPC(11, "mysql-vpc", DatabaseDialectEnum.MYSQL.getCode(), DbAccessTypeEnum.JDBC.getCode()), diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/DatabaseDialectEnum.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/DatabaseDialectEnum.java index d7933800e..b1e7545a3 100644 --- a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/DatabaseDialectEnum.java +++ 
b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/enums/DatabaseDialectEnum.java @@ -31,7 +31,9 @@ public enum DatabaseDialectEnum { SQL_SERVER("SqlServer"), - ORACLE("Oracle"); + ORACLE("Oracle"), + + HIVE("Hive"); public final String code; diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/handler/impl/HiveDatasourceTypeHandler.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/handler/impl/HiveDatasourceTypeHandler.java new file mode 100644 index 000000000..acefe10c1 --- /dev/null +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/handler/impl/HiveDatasourceTypeHandler.java @@ -0,0 +1,49 @@ +/* + * Copyright 2024-2026 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.alibaba.cloud.ai.dataagent.service.datasource.handler.impl; + +import com.alibaba.cloud.ai.dataagent.enums.BizDataSourceTypeEnum; +import com.alibaba.cloud.ai.dataagent.entity.Datasource; +import com.alibaba.cloud.ai.dataagent.service.datasource.handler.DatasourceTypeHandler; +import org.springframework.stereotype.Component; + +/** + * Hive 数据源类型处理器 + */ +@Component +public class HiveDatasourceTypeHandler implements DatasourceTypeHandler { + + @Override + public String typeName() { + return BizDataSourceTypeEnum.HIVE.getTypeName(); + } + + @Override + public String buildConnectionUrl(Datasource datasource) { + if (!hasRequiredConnectionFields(datasource)) { + return datasource.getConnectionUrl(); + } + + return String.format("jdbc:hive2://%s:%d/%s", datasource.getHost(), datasource.getPort(), + datasource.getDatabaseName()); + } + + @Override + public String normalizeTestUrl(Datasource datasource, String url) { + return url; + } + +} diff --git a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/impl/DatasourceServiceImpl.java b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/impl/DatasourceServiceImpl.java index 278cea0a7..e8100959a 100644 --- a/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/impl/DatasourceServiceImpl.java +++ b/data-agent-management/src/main/java/com/alibaba/cloud/ai/dataagent/service/datasource/impl/DatasourceServiceImpl.java @@ -101,6 +101,14 @@ public Datasource createDatasource(Datasource datasource) { datasource.setTestStatus("unknown"); } + if (datasource.getPassword() == null) { + datasource.setPassword(""); + } + + if (datasource.getUsername() == null) { + datasource.setUsername(""); + } + datasourceMapper.insert(datasource); return datasource; } @@ -115,6 +123,14 @@ public Datasource updateDatasource(Integer id, Datasource datasource) { } datasource.setId(id); + if (datasource.getPassword() == null) { + 
datasource.setPassword(""); + } + + if (datasource.getUsername() == null) { + datasource.setUsername(""); + } + datasourceMapper.updateById(datasource); return datasource; } } diff --git a/data-agent-management/src/main/resources/prompts/new-sql-generate.txt b/data-agent-management/src/main/resources/prompts/new-sql-generate.txt index 72748379c..3f21d55bf 100644 --- a/data-agent-management/src/main/resources/prompts/new-sql-generate.txt +++ b/data-agent-management/src/main/resources/prompts/new-sql-generate.txt @@ -42,6 +42,11 @@ * 若为 **MySQL**,请使用反引号 (例如 `order`)。 * 若为 **PostgreSQL/Oracle**,请使用双引号 (例如 "order")。 * 若为 **SQL Server**,请使用方括号 (例如 [order]),中文或Unicode字符串添加N前缀(如 N'中文')。 + * 若为 **Hive**,**不要使用反引号**,直接使用标识符名称 (例如 order)。 + * **Hive 特别注意**: + - **不要在 SQL 语句末尾添加分号**,Hive JDBC 不支持分号结尾。 + - 使用标准 SQL 语法,避免使用 MySQL 特有的语法(如反引号)。 + - 表名和列名通常不需要引号,除非是保留字。 # 最终指令确认 (Critical) 不管【全局任务背景】多么复杂,你现在的唯一目标是**仅完成**以下任务: diff --git a/data-agent-management/src/main/resources/prompts/sql-error-fixer.txt b/data-agent-management/src/main/resources/prompts/sql-error-fixer.txt index 843a96245..9d1e9b181 100644 --- a/data-agent-management/src/main/resources/prompts/sql-error-fixer.txt +++ b/data-agent-management/src/main/resources/prompts/sql-error-fixer.txt @@ -34,6 +34,11 @@ * 若为 **MySQL**,请使用反引号 (例如 `order`)。 * 若为 **PostgreSQL/Oracle**,请使用双引号 (例如 "order")。 * 若为 **SQL Server**,请使用方括号 (例如 [order])。 + * 若为 **Hive**,默认**不要使用反引号**,直接使用标识符名称 (例如 order);仅当标识符为保留字时才使用反引号。 + * **Hive 特别注意**: + - **不要在 SQL 语句末尾添加分号**,Hive JDBC 不支持分号结尾。 + - 使用标准 SQL 语法,避免使用 MySQL 特有的语法。 + - 表名和列名通常不需要引号,除非是保留字。 # 输出格式 请直接输出修复后的 SQL 语句,**不要输出任何额外的标记**。