Skip to content

Commit ce9dc5b

Browse files
authored
Merge pull request #1316 from tmlx1990/hive
feat: Add Hive support
2 parents ee38444 + 90d4c07 commit ce9dc5b

File tree

7 files changed

+1088
-3
lines changed

7 files changed

+1088
-3
lines changed
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
package ai.chat2db.plugin.hive;
2+
3+
import ai.chat2db.spi.ValueHandler;
4+
import ai.chat2db.spi.model.Command;
5+
import ai.chat2db.spi.model.ExecuteResult;
6+
import ai.chat2db.spi.model.Header;
7+
import ai.chat2db.spi.sql.SQLExecutor;
8+
import lombok.extern.slf4j.Slf4j;
9+
import org.apache.commons.collections4.CollectionUtils;
10+
import org.apache.commons.lang3.StringUtils;
11+
12+
import java.sql.Connection;
13+
import java.sql.SQLException;
14+
import java.util.ArrayList;
15+
import java.util.List;
16+
17+
@Slf4j
18+
public class HiveCommandExecutor extends SQLExecutor {
19+
20+
/**
21+
* Execute command
22+
*/
23+
@Override
24+
public List<ExecuteResult> execute(Command command) {
25+
List<ExecuteResult> result = new ArrayList<>();
26+
result = super.execute(command);
27+
if (CollectionUtils.isNotEmpty(result)) {
28+
for (ExecuteResult executeResult : result) {
29+
if (executeResult.getHeaderList() != null) {
30+
for (Header header : executeResult.getHeaderList()) {
31+
header.setName(formatTableName(header.getName()));
32+
}
33+
}
34+
}
35+
}
36+
return result;
37+
}
38+
39+
40+
/**
41+
* Execute command
42+
*/
43+
@Override
44+
public ExecuteResult executeUpdate(String sql, Connection connection, int n) throws SQLException {
45+
return super.executeUpdate(sql, connection, n);
46+
}
47+
48+
49+
/**
50+
*
51+
*/
52+
@Override
53+
public ExecuteResult execute(final String sql, Connection connection, boolean limitRowSize, Integer offset,
54+
Integer count, ValueHandler valueHandler)
55+
throws SQLException {
56+
return super.execute(sql, connection, limitRowSize, offset, count, valueHandler);
57+
}
58+
59+
public static String formatTableName(String tableName) {
60+
if (StringUtils.isBlank(tableName)) {
61+
return tableName;
62+
}
63+
if (tableName.contains(".")) {
64+
String[] split = tableName.split("\\.");
65+
return split[1];
66+
}
67+
return tableName;
68+
}
69+
}

chat2db-server/chat2db-plugins/chat2db-hive/src/main/java/ai/chat2db/plugin/hive/HiveDBManage.java

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,28 @@
11
package ai.chat2db.plugin.hive;
22

33
import java.sql.Connection;
4+
import java.sql.SQLException;
45

56
import ai.chat2db.spi.DBManage;
67
import ai.chat2db.spi.jdbc.DefaultDBManage;
78
import ai.chat2db.spi.sql.SQLExecutor;
9+
import org.springframework.util.StringUtils;
810

911
public class HiveDBManage extends DefaultDBManage implements DBManage {
1012

1113

14+
@Override
15+
public void connectDatabase(Connection connection, String database) {
16+
if (StringUtils.isEmpty(database)) {
17+
return;
18+
}
19+
try {
20+
SQLExecutor.getInstance().execute(connection,"use " + database );
21+
} catch (SQLException e) {
22+
throw new RuntimeException(e);
23+
}
24+
}
25+
1226
@Override
1327
public void dropTable(Connection connection, String databaseName, String schemaName, String tableName) {
1428
String sql = "drop table if exists " +tableName;
Lines changed: 285 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,292 @@
11
package ai.chat2db.plugin.hive;
22

3+
import ai.chat2db.plugin.hive.builder.HiveSqlBuilder;
4+
import ai.chat2db.plugin.hive.type.HiveColumnTypeEnum;
5+
import ai.chat2db.plugin.hive.type.HiveIndexTypeEnum;
6+
import ai.chat2db.spi.CommandExecutor;
37
import ai.chat2db.spi.MetaData;
8+
import ai.chat2db.spi.SqlBuilder;
49
import ai.chat2db.spi.jdbc.DefaultMetaService;
10+
import ai.chat2db.spi.model.*;
11+
import ai.chat2db.spi.sql.SQLExecutor;
12+
import jakarta.validation.constraints.NotEmpty;
13+
import org.apache.commons.lang3.StringUtils;
14+
15+
import java.io.Reader;
16+
import java.sql.Connection;
17+
import java.sql.SQLException;
18+
import java.util.*;
19+
import java.util.stream.Collectors;
520

621
public class HiveMetaData extends DefaultMetaService implements MetaData {
22+
23+
@Override
24+
public List<Database> databases(Connection connection) {
25+
List<Database> databases = new ArrayList<>();
26+
return SQLExecutor.getInstance().execute(connection,"show databases", resultSet -> {
27+
try {
28+
while (resultSet.next()) {
29+
String databaseName = resultSet.getString("database_name");
30+
Database database = new Database();
31+
database.setName(databaseName);
32+
databases.add(database);
33+
}
34+
} catch (SQLException e) {
35+
throw new RuntimeException(e);
36+
}
37+
return databases;
38+
});
39+
}
40+
41+
@Override
42+
public List<Schema> schemas(Connection connection, String databaseName) {
43+
List<Schema> schemas = new ArrayList<>();
44+
schemas.add(Schema.builder().databaseName(databaseName).name(databaseName).build());
45+
return schemas;
46+
}
47+
48+
@Override
49+
public String tableDDL(Connection connection, @NotEmpty String databaseName, String schemaName,
50+
@NotEmpty String tableName) {
51+
String sql = "SHOW CREATE TABLE " + format(databaseName) + "."
52+
+ format(tableName);
53+
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
54+
StringBuilder sb = new StringBuilder();
55+
while (resultSet.next()) {
56+
// 拼接建表语句
57+
sb.append(resultSet.getString("createtab_stmt"));
58+
sb.append("\r\n");
59+
}
60+
if (sb.length() > 0) {
61+
sb = sb.delete(sb.length() - 2, sb.length());
62+
sb.append(";");
63+
return sb.toString();
64+
}
65+
return null;
66+
});
67+
}
68+
69+
@Override
70+
public String getMetaDataName(String... names) {
71+
return Arrays.stream(names).filter(name -> StringUtils.isNotBlank(name)).map(name -> "`" + name + "`").collect(Collectors.joining("."));
72+
}
73+
74+
@Override
75+
public CommandExecutor getCommandExecutor() {
76+
return new HiveCommandExecutor();
77+
}
78+
79+
@Override
80+
public TableMeta getTableMeta(String databaseName, String schemaName, String tableName) {
81+
return TableMeta.builder()
82+
.columnTypes(HiveColumnTypeEnum.getTypes())
83+
//.charsets(HiveCharsetEnum.getCharsets())
84+
//.collations(HiveCollationEnum.getCollations())
85+
.indexTypes(HiveIndexTypeEnum.getIndexTypes())
86+
//.defaultValues(HiveDefaultValueEnum.getDefaultValues())
87+
.build();
88+
}
89+
90+
@Override
91+
public SqlBuilder getSqlBuilder() {
92+
return new HiveSqlBuilder();
93+
}
94+
95+
96+
private static String SELECT_TAB_COLS = "DESCRIBE FORMATTED `%s`.`%s`";
97+
// TODO 待完善
98+
@Override
99+
public List<TableColumn> columns(Connection connection, String databaseName, String schemaName, String tableName) {
100+
String sql = String.format(SELECT_TAB_COLS, databaseName, tableName);
101+
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
102+
List<TableColumn> tableColumns = new ArrayList<>();
103+
Map<String, String> detailTableInfo = new HashMap<>();
104+
Map<String, String> tableParams = new HashMap<>();
105+
Map<String, String> storageInfo = new HashMap<>();
106+
Map<String, String> storageDescParams = new HashMap<>();
107+
Map<String, Map<String, String>> constraints = new HashMap<>();
108+
List<Map<String, String>> columns = new ArrayList<>();
109+
List<Map<String, String>> partitions = new ArrayList<>();
110+
Map<String, String> moduleMap = getDescTableModule();
111+
112+
String infoModule = "";
113+
while (resultSet.next()) {
114+
String title = resultSet.getString(1).trim();
115+
if (("".equals(title) && resultSet.getString(2) == null) || "# Constraints".equals(title)) {
116+
continue;
117+
}
118+
if (moduleMap.containsKey(title)) {
119+
if ("partition_info".equals(infoModule) && "col_name".equals(moduleMap.get(title))) {
120+
continue;
121+
}
122+
infoModule = moduleMap.get(title);
123+
continue;
124+
}
125+
126+
String key = null;
127+
String value = null;
128+
switch (infoModule) {
129+
case "col_name":
130+
Map<String, String> map = new HashMap<>();
131+
int colNum = resultSet.getMetaData().getColumnCount();
132+
for (int col = 1; col <= colNum; col++) {
133+
String columnName = resultSet.getMetaData().getColumnName(col);
134+
String columnValue = resultSet.getString(columnName);
135+
map.put(columnName, columnValue);
136+
}
137+
columns.add(map);
138+
break;
139+
case "table_info":
140+
key = resultSet.getString(1).trim().replace(":", "");
141+
value = resultSet.getString(2).trim();
142+
detailTableInfo.put(key, value);
143+
break;
144+
145+
case "table_param":
146+
key = resultSet.getString(2).trim().replace(":", "");
147+
value = resultSet.getString(3).trim();
148+
tableParams.put(key, value);
149+
break;
150+
151+
case "storage_info":
152+
key = resultSet.getString(1).trim().replace(":", "");
153+
value = resultSet.getString(2).trim();
154+
storageInfo.put(key, value);
155+
break;
156+
157+
case "storage_desc":
158+
key = resultSet.getString(2).trim().replace(":", "");
159+
value = resultSet.getString(3).trim();
160+
storageDescParams.put(key, value);
161+
break;
162+
case "primary_key":
163+
Map<String, String> primaryKeyMap = constraints.getOrDefault("primaryKey", new HashMap<>());
164+
if ("Table:".equals(title.trim())) {
165+
resultSet.next();
166+
}
167+
String primaryKeyName = resultSet.getString(2).trim();
168+
resultSet.next();
169+
170+
key = resultSet.getString(2).trim();
171+
primaryKeyMap.put(key, primaryKeyName);
172+
173+
constraints.put("primaryKey", primaryKeyMap);
174+
break;
175+
case "not_null_constraint":
176+
Map<String, String> notNullMap = constraints.getOrDefault("notnull", new HashMap<>());
177+
if ("Table:".equals(title.trim())) {
178+
resultSet.next();
179+
}
180+
181+
String notNullConstraintName = resultSet.getString(2).trim();
182+
resultSet.next();
183+
184+
key = resultSet.getString(2).trim();
185+
notNullMap.put(key, notNullConstraintName);
186+
187+
constraints.put("notnull", notNullMap);
188+
break;
189+
190+
case "default_constraint":
191+
Map<String, String> defaultMap = constraints.getOrDefault("default", new HashMap<>());
192+
if ("Table:".equals(title.trim())) { resultSet.next();}
193+
194+
String defaultConstraintName = resultSet.getString(2).trim();
195+
resultSet.next();
196+
197+
key = resultSet.getString(1).trim().split(":")[1];
198+
value = resultSet.getString(2).trim();
199+
int valueIndex = value.indexOf(":");
200+
value = value.substring(valueIndex + 1);
201+
202+
defaultMap.put(key + "_constraintName", defaultConstraintName);
203+
204+
constraints.put("default", defaultMap);
205+
break;
206+
207+
case "partition_info":
208+
Map<String, String> partitionMap = new HashMap<>();
209+
int partitionColNum = resultSet.getMetaData().getColumnCount();
210+
for (int col = 0; col < partitionColNum; col++) {
211+
String columnName = resultSet.getMetaData().getColumnName(col + 1);
212+
String columnValue = resultSet.getString(columnName);
213+
partitionMap.put(columnName, columnValue);
214+
}
215+
partitions.add(partitionMap);
216+
break;
217+
default:
218+
System.out.print("unknown module,please update method to support it : " + infoModule);
219+
220+
}
221+
222+
223+
}
224+
225+
for (Map<String, String> columnMap : columns) {
226+
TableColumn tableColumn = new TableColumn();
227+
tableColumn.setTableName(tableName);
228+
tableColumn.setSchemaName(schemaName);
229+
tableColumn.setName(columnMap.get("col_name"));
230+
tableColumn.setColumnType(columnMap.get("data_type"));
231+
tableColumn.setComment(columnMap.get("comment"));
232+
if (constraints.get("primaryKey") != null && constraints.get("primaryKey").keySet().contains(columnMap.get("col_name"))) {
233+
tableColumn.setPrimaryKey(true);
234+
}
235+
if (constraints.get("notnull") !=null && constraints.get("notnull").keySet().contains(columnMap.get("col_name"))) {
236+
tableColumn.setNullable(1);
237+
}
238+
tableColumns.add(tableColumn);
239+
240+
}
241+
242+
return tableColumns;
243+
});
244+
}
245+
246+
private static Map<String, String> getDescTableModule() {
247+
Map<String, String> descTableModule = new HashMap<>();
248+
249+
descTableModule.put("# col_name", "col_name");
250+
descTableModule.put("# Detailed Table Information", "table_info");
251+
descTableModule.put("Table Parameters:", "table_param");
252+
descTableModule.put("# Storage Information", "storage_info");
253+
descTableModule.put("Storage Desc Params:", "storage_desc");
254+
descTableModule.put("# Not Null Constraints", "not_null_constraint");
255+
descTableModule.put("# Default Constraints", "default_constraint");
256+
descTableModule.put("# Partition Information", "partition_info");
257+
descTableModule.put("# Primary Key", "primary_key");
258+
259+
return descTableModule;
260+
}
261+
262+
public static String format(String name) {
263+
return "`" + name + "`";
264+
}
265+
266+
private static String VIEW_SQL
267+
= "SHOW CREATE TABLE `%s`.`%s`";
268+
269+
@Override
270+
public Table view(Connection connection, String databaseName, String schemaName, String viewName) {
271+
String sql = String.format(VIEW_SQL, databaseName, viewName);
272+
return SQLExecutor.getInstance().execute(connection, sql, resultSet -> {
273+
Table table = new Table();
274+
table.setDatabaseName(databaseName);
275+
table.setSchemaName(schemaName);
276+
table.setName(viewName);
277+
StringBuilder sb = new StringBuilder();
278+
while (resultSet.next()) {
279+
// 拼接建表语句
280+
sb.append(resultSet.getString("createtab_stmt"));
281+
sb.append("\r\n");
282+
}
283+
if (sb.length() > 0) {
284+
sb = sb.delete(sb.length() - 2, sb.length());
285+
sb.append(";");
286+
table.setDdl(sb.toString());
287+
}
288+
return table;
289+
});
290+
}
7291
}
292+

0 commit comments

Comments
 (0)