Skip to content

Commit f993f18

Browse files
authored
[hive] Hive-created format tables support querying field comments. (#5070)
1 parent a2ed191 commit f993f18

File tree

3 files changed

+34
-29
lines changed

3 files changed

+34
-29
lines changed

paimon-flink/paimon-flink-common/src/main/java/org/apache/paimon/flink/FormatCatalogTable.java

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import org.apache.flink.table.connector.source.DynamicTableSource;
2727
import org.apache.flink.table.factories.DynamicTableFactory;
2828
import org.apache.flink.table.factories.FactoryUtil;
29+
import org.apache.flink.table.types.logical.RowType;
2930

3031
import java.util.Collections;
3132
import java.util.HashMap;
@@ -57,9 +58,18 @@ public FormatTable table() {
5758

5859
@Override
5960
public Schema getUnresolvedSchema() {
60-
return Schema.newBuilder()
61-
.fromRowDataType(fromLogicalToDataType(toLogicalType(table.rowType())))
62-
.build();
61+
final Schema.Builder builder = Schema.newBuilder();
62+
RowType logicalType = toLogicalType(table.rowType());
63+
logicalType
64+
.getFields()
65+
.forEach(
66+
field -> {
67+
builder.column(field.getName(), fromLogicalToDataType(field.getType()));
68+
if (field.getDescription().isPresent()) {
69+
builder.withComment(field.getDescription().get());
70+
}
71+
});
72+
return builder.build();
6373
}
6474

6575
@Override

paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveTableUtils.java

Lines changed: 4 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,11 @@
2121
import org.apache.paimon.options.Options;
2222
import org.apache.paimon.schema.Schema;
2323
import org.apache.paimon.table.FormatTable.Format;
24-
import org.apache.paimon.types.DataType;
2524
import org.apache.paimon.types.RowType;
26-
import org.apache.paimon.utils.Pair;
2725

2826
import org.apache.hadoop.hive.metastore.api.FieldSchema;
2927
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
3028
import org.apache.hadoop.hive.metastore.api.Table;
31-
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
3229

3330
import java.util.ArrayList;
3431
import java.util.List;
@@ -96,24 +93,11 @@ private static List<String> getFieldNames(List<FieldSchema> fieldSchemas) {
9693
public static RowType createRowType(Table table) {
9794
List<FieldSchema> allCols = new ArrayList<>(table.getSd().getCols());
9895
allCols.addAll(table.getPartitionKeys());
99-
Pair<String[], DataType[]> columnInformation = extractColumnInformation(allCols);
100-
return RowType.builder()
101-
.fields(columnInformation.getRight(), columnInformation.getLeft())
102-
.build();
103-
}
10496

105-
private static Pair<String[], DataType[]> extractColumnInformation(List<FieldSchema> allCols) {
106-
String[] colNames = new String[allCols.size()];
107-
DataType[] colTypes = new DataType[allCols.size()];
108-
109-
for (int i = 0; i < allCols.size(); i++) {
110-
FieldSchema fs = allCols.get(i);
111-
colNames[i] = fs.getName();
112-
colTypes[i] =
113-
HiveTypeUtils.toPaimonType(
114-
TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
97+
RowType.Builder builder = RowType.builder();
98+
for (FieldSchema fs : allCols) {
99+
builder.field(fs.getName(), HiveTypeUtils.toPaimonType(fs.getType()), fs.getComment());
115100
}
116-
117-
return Pair.of(colNames, colTypes);
101+
return builder.build();
118102
}
119103
}

paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveCatalogFormatTableITCaseBase.java

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
import java.util.HashMap;
4343
import java.util.List;
4444
import java.util.Map;
45+
import java.util.Objects;
4546
import java.util.stream.Collectors;
4647

4748
import static org.apache.paimon.options.CatalogOptions.FORMAT_TABLE_ENABLED;
@@ -119,46 +120,56 @@ public void evaluate() throws Throwable {
119120

120121
@Test
121122
public void testCsvFormatTable() throws Exception {
122-
hiveShell.execute("CREATE TABLE csv_table (a INT, b STRING)");
123+
hiveShell.execute(
124+
"CREATE TABLE csv_table (a INT COMMENT 'comment a', b STRING COMMENT 'comment b')");
123125
doTestFormatTable("csv_table");
124126
}
125127

126128
@Test
127129
public void testCsvFormatTableWithDelimiter() throws Exception {
128130
hiveShell.execute(
129-
"CREATE TABLE csv_table_delimiter (a INT, b STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ';'");
131+
"CREATE TABLE csv_table_delimiter (a INT COMMENT 'comment a', b STRING COMMENT 'comment b') ROW FORMAT DELIMITED FIELDS TERMINATED BY ';'");
130132
doTestFormatTable("csv_table_delimiter");
131133
}
132134

133135
@Test
134136
public void testPartitionTable() throws Exception {
135-
hiveShell.execute("CREATE TABLE partition_table (a INT) PARTITIONED BY (b STRING)");
137+
hiveShell.execute(
138+
"CREATE TABLE partition_table (a INT COMMENT 'comment a') PARTITIONED BY (b STRING COMMENT 'comment b')");
136139
doTestFormatTable("partition_table");
137140
}
138141

139142
@Test
140143
public void testFlinkCreateCsvFormatTable() throws Exception {
141144
tEnv.executeSql(
142-
"CREATE TABLE flink_csv_table (a INT, b STRING) with ('type'='format-table', 'file.format'='csv')")
145+
"CREATE TABLE flink_csv_table (a INT COMMENT 'comment a', b STRING COMMENT 'comment b') with ('type'='format-table', 'file.format'='csv')")
143146
.await();
144147
doTestFormatTable("flink_csv_table");
145148
}
146149

147150
@Test
148151
public void testFlinkCreateFormatTableWithDelimiter() throws Exception {
149152
tEnv.executeSql(
150-
"CREATE TABLE flink_csv_table_delimiter (a INT, b STRING) with ('type'='format-table', 'file.format'='csv', 'field-delimiter'=';')");
153+
"CREATE TABLE flink_csv_table_delimiter (a INT COMMENT 'comment a', b STRING COMMENT 'comment b') with ('type'='format-table', 'file.format'='csv', 'field-delimiter'=';')");
151154
doTestFormatTable("flink_csv_table_delimiter");
152155
}
153156

154157
@Test
155158
public void testFlinkCreatePartitionTable() throws Exception {
156159
tEnv.executeSql(
157-
"CREATE TABLE flink_partition_table (a INT,b STRING) PARTITIONED BY (b) with ('type'='format-table', 'file.format'='csv')");
160+
"CREATE TABLE flink_partition_table (a INT COMMENT 'comment a', b STRING COMMENT 'comment b') PARTITIONED BY (b) with ('type'='format-table', 'file.format'='csv')");
158161
doTestFormatTable("flink_partition_table");
159162
}
160163

161164
private void doTestFormatTable(String tableName) throws Exception {
165+
List<String> descResult =
166+
collect("DESC " + tableName).stream()
167+
.map(Objects::toString)
168+
.collect(Collectors.toList());
169+
assertThat(descResult)
170+
.containsExactly(
171+
"+I[a, INT, true, null, null, null, comment a]",
172+
"+I[b, STRING, true, null, null, null, comment b]");
162173
hiveShell.execute(
163174
String.format("INSERT INTO %s VALUES (100, 'Hive'), (200, 'Table')", tableName));
164175
assertThat(collect(String.format("SELECT * FROM %s", tableName)))

0 commit comments

Comments (0)