Skip to content

Commit 2b566e5

Browse files
committed
rdb field mapping by name
1 parent 72a294e commit 2b566e5

File tree

3 files changed

+46
-2
lines changed

3 files changed

+46
-2
lines changed

core/pom.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
<properties>
1818
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
1919
<project.package.name>core</project.package.name>
20+
<flink.table.planner>1.8.1</flink.table.planner>
2021
</properties>
2122

2223
<dependencies>
@@ -56,6 +57,11 @@
5657
<artifactId>flink-streaming-scala_2.11</artifactId>
5758
<version>${flink.version}</version>
5859
</dependency>
60+
<dependency>
61+
<groupId>org.apache.flink</groupId>
62+
<artifactId>flink-table-planner_2.11</artifactId>
63+
<version>${flink.table.planner}</version>
64+
</dependency>
5965
</dependencies>
6066

6167
<build>

core/src/main/java/com/dtstack/flink/sql/Main.java

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
import com.dtstack.flink.sql.util.FlinkUtil;
3636
import com.dtstack.flink.sql.util.PluginUtil;
3737
import org.apache.calcite.config.Lex;
38+
import org.apache.calcite.sql.SqlIdentifier;
3839
import org.apache.calcite.sql.SqlInsert;
3940
import org.apache.calcite.sql.SqlNode;
4041
import org.apache.commons.cli.CommandLine;
@@ -60,7 +61,12 @@
6061
import org.apache.flink.streaming.api.environment.StreamContextEnvironment;
6162
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
6263
import org.apache.flink.table.api.Table;
64+
import org.apache.flink.table.api.TableEnvironment;
65+
import org.apache.flink.table.api.TableException;
6366
import org.apache.flink.table.api.java.StreamTableEnvironment;
67+
import org.apache.flink.table.calcite.FlinkPlannerImpl;
68+
import org.apache.flink.table.plan.logical.LogicalRelNode;
69+
import org.apache.flink.table.plan.schema.TableSinkTable;
6470
import org.apache.flink.table.sinks.TableSink;
6571
import org.apache.flink.types.Row;
6672
import org.slf4j.Logger;
@@ -204,7 +210,7 @@ public static void main(String[] args) throws Exception {
204210
//sql-dimensional table contains the dimension table of execution
205211
sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache);
206212
}else{
207-
tableEnv.sqlUpdate(result.getExecSql());
213+
sqlUpdate(tableEnv, result.getExecSql());
208214
if(LOG.isInfoEnabled()){
209215
LOG.info("exec sql: " + result.getExecSql());
210216
}
@@ -222,6 +228,36 @@ public static void main(String[] args) throws Exception {
222228
env.execute(name);
223229
}
224230

231+
public static void sqlUpdate(StreamTableEnvironment tableEnv, String stmt) {
232+
233+
FlinkPlannerImpl planner = new FlinkPlannerImpl(tableEnv.getFrameworkConfig(), tableEnv.getPlanner(), tableEnv.getTypeFactory());
234+
SqlNode insert = planner.parse(stmt);
235+
236+
if (!(insert instanceof SqlInsert)) {
237+
throw new TableException(
238+
"Unsupported SQL query! sqlUpdate() only accepts SQL statements of type INSERT.");
239+
}
240+
SqlNode query = ((SqlInsert) insert).getSource();
241+
242+
SqlNode validatedQuery = planner.validate(query);
243+
244+
Table queryResult = new Table(tableEnv, new LogicalRelNode(planner.rel(validatedQuery).rel));
245+
String targetTableName = ((SqlIdentifier) ((SqlInsert) insert).getTargetTable()).names.get(0);
246+
247+
try {
248+
Method method = TableEnvironment.class.getDeclaredMethod("getTable", String.class);
249+
method.setAccessible(true);
250+
251+
TableSinkTable targetTable = (TableSinkTable) method.invoke(tableEnv, targetTableName);
252+
String[] fieldNames = targetTable.tableSink().getFieldNames();
253+
Table newTable = queryResult.select(String.join(",", fieldNames));
254+
// insert query result into sink table
255+
tableEnv.insertInto(newTable, targetTableName, tableEnv.queryConfig());
256+
} catch (Exception e) {
257+
e.printStackTrace();
258+
}
259+
}
260+
225261
/**
226262
* This part is just to add classpath for the jar when reading remote execution, and will not submit jar from a local
227263
* @param env

core/src/main/java/com/dtstack/flink/sql/side/SideSqlExec.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020

2121
package com.dtstack.flink.sql.side;
2222

23+
import com.dtstack.flink.sql.Main;
2324
import com.dtstack.flink.sql.enums.ECacheType;
2425
import com.dtstack.flink.sql.parser.CreateTmpTableParser;
2526
import com.dtstack.flink.sql.side.operator.SideAsyncOperator;
@@ -105,7 +106,8 @@ public void exec(String sql, Map<String, SideTableInfo> sideTableMap, StreamTabl
105106
}
106107

107108
if(pollSqlNode.getKind() == INSERT){
108-
tableEnv.sqlUpdate(pollSqlNode.toString());
109+
// tableEnv.sqlUpdate(pollSqlNode.toString());
110+
Main.sqlUpdate(tableEnv, pollSqlNode.toString());
109111
if(LOG.isInfoEnabled()){
110112
LOG.info("exec sql: " + pollSqlNode.toString());
111113
}

0 commit comments

Comments (0)