Skip to content

Commit 03bc6b6

Browse files
author
yanxi
committed
Merge branch 'v1.8.0_dev_bugfix' into 'v1.8.0_dev'
V1.8.0 dev bugfix. See merge request !35
2 parents 3e56e9b + a749540 commit 03bc6b6

File tree

124 files changed

+2519
-2504
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

124 files changed

+2519
-2504
lines changed

README.md

Lines changed: 6 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -11,12 +11,17 @@
1111
# 已支持
1212
* 源表:kafka 0.9,1.x版本
1313
* 维表:mysql,SQlServer,oracle,hbase,mongo,redis,cassandra
14-
* 结果表:mysql,SQlServer,oracle,hbase,elasticsearch5.x,mongo,redis,cassandra,console
14+
* 结果表:mysql,SQlServer,oracle,hbase,elasticsearch5.x,mongo,redis,cassandra
1515

1616
# 后续开发计划
1717
* 增加SQL支持CEP
1818
* 维表快照
1919
* sql优化(谓词下移等)
20+
* serverSocket 源表
21+
* console 结果表
22+
* kafka avro格式
23+
* topN
24+
2025

2126
## 1 快速起步
2227
### 1.1 运行模式
@@ -142,7 +147,6 @@ sh submit.sh -sql D:\sideSql.txt -name xctest -remoteSqlPluginPath /opt/dtstack
142147
## 2 结构
143148
### 2.1 源表插件
144149
* [kafka 源表插件](docs/kafkaSource.md)
145-
* [serverSocket 源表插件](docs/serverSocketSource.md)
146150

147151
### 2.2 结果表插件
148152
* [elasticsearch 结果表插件](docs/elasticsearchSink.md)
@@ -151,7 +155,6 @@ sh submit.sh -sql D:\sideSql.txt -name xctest -remoteSqlPluginPath /opt/dtstack
151155
* [mongo 结果表插件](docs/mongoSink.md)
152156
* [redis 结果表插件](docs/redisSink.md)
153157
* [cassandra 结果表插件](docs/cassandraSink.md)
154-
* [console 结果表插件](docs/consoleSink.md)
155158

156159
### 2.3 维表插件
157160
* [hbase 维表插件](docs/hbaseSide.md)

cassandra/cassandra-side/cassandra-all-side/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -76,7 +76,7 @@
7676
</copy>
7777

7878
<move file="${basedir}/../../../plugins/cassandraallside/${project.artifactId}-${project.version}.jar"
79-
tofile="${basedir}/../../../plugins/cassandraallside/${project.name}.jar" />
79+
tofile="${basedir}/../../../plugins/cassandraallside/${project.name}-${git.branch}.jar" />
8080
</tasks>
8181
</configuration>
8282
</execution>

cassandra/cassandra-side/cassandra-all-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAllSideInfo.java

Lines changed: 2 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -23,6 +23,7 @@
2323
import com.dtstack.flink.sql.side.SideInfo;
2424
import com.dtstack.flink.sql.side.SideTableInfo;
2525
import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
26+
import com.dtstack.flink.sql.util.ParseUtils;
2627
import org.apache.calcite.sql.SqlBasicCall;
2728
import org.apache.calcite.sql.SqlKind;
2829
import org.apache.calcite.sql.SqlNode;
@@ -86,11 +87,7 @@ public void parseSelectFields(JoinInfo joinInfo) {
8687
SqlNode conditionNode = joinInfo.getCondition();
8788

8889
List<SqlNode> sqlNodeList = Lists.newArrayList();
89-
if (conditionNode.getKind() == SqlKind.AND) {
90-
sqlNodeList.addAll(Lists.newArrayList(((SqlBasicCall) conditionNode).getOperands()));
91-
} else {
92-
sqlNodeList.add(conditionNode);
93-
}
90+
ParseUtils.parseAnd(conditionNode, sqlNodeList);
9491

9592
for (SqlNode sqlNode : sqlNodeList) {
9693
dealOneEqualCon(sqlNode, sideTableName);

cassandra/cassandra-side/cassandra-async-side/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@
9292
</copy>
9393

9494
<move file="${basedir}/../../../plugins/cassandraasyncside/${project.artifactId}-${project.version}.jar"
95-
tofile="${basedir}/../../../plugins/cassandraasyncside/${project.name}.jar" />
95+
tofile="${basedir}/../../../plugins/cassandraasyncside/${project.name}-${git.branch}.jar" />
9696
</tasks>
9797
</configuration>
9898
</execution>

cassandra/cassandra-side/cassandra-async-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAsyncSideInfo.java

Lines changed: 2 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -23,6 +23,7 @@
2323
import com.dtstack.flink.sql.side.SideInfo;
2424
import com.dtstack.flink.sql.side.SideTableInfo;
2525
import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
26+
import com.dtstack.flink.sql.util.ParseUtils;
2627
import org.apache.calcite.sql.SqlBasicCall;
2728
import org.apache.calcite.sql.SqlIdentifier;
2829
import org.apache.calcite.sql.SqlKind;
@@ -55,11 +56,7 @@ public void buildEqualInfo(JoinInfo joinInfo, SideTableInfo sideTableInfo) {
5556
SqlNode conditionNode = joinInfo.getCondition();
5657

5758
List<SqlNode> sqlNodeList = Lists.newArrayList();
58-
if (conditionNode.getKind() == SqlKind.AND) {
59-
sqlNodeList.addAll(Lists.newArrayList(((SqlBasicCall) conditionNode).getOperands()));
60-
} else {
61-
sqlNodeList.add(conditionNode);
62-
}
59+
ParseUtils.parseAnd(conditionNode, sqlNodeList);
6360

6461
for (SqlNode sqlNode : sqlNodeList) {
6562
dealOneEqualCon(sqlNode, sideTableName);

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -150,7 +150,7 @@
150150
</fileset>
151151
</copy>
152152
<move file="${basedir}/../plugins/${project.artifactId}-${project.version}.jar"
153-
tofile="${basedir}/../plugins/${project.name}.jar" />
153+
tofile="${basedir}/../plugins/${project.name}-${git.branch}.jar" />
154154
</tasks>
155155
</configuration>
156156
</execution>

core/src/main/java/com/dtstack/flink/sql/Main.java

Lines changed: 76 additions & 42 deletions
Original file line number · Diff line number · Diff line change
@@ -23,8 +23,6 @@
2323
import com.dtstack.flink.sql.classloader.DtClassLoader;
2424
import com.dtstack.flink.sql.enums.ECacheType;
2525
import com.dtstack.flink.sql.environment.MyLocalStreamEnvironment;
26-
import com.dtstack.flink.sql.options.LauncherOptionParser;
27-
import com.dtstack.flink.sql.options.LauncherOptions;
2826
import com.dtstack.flink.sql.parser.*;
2927
import com.dtstack.flink.sql.side.SideSqlExec;
3028
import com.dtstack.flink.sql.side.SideTableInfo;
@@ -40,31 +38,40 @@
4038
import org.apache.calcite.config.Lex;
4139
import org.apache.calcite.sql.SqlInsert;
4240
import org.apache.calcite.sql.SqlNode;
41+
import org.apache.commons.cli.CommandLine;
42+
import org.apache.commons.cli.CommandLineParser;
43+
import org.apache.commons.cli.DefaultParser;
44+
import org.apache.commons.cli.Options;
4345
import org.apache.commons.io.Charsets;
44-
import org.apache.commons.lang3.StringUtils;
46+
import org.apache.flink.api.common.ExecutionConfig;
4547
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
4648
import org.apache.flink.api.common.time.Time;
4749
import org.apache.flink.api.common.typeinfo.TypeInformation;
50+
import org.apache.flink.api.java.tuple.Tuple2;
4851
import org.apache.flink.api.java.typeutils.RowTypeInfo;
52+
import org.apache.flink.calcite.shaded.com.google.common.base.Preconditions;
4953
import org.apache.flink.calcite.shaded.com.google.common.base.Strings;
5054
import org.apache.flink.calcite.shaded.com.google.common.collect.Lists;
5155
import org.apache.flink.calcite.shaded.com.google.common.collect.Maps;
5256
import org.apache.flink.calcite.shaded.com.google.common.collect.Sets;
5357
import org.apache.flink.client.program.ContextEnvironment;
58+
import org.apache.flink.configuration.Configuration;
5459
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
5560
import org.apache.flink.streaming.api.datastream.DataStream;
5661
import org.apache.flink.streaming.api.environment.StreamContextEnvironment;
5762
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
5863
import org.apache.flink.table.api.Table;
5964
import org.apache.flink.table.api.java.StreamTableEnvironment;
6065
import org.apache.flink.table.sinks.TableSink;
66+
import org.apache.flink.types.Row;
6167
import org.slf4j.Logger;
6268
import org.slf4j.LoggerFactory;
6369

6470
import java.io.File;
6571
import java.io.IOException;
6672
import java.lang.reflect.Field;
6773
import java.lang.reflect.InvocationTargetException;
74+
import java.lang.reflect.Method;
6875
import java.net.URL;
6976
import java.net.URLClassLoader;
7077
import java.net.URLDecoder;
@@ -98,36 +105,44 @@ public class Main {
98105

99106
public static void main(String[] args) throws Exception {
100107

101-
LauncherOptionParser optionParser = new LauncherOptionParser(args);
102-
LauncherOptions launcherOptions = optionParser.getLauncherOptions();
103-
104-
String sql = launcherOptions.getSql();
105-
String name =launcherOptions.getName();
106-
String addJarListStr = launcherOptions.getAddjar();
107-
String localSqlPluginPath = launcherOptions.getLocalSqlPluginPath();
108-
String remoteSqlPluginPath = launcherOptions.getRemoteSqlPluginPath();
109-
String deployMode = launcherOptions.getMode();
110-
String confProp = launcherOptions.getConfProp();
108+
Options options = new Options();
109+
options.addOption("sql", true, "sql config");
110+
options.addOption("name", true, "job name");
111+
options.addOption("addjar", true, "add jar");
112+
options.addOption("localSqlPluginPath", true, "local sql plugin path");
113+
options.addOption("remoteSqlPluginPath", true, "remote sql plugin path");
114+
options.addOption("confProp", true, "env properties");
115+
options.addOption("mode", true, "deploy mode");
116+
117+
options.addOption("savePointPath", true, "Savepoint restore path");
118+
options.addOption("allowNonRestoredState", true, "Flag indicating whether non restored state is allowed if the savepoint");
119+
120+
CommandLineParser parser = new DefaultParser();
121+
CommandLine cl = parser.parse(options, args);
122+
String sql = cl.getOptionValue("sql");
123+
String name = cl.getOptionValue("name");
124+
String addJarListStr = cl.getOptionValue("addjar");
125+
String localSqlPluginPath = cl.getOptionValue("localSqlPluginPath");
126+
String remoteSqlPluginPath = cl.getOptionValue("remoteSqlPluginPath");
127+
String deployMode = cl.getOptionValue("mode");
128+
String confProp = cl.getOptionValue("confProp");
129+
130+
Preconditions.checkNotNull(sql, "parameters of sql is required");
131+
Preconditions.checkNotNull(name, "parameters of name is required");
132+
Preconditions.checkNotNull(localSqlPluginPath, "parameters of localSqlPluginPath is required");
111133

112134
sql = URLDecoder.decode(sql, Charsets.UTF_8.name());
113135
SqlParser.setLocalSqlPluginRoot(localSqlPluginPath);
114-
List<String> addJarFileList = Lists.newArrayList();
115136

137+
List<String> addJarFileList = Lists.newArrayList();
116138
if(!Strings.isNullOrEmpty(addJarListStr)){
117139
addJarListStr = URLDecoder.decode(addJarListStr, Charsets.UTF_8.name());
118140
addJarFileList = objMapper.readValue(addJarListStr, List.class);
119141
}
120142

121143
ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader();
122-
DtClassLoader dtClassLoader = new DtClassLoader(new URL[]{}, threadClassLoader);
123-
Thread.currentThread().setContextClassLoader(dtClassLoader);
124-
125-
URLClassLoader parentClassloader;
126-
if(!ClusterMode.local.name().equals(deployMode)){
127-
parentClassloader = (URLClassLoader) threadClassLoader.getParent();
128-
}else{
129-
parentClassloader = dtClassLoader;
130-
}
144+
DtClassLoader parentClassloader = new DtClassLoader(new URL[]{}, threadClassLoader);
145+
Thread.currentThread().setContextClassLoader(parentClassloader);
131146

132147
confProp = URLDecoder.decode(confProp, Charsets.UTF_8.toString());
133148
Properties confProperties = PluginUtil.jsonStrToObject(confProp, Properties.class);
@@ -198,7 +213,7 @@ public static void main(String[] args) throws Exception {
198213

199214
if(env instanceof MyLocalStreamEnvironment) {
200215
List<URL> urlList = new ArrayList<>();
201-
urlList.addAll(Arrays.asList(dtClassLoader.getURLs()));
216+
urlList.addAll(Arrays.asList(parentClassloader.getURLs()));
202217
((MyLocalStreamEnvironment) env).setClasspaths(urlList);
203218
}
204219

@@ -221,12 +236,6 @@ private static void addEnvClassPath(StreamExecutionEnvironment env, Set<URL> cla
221236
contextEnvironment.getClasspaths().add(url);
222237
}
223238
}
224-
int i = 0;
225-
for(URL url : classPathSet){
226-
String classFileName = String.format(CLASS_FILE_NAME_FMT, i);
227-
env.registerCachedFile(url.getPath(), classFileName, true);
228-
i++;
229-
}
230239
}
231240

232241
private static void registerUDF(SqlTree sqlTree, List<URL> jarURList, URLClassLoader parentClassloader,
@@ -240,7 +249,6 @@ private static void registerUDF(SqlTree sqlTree, List<URL> jarURList, URLClassLo
240249
if (classLoader == null) {
241250
classLoader = FlinkUtil.loadExtraJar(jarURList, parentClassloader);
242251
}
243-
classLoader.loadClass(funcInfo.getClassName());
244252
FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(),
245253
tableEnv, classLoader);
246254
}
@@ -265,7 +273,10 @@ private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment en
265273
Table adaptTable = adaptSql == null ? table : tableEnv.sqlQuery(adaptSql);
266274

267275
RowTypeInfo typeInfo = new RowTypeInfo(adaptTable.getSchema().getTypes(), adaptTable.getSchema().getColumnNames());
268-
DataStream adaptStream = tableEnv.toAppendStream(adaptTable, typeInfo);
276+
DataStream adaptStream = tableEnv.toRetractStream(adaptTable, typeInfo)
277+
.map((Tuple2<Boolean, Row> f0) -> { return f0.f1; })
278+
.returns(typeInfo);
279+
269280
String fields = String.join(",", typeInfo.getFieldNames());
270281

271282
if(waterMarkerAssigner.checkNeedAssignWaterMarker(sourceTableInfo)){
@@ -278,38 +289,61 @@ private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment en
278289
Table regTable = tableEnv.fromDataStream(adaptStream, fields);
279290
tableEnv.registerTable(tableInfo.getName(), regTable);
280291
registerTableCache.put(tableInfo.getName(), regTable);
281-
if(StringUtils.isNotBlank(remoteSqlPluginPath)){
282-
classPathSet.add(PluginUtil.getRemoteJarFilePath(tableInfo.getType(), SourceTableInfo.SOURCE_SUFFIX, remoteSqlPluginPath));
283-
}
292+
classPathSet.add(PluginUtil.getRemoteJarFilePath(tableInfo.getType(), SourceTableInfo.SOURCE_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
284293
} else if (tableInfo instanceof TargetTableInfo) {
285294

286295
TableSink tableSink = StreamSinkFactory.getTableSink((TargetTableInfo) tableInfo, localSqlPluginPath);
287296
TypeInformation[] flinkTypes = FlinkUtil.transformTypes(tableInfo.getFieldClasses());
288297
tableEnv.registerTableSink(tableInfo.getName(), tableInfo.getFields(), flinkTypes, tableSink);
289-
if(StringUtils.isNotBlank(remoteSqlPluginPath)){
290-
classPathSet.add( PluginUtil.getRemoteJarFilePath(tableInfo.getType(), TargetTableInfo.TARGET_SUFFIX, remoteSqlPluginPath));
291-
}
298+
classPathSet.add( PluginUtil.getRemoteJarFilePath(tableInfo.getType(), TargetTableInfo.TARGET_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
292299
} else if(tableInfo instanceof SideTableInfo){
300+
293301
String sideOperator = ECacheType.ALL.name().equals(((SideTableInfo) tableInfo).getCacheType()) ? "all" : "async";
294302
sideTableMap.put(tableInfo.getName(), (SideTableInfo) tableInfo);
295-
if(StringUtils.isNotBlank(remoteSqlPluginPath)){
296-
classPathSet.add(PluginUtil.getRemoteSideJarFilePath(tableInfo.getType(), sideOperator, SideTableInfo.TARGET_SUFFIX, remoteSqlPluginPath));
297-
}
303+
classPathSet.add(PluginUtil.getRemoteSideJarFilePath(tableInfo.getType(), sideOperator, SideTableInfo.TARGET_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
298304
}else {
299305
throw new RuntimeException("not support table type:" + tableInfo.getType());
300306
}
301307
}
302308

303309
//The plug-in information corresponding to the table is loaded into the classPath env
304310
addEnvClassPath(env, classPathSet);
311+
int i = 0;
312+
for(URL url : classPathSet){
313+
String classFileName = String.format(CLASS_FILE_NAME_FMT, i);
314+
env.registerCachedFile(url.getPath(), classFileName, true);
315+
i++;
316+
}
305317
}
306318

307-
private static StreamExecutionEnvironment getStreamExeEnv(Properties confProperties, String deployMode) throws IOException {
319+
private static StreamExecutionEnvironment getStreamExeEnv(Properties confProperties, String deployMode) throws IOException, NoSuchMethodException {
308320
StreamExecutionEnvironment env = !ClusterMode.local.name().equals(deployMode) ?
309321
StreamExecutionEnvironment.getExecutionEnvironment() :
310322
new MyLocalStreamEnvironment();
311323

324+
env.getConfig().disableClosureCleaner();
312325
env.setParallelism(FlinkUtil.getEnvParallelism(confProperties));
326+
Configuration globalJobParameters = new Configuration();
327+
Method method = Configuration.class.getDeclaredMethod("setValueInternal", String.class, Object.class);
328+
method.setAccessible(true);
329+
330+
confProperties.forEach((key,val) -> {
331+
try {
332+
method.invoke(globalJobParameters, key, val);
333+
} catch (IllegalAccessException e) {
334+
e.printStackTrace();
335+
} catch (InvocationTargetException e) {
336+
e.printStackTrace();
337+
}
338+
});
339+
340+
ExecutionConfig exeConfig = env.getConfig();
341+
if(exeConfig.getGlobalJobParameters() == null){
342+
exeConfig.setGlobalJobParameters(globalJobParameters);
343+
}else if(exeConfig.getGlobalJobParameters() instanceof Configuration){
344+
((Configuration) exeConfig.getGlobalJobParameters()).addAll(globalJobParameters);
345+
}
346+
313347

314348
if(FlinkUtil.getMaxEnvParallelism(confProperties) > 0){
315349
env.setMaxParallelism(FlinkUtil.getMaxEnvParallelism(confProperties));

core/src/main/java/com/dtstack/flink/sql/parser/SqlParser.java

Lines changed: 8 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -138,12 +138,15 @@ public static SqlTree parseSql(String sql) throws Exception {
138138
if (!sqlTree.getTableInfoMap().keySet().contains(tableName)){
139139
CreateTableParser.SqlParserResult createTableResult = sqlTree.getPreDealTableMap().get(tableName);
140140
if(createTableResult == null){
141-
throw new RuntimeException("can't find table " + tableName);
141+
CreateTmpTableParser.SqlParserResult tmpTableResult = sqlTree.getTmpTableMap().get(tableName);
142+
if (tmpTableResult == null){
143+
throw new RuntimeException("can't find table " + tableName);
144+
}
145+
} else {
146+
TableInfo tableInfo = tableInfoParser.parseWithTableType(ETableType.SOURCE.getType(),
147+
createTableResult, LOCAL_SQL_PLUGIN_ROOT);
148+
sqlTree.addTableInfo(tableName, tableInfo);
142149
}
143-
144-
TableInfo tableInfo = tableInfoParser.parseWithTableType(ETableType.SOURCE.getType(),
145-
createTableResult, LOCAL_SQL_PLUGIN_ROOT);
146-
sqlTree.addTableInfo(tableName, tableInfo);
147150
}
148151
}
149152
}

core/src/main/java/com/dtstack/flink/sql/side/SideSQLParser.java

Lines changed: 10 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -158,8 +158,16 @@ private JoinInfo dealJoinNode(SqlJoin joinNode, Set<String> sideTableSet, Queue<
158158
JoinInfo tableInfo = new JoinInfo();
159159
tableInfo.setLeftTableName(leftTbName);
160160
tableInfo.setRightTableName(rightTableName);
161-
tableInfo.setLeftTableAlias(leftTbAlias);
162-
tableInfo.setRightTableAlias(rightTableAlias);
161+
if (leftTbAlias.equals("")){
162+
tableInfo.setLeftTableAlias(leftTbName);
163+
} else {
164+
tableInfo.setLeftTableAlias(leftTbAlias);
165+
}
166+
if (leftTbAlias.equals("")){
167+
tableInfo.setRightTableAlias(rightTableName);
168+
} else {
169+
tableInfo.setRightTableAlias(rightTableAlias);
170+
}
163171
tableInfo.setLeftIsSideTable(leftIsSide);
164172
tableInfo.setRightIsSideTable(rightIsSide);
165173
tableInfo.setLeftNode(leftNode);

0 commit comments

Comments (0)