
Commit 2620517
Author: yanxi0227
Commit message: fix conflicts
2 parents: 5fb4098 + 62ee4f9

122 files changed (+2567, -2526 lines)


README.md
Lines changed: 6 additions & 3 deletions

@@ -11,12 +11,17 @@
 # Supported
 * Source tables: kafka 0.9 and 1.x
 * Side tables: mysql, SQlServer, oracle, hbase, mongo, redis, cassandra
-* Result tables: mysql, SQlServer, oracle, hbase, elasticsearch5.x, mongo, redis, cassandra, console
+* Result tables: mysql, SQlServer, oracle, hbase, elasticsearch5.x, mongo, redis, cassandra

 # Roadmap
 * Add SQL support for CEP
 * Side table snapshots
 * SQL optimization (predicate pushdown, etc.)
+* serverSocket source table
+* console result table
+* kafka avro format
+* topN

 ## 1 Quick start
 ### 1.1 Run modes
@@ -142,7 +147,6 @@ sh submit.sh -sql D:\sideSql.txt -name xctest -remoteSqlPluginPath /opt/dtstack
 ## 2 Structure
 ### 2.1 Source table plugins
 * [kafka source table plugin](docs/kafkaSource.md)
-* [serverSocket source table plugin](docs/serverSocketSource.md)

 ### 2.2 Result table plugins
 * [elasticsearch result table plugin](docs/elasticsearchSink.md)
@@ -151,7 +155,6 @@ sh submit.sh -sql D:\sideSql.txt -name xctest -remoteSqlPluginPath /opt/dtstack
 * [mongo result table plugin](docs/mongoSink.md)
 * [redis result table plugin](docs/redisSink.md)
 * [cassandra result table plugin](docs/cassandraSink.md)
-* [console result table plugin](docs/consoleSink.md)

 ### 2.3 Side table plugins
 * [hbase side table plugin](docs/hbaseSide.md)

cassandra/cassandra-side/cassandra-all-side/pom.xml
Lines changed: 1 addition & 1 deletion

@@ -76,7 +76,7 @@
         </copy>

         <move file="${basedir}/../../../plugins/cassandraallside/${project.artifactId}-${project.version}.jar"
-              tofile="${basedir}/../../../plugins/cassandraallside/${project.name}.jar" />
+              tofile="${basedir}/../../../plugins/cassandraallside/${project.name}-${git.branch}.jar" />
       </tasks>
     </configuration>
   </execution>

cassandra/cassandra-side/cassandra-all-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAllSideInfo.java
Lines changed: 2 additions & 5 deletions

@@ -23,6 +23,7 @@
 import com.dtstack.flink.sql.side.SideInfo;
 import com.dtstack.flink.sql.side.SideTableInfo;
 import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
+import com.dtstack.flink.sql.util.ParseUtils;
 import org.apache.calcite.sql.SqlBasicCall;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlNode;
@@ -86,11 +87,7 @@ public void parseSelectFields(JoinInfo joinInfo) {
         SqlNode conditionNode = joinInfo.getCondition();

         List<SqlNode> sqlNodeList = Lists.newArrayList();
-        if (conditionNode.getKind() == SqlKind.AND) {
-            sqlNodeList.addAll(Lists.newArrayList(((SqlBasicCall) conditionNode).getOperands()));
-        } else {
-            sqlNodeList.add(conditionNode);
-        }
+        ParseUtils.parseAnd(conditionNode, sqlNodeList);

         for (SqlNode sqlNode : sqlNodeList) {
             dealOneEqualCon(sqlNode, sideTableName);
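The new ParseUtils.parseAnd helper (also adopted in CassandraAsyncSideInfo below) is referenced here, but its body is not part of this commit's visible diff. A minimal sketch of what such a helper presumably does — recursively flattening a nested AND condition into its leaf predicates, where the old inline code only split the top level — could look like the following; the package and signature are taken from the call sites, the recursive body is an assumption:

    // Hedged sketch, not the committed implementation: flatten nested AND
    // conditions into their leaf predicates.
    package com.dtstack.flink.sql.util;

    import org.apache.calcite.sql.SqlBasicCall;
    import org.apache.calcite.sql.SqlKind;
    import org.apache.calcite.sql.SqlNode;

    import java.util.List;

    public class ParseUtils {

        public static void parseAnd(SqlNode conditionNode, List<SqlNode> sqlNodeList) {
            if (conditionNode.getKind() == SqlKind.AND) {
                // Recurse into both operands so a chain like "a AND b AND c"
                // yields three leaf nodes instead of one nested call.
                for (SqlNode operand : ((SqlBasicCall) conditionNode).getOperands()) {
                    parseAnd(operand, sqlNodeList);
                }
            } else {
                sqlNodeList.add(conditionNode);
            }
        }
    }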

cassandra/cassandra-side/cassandra-async-side/pom.xml
Lines changed: 1 addition & 1 deletion

@@ -92,7 +92,7 @@
         </copy>

         <move file="${basedir}/../../../plugins/cassandraasyncside/${project.artifactId}-${project.version}.jar"
-              tofile="${basedir}/../../../plugins/cassandraasyncside/${project.name}.jar" />
+              tofile="${basedir}/../../../plugins/cassandraasyncside/${project.name}-${git.branch}.jar" />
       </tasks>
     </configuration>
   </execution>

cassandra/cassandra-side/cassandra-async-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAsyncSideInfo.java
Lines changed: 2 additions & 5 deletions

@@ -23,6 +23,7 @@
 import com.dtstack.flink.sql.side.SideInfo;
 import com.dtstack.flink.sql.side.SideTableInfo;
 import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
+import com.dtstack.flink.sql.util.ParseUtils;
 import org.apache.calcite.sql.SqlBasicCall;
 import org.apache.calcite.sql.SqlIdentifier;
 import org.apache.calcite.sql.SqlKind;
@@ -55,11 +56,7 @@ public void buildEqualInfo(JoinInfo joinInfo, SideTableInfo sideTableInfo) {
         SqlNode conditionNode = joinInfo.getCondition();

         List<SqlNode> sqlNodeList = Lists.newArrayList();
-        if (conditionNode.getKind() == SqlKind.AND) {
-            sqlNodeList.addAll(Lists.newArrayList(((SqlBasicCall) conditionNode).getOperands()));
-        } else {
-            sqlNodeList.add(conditionNode);
-        }
+        ParseUtils.parseAnd(conditionNode, sqlNodeList);

         for (SqlNode sqlNode : sqlNodeList) {
             dealOneEqualCon(sqlNode, sideTableName);

core/pom.xml
Lines changed: 1 addition & 1 deletion

@@ -138,7 +138,7 @@
       </fileset>
     </copy>
     <move file="${basedir}/../plugins/${project.artifactId}-${project.version}.jar"
-          tofile="${basedir}/../plugins/${project.name}.jar" />
+          tofile="${basedir}/../plugins/${project.name}-${git.branch}.jar" />
   </tasks>
 </configuration>
</execution>

core/src/main/java/com/dtstack/flink/sql/Main.java
Lines changed: 72 additions & 32 deletions

@@ -23,8 +23,6 @@
 import com.dtstack.flink.sql.classloader.DtClassLoader;
 import com.dtstack.flink.sql.enums.ECacheType;
 import com.dtstack.flink.sql.environment.MyLocalStreamEnvironment;
-import com.dtstack.flink.sql.options.LauncherOptionParser;
-import com.dtstack.flink.sql.options.LauncherOptions;
 import com.dtstack.flink.sql.parser.*;
 import com.dtstack.flink.sql.side.SideSqlExec;
 import com.dtstack.flink.sql.side.SideTableInfo;
@@ -40,31 +38,40 @@
 import org.apache.calcite.config.Lex;
 import org.apache.calcite.sql.SqlInsert;
 import org.apache.calcite.sql.SqlNode;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.Options;
 import org.apache.commons.io.Charsets;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.api.common.time.Time;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.calcite.shaded.com.google.common.base.Preconditions;
 import org.apache.flink.calcite.shaded.com.google.common.base.Strings;
 import org.apache.flink.calcite.shaded.com.google.common.collect.Lists;
 import org.apache.flink.calcite.shaded.com.google.common.collect.Maps;
 import org.apache.flink.calcite.shaded.com.google.common.collect.Sets;
 import org.apache.flink.client.program.ContextEnvironment;
+import org.apache.flink.configuration.Configuration;
 import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamContextEnvironment;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.table.api.Table;
 import org.apache.flink.table.api.java.StreamTableEnvironment;
 import org.apache.flink.table.sinks.TableSink;
+import org.apache.flink.types.Row;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.File;
 import java.io.IOException;
 import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.net.URLDecoder;
@@ -98,21 +105,36 @@ public class Main {

     public static void main(String[] args) throws Exception {

-        LauncherOptionParser optionParser = new LauncherOptionParser(args);
-        LauncherOptions launcherOptions = optionParser.getLauncherOptions();
-
-        String sql = launcherOptions.getSql();
-        String name =launcherOptions.getName();
-        String addJarListStr = launcherOptions.getAddjar();
-        String localSqlPluginPath = launcherOptions.getLocalSqlPluginPath();
-        String remoteSqlPluginPath = launcherOptions.getRemoteSqlPluginPath();
-        String deployMode = launcherOptions.getMode();
-        String confProp = launcherOptions.getConfProp();
+        Options options = new Options();
+        options.addOption("sql", true, "sql config");
+        options.addOption("name", true, "job name");
+        options.addOption("addjar", true, "add jar");
+        options.addOption("localSqlPluginPath", true, "local sql plugin path");
+        options.addOption("remoteSqlPluginPath", true, "remote sql plugin path");
+        options.addOption("confProp", true, "env properties");
+        options.addOption("mode", true, "deploy mode");
+
+        options.addOption("savePointPath", true, "Savepoint restore path");
+        options.addOption("allowNonRestoredState", true, "Flag indicating whether non restored state is allowed if the savepoint");
+
+        CommandLineParser parser = new DefaultParser();
+        CommandLine cl = parser.parse(options, args);
+        String sql = cl.getOptionValue("sql");
+        String name = cl.getOptionValue("name");
+        String addJarListStr = cl.getOptionValue("addjar");
+        String localSqlPluginPath = cl.getOptionValue("localSqlPluginPath");
+        String remoteSqlPluginPath = cl.getOptionValue("remoteSqlPluginPath");
+        String deployMode = cl.getOptionValue("mode");
+        String confProp = cl.getOptionValue("confProp");
+
+        Preconditions.checkNotNull(sql, "parameters of sql is required");
+        Preconditions.checkNotNull(name, "parameters of name is required");
+        Preconditions.checkNotNull(localSqlPluginPath, "parameters of localSqlPluginPath is required");

         sql = URLDecoder.decode(sql, Charsets.UTF_8.name());
         SqlParser.setLocalSqlPluginRoot(localSqlPluginPath);
-        List<String> addJarFileList = Lists.newArrayList();

+        List<String> addJarFileList = Lists.newArrayList();
         if(!Strings.isNullOrEmpty(addJarListStr)){
             addJarListStr = URLDecoder.decode(addJarListStr, Charsets.UTF_8.name());
             addJarFileList = objMapper.readValue(addJarListStr, List.class);
@@ -221,12 +243,6 @@ private static void addEnvClassPath(StreamExecutionEnvironment env, Set<URL> cla
                 contextEnvironment.getClasspaths().add(url);
             }
         }
-        int i = 0;
-        for(URL url : classPathSet){
-            String classFileName = String.format(CLASS_FILE_NAME_FMT, i);
-            env.registerCachedFile(url.getPath(), classFileName, true);
-            i++;
-        }
     }

     private static void registerUDF(SqlTree sqlTree, List<URL> jarURList, URLClassLoader parentClassloader,
@@ -240,7 +256,6 @@ private static void registerUDF(SqlTree sqlTree, List<URL> jarURList, URLClassLo
            if (classLoader == null) {
                classLoader = FlinkUtil.loadExtraJar(jarURList, parentClassloader);
            }
-           classLoader.loadClass(funcInfo.getClassName());
            FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(),
                    tableEnv, classLoader);
        }
@@ -265,7 +280,10 @@ private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment en
            Table adaptTable = adaptSql == null ? table : tableEnv.sqlQuery(adaptSql);

            RowTypeInfo typeInfo = new RowTypeInfo(adaptTable.getSchema().getTypes(), adaptTable.getSchema().getColumnNames());
-           DataStream adaptStream = tableEnv.toAppendStream(adaptTable, typeInfo);
+           DataStream adaptStream = tableEnv.toRetractStream(adaptTable, typeInfo)
+                   .map((Tuple2<Boolean, Row> f0) -> { return f0.f1; })
+                   .returns(typeInfo);
+
            String fields = String.join(",", typeInfo.getFieldNames());

            if(waterMarkerAssigner.checkNeedAssignWaterMarker(sourceTableInfo)){
@@ -278,38 +296,60 @@ private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment en
            Table regTable = tableEnv.fromDataStream(adaptStream, fields);
            tableEnv.registerTable(tableInfo.getName(), regTable);
            registerTableCache.put(tableInfo.getName(), regTable);
-           if(StringUtils.isNotBlank(remoteSqlPluginPath)){
-               classPathSet.add(PluginUtil.getRemoteJarFilePath(tableInfo.getType(), SourceTableInfo.SOURCE_SUFFIX, remoteSqlPluginPath));
-           }
+           classPathSet.add(PluginUtil.getRemoteJarFilePath(tableInfo.getType(), SourceTableInfo.SOURCE_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
        } else if (tableInfo instanceof TargetTableInfo) {

            TableSink tableSink = StreamSinkFactory.getTableSink((TargetTableInfo) tableInfo, localSqlPluginPath);
            TypeInformation[] flinkTypes = FlinkUtil.transformTypes(tableInfo.getFieldClasses());
            tableEnv.registerTableSink(tableInfo.getName(), tableInfo.getFields(), flinkTypes, tableSink);
-           if(StringUtils.isNotBlank(remoteSqlPluginPath)){
-               classPathSet.add( PluginUtil.getRemoteJarFilePath(tableInfo.getType(), TargetTableInfo.TARGET_SUFFIX, remoteSqlPluginPath));
-           }
+           classPathSet.add( PluginUtil.getRemoteJarFilePath(tableInfo.getType(), TargetTableInfo.TARGET_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
        } else if(tableInfo instanceof SideTableInfo){
+
            String sideOperator = ECacheType.ALL.name().equals(((SideTableInfo) tableInfo).getCacheType()) ? "all" : "async";
            sideTableMap.put(tableInfo.getName(), (SideTableInfo) tableInfo);
-           if(StringUtils.isNotBlank(remoteSqlPluginPath)){
-               classPathSet.add(PluginUtil.getRemoteSideJarFilePath(tableInfo.getType(), sideOperator, SideTableInfo.TARGET_SUFFIX, remoteSqlPluginPath));
-           }
+           classPathSet.add(PluginUtil.getRemoteSideJarFilePath(tableInfo.getType(), sideOperator, SideTableInfo.TARGET_SUFFIX, remoteSqlPluginPath, localSqlPluginPath));
        }else {
            throw new RuntimeException("not support table type:" + tableInfo.getType());
        }
     }

     //The plug-in information corresponding to the table is loaded into the classPath env
     addEnvClassPath(env, classPathSet);
+    int i = 0;
+    for(URL url : classPathSet){
+        String classFileName = String.format(CLASS_FILE_NAME_FMT, i);
+        env.registerCachedFile(url.getPath(), classFileName, true);
+        i++;
+    }
 }

-private static StreamExecutionEnvironment getStreamExeEnv(Properties confProperties, String deployMode) throws IOException {
+private static StreamExecutionEnvironment getStreamExeEnv(Properties confProperties, String deployMode) throws IOException, NoSuchMethodException {
     StreamExecutionEnvironment env = !ClusterMode.local.name().equals(deployMode) ?
             StreamExecutionEnvironment.getExecutionEnvironment() :
             new MyLocalStreamEnvironment();

     env.setParallelism(FlinkUtil.getEnvParallelism(confProperties));
+    Configuration globalJobParameters = new Configuration();
+    Method method = Configuration.class.getDeclaredMethod("setValueInternal", String.class, Object.class);
+    method.setAccessible(true);
+
+    confProperties.forEach((key,val) -> {
+        try {
+            method.invoke(globalJobParameters, key, val);
+        } catch (IllegalAccessException e) {
+            e.printStackTrace();
+        } catch (InvocationTargetException e) {
+            e.printStackTrace();
+        }
+    });
+
+    ExecutionConfig exeConfig = env.getConfig();
+    if(exeConfig.getGlobalJobParameters() == null){
+        exeConfig.setGlobalJobParameters(globalJobParameters);
+    }else if(exeConfig.getGlobalJobParameters() instanceof Configuration){
+        ((Configuration) exeConfig.getGlobalJobParameters()).addAll(globalJobParameters);
+    }
+

     if(FlinkUtil.getMaxEnvParallelism(confProperties) > 0){
         env.setMaxParallelism(FlinkUtil.getMaxEnvParallelism(confProperties));
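The reworked getStreamExeEnv publishes every -confProp entry as Flink global job parameters, but nothing in this commit reads them back. As a hedged illustration of how user code (for example a function registered as a UDF) could consume them using standard Flink 1.x API — the class name ConfAwareMapper and the "job.tag" key are hypothetical:

    import org.apache.flink.api.common.ExecutionConfig;
    import org.apache.flink.api.common.functions.RichMapFunction;
    import org.apache.flink.configuration.Configuration;

    import java.util.Collections;
    import java.util.Map;

    public class ConfAwareMapper extends RichMapFunction<String, String> {

        private transient Map<String, String> confProp;

        @Override
        public void open(Configuration parameters) throws Exception {
            ExecutionConfig.GlobalJobParameters globalParams =
                    getRuntimeContext().getExecutionConfig().getGlobalJobParameters();
            // The job parameters set in getStreamExeEnv are a Flink Configuration,
            // which overrides toMap(), so the -confProp entries are visible here.
            confProp = globalParams == null
                    ? Collections.<String, String>emptyMap()
                    : globalParams.toMap();
        }

        @Override
        public String map(String value) {
            // Example: tag each record with a property handed in through -confProp.
            return confProp.getOrDefault("job.tag", "unknown") + ":" + value;
        }
    }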

core/src/main/java/com/dtstack/flink/sql/parser/SqlParser.java
Lines changed: 8 additions & 5 deletions

@@ -138,12 +138,15 @@ public static SqlTree parseSql(String sql) throws Exception {
             if (!sqlTree.getTableInfoMap().keySet().contains(tableName)){
                 CreateTableParser.SqlParserResult createTableResult = sqlTree.getPreDealTableMap().get(tableName);
                 if(createTableResult == null){
-                    throw new RuntimeException("can't find table " + tableName);
+                    CreateTmpTableParser.SqlParserResult tmpTableResult = sqlTree.getTmpTableMap().get(tableName);
+                    if (tmpTableResult == null){
+                        throw new RuntimeException("can't find table " + tableName);
+                    }
+                } else {
+                    TableInfo tableInfo = tableInfoParser.parseWithTableType(ETableType.SOURCE.getType(),
+                            createTableResult, LOCAL_SQL_PLUGIN_ROOT);
+                    sqlTree.addTableInfo(tableName, tableInfo);
                 }
-
-                TableInfo tableInfo = tableInfoParser.parseWithTableType(ETableType.SOURCE.getType(),
-                        createTableResult, LOCAL_SQL_PLUGIN_ROOT);
-                sqlTree.addTableInfo(tableName, tableInfo);
             }
         }
     }

core/src/main/java/com/dtstack/flink/sql/side/SideSQLParser.java
Lines changed: 10 additions & 2 deletions

@@ -158,8 +158,16 @@ private JoinInfo dealJoinNode(SqlJoin joinNode, Set<String> sideTableSet, Queue<
         JoinInfo tableInfo = new JoinInfo();
         tableInfo.setLeftTableName(leftTbName);
         tableInfo.setRightTableName(rightTableName);
-        tableInfo.setLeftTableAlias(leftTbAlias);
-        tableInfo.setRightTableAlias(rightTableAlias);
+        if (leftTbAlias.equals("")){
+            tableInfo.setLeftTableAlias(leftTbName);
+        } else {
+            tableInfo.setLeftTableAlias(leftTbAlias);
+        }
+        if (leftTbAlias.equals("")){
+            tableInfo.setRightTableAlias(rightTableName);
+        } else {
+            tableInfo.setRightTableAlias(rightTableAlias);
+        }
         tableInfo.setLeftIsSideTable(leftIsSide);
         tableInfo.setRightIsSideTable(rightIsSide);
         tableInfo.setLeftNode(leftNode);
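One detail worth flagging: the second added guard tests leftTbAlias again before choosing the right-table alias, so a query that aliases its left table but not its right one still ends up with an empty right alias. Judging from the symmetry of the change, the intended condition is presumably rightTableAlias; a minimal sketch of that reading, where the fix itself is an assumption rather than part of this commit:

    // Presumed intent: fall back to the table name whenever the matching alias is empty.
    if (leftTbAlias.equals("")) {
        tableInfo.setLeftTableAlias(leftTbName);
    } else {
        tableInfo.setLeftTableAlias(leftTbAlias);
    }
    if (rightTableAlias.equals("")) {   // the committed code re-checks leftTbAlias here
        tableInfo.setRightTableAlias(rightTableName);
    } else {
        tableInfo.setRightTableAlias(rightTableAlias);
    }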
