Skip to content

Commit 1692b61

Browse files
Update flink-scala-api to 1.18.1_1.1.7 (#168)
1 parent d3bd21e commit 1692b61

File tree

6 files changed

+24
-21
lines changed

6 files changed

+24
-21
lines changed

modules/examples/scripts/PubSubConnectorWithJson.sc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
//> using toolkit default
2-
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
2+
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
33
//> using dep "org.apache.flink:flink-clients:1.18.1"
44
//> using dep org.apache.flink:flink-connector-gcp-pubsub:3.1.0-1.18
55

modules/examples/scripts/debug-sql.sc

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import $ivy.`org.flinkextended::flink-scala-api:1.18.1_1.1.6`
1+
import $ivy.`org.flinkextended::flink-scala-api:1.18.1_1.1.7`
22

33
import $ivy.`org.apache.flink:flink-clients:1.18.1`
44

@@ -18,7 +18,7 @@ import org.apache.flinkx.api.serializers._
1818

1919
import java.lang.{Long => JLong}
2020

21-
val env = StreamExecutionEnvironment.getExecutionEnvironment
21+
val env = StreamExecutionEnvironment.getExecutionEnvironment
2222
val tEnv = StreamTableEnvironment.create(env.getJavaEnv)
2323

2424
val settings = EnvironmentSettings.newInstance().inStreamingMode().build()
@@ -39,17 +39,20 @@ table.createTemporaryTable(
3939
)
4040

4141
val tableDescriptor = TableDescriptor
42-
.forConnector("datagen")
43-
.schema(
44-
Schema.newBuilder
45-
.column("id", DataTypes.INT.notNull)
46-
.column("a", DataTypes.ROW(DataTypes.FIELD("np", DataTypes.INT.notNull())).notNull())
47-
.build)
42+
.forConnector("datagen")
43+
.schema(
44+
Schema.newBuilder
45+
.column("id", DataTypes.INT.notNull)
46+
.column("a", DataTypes.ROW(DataTypes.FIELD("np", DataTypes.INT.notNull())).notNull())
4847
.build
48+
)
49+
.build
4950
table.createTemporaryTable("t1", tableDescriptor)
5051
table.createTemporaryTable("t2", tableDescriptor)
5152
// table.dropTemporaryTable("t1")
5253
// table.dropTemporaryTable("t2")
5354

54-
val res = table.executeSql("EXPLAIN SELECT a.id, COALESCE(a.a.np, b.a.np) c1, IFNULL(a.a.np, b.a.np) c2 FROM t1 a left JOIN t2 b ON a.id=b.id where a.a is null or a.a.np is null")
55+
val res = table.executeSql(
56+
"EXPLAIN SELECT a.id, COALESCE(a.a.np, b.a.np) c1, IFNULL(a.a.np, b.a.np) c2 FROM t1 a left JOIN t2 b ON a.id=b.id where a.a is null or a.a.np is null"
57+
)
5558
res.print

modules/examples/scripts/flink-scala-cli.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
1+
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
22
//> using dep "org.apache.flink:flink-clients:1.18.1"
33

44
import org.apache.flinkx.api.*
@@ -12,7 +12,7 @@ import java.io.File
1212
.getOrElse(Array.empty[File])
1313
val elems = files.filter(_.isFile).map(_.getAbsolutePath())
1414

15-
val env = StreamExecutionEnvironment.getExecutionEnvironment
15+
val env = StreamExecutionEnvironment.getExecutionEnvironment
1616
val text = env.fromElements(elems*)
1717

1818
text.addSink(logger.info(_))

modules/examples/scripts/gen-csv-file.sc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
1+
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
22
//> using dep "org.apache.flink:flink-clients:1.18.1"
33
//> using dep "org.apache.flink:flink-csv:1.18.1"
44
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -12,9 +12,9 @@ import org.apache.flinkx.api.serializers._
1212

1313
import java.lang.{Long => JLong}
1414

15-
val env = StreamExecutionEnvironment.getExecutionEnvironment
15+
val env = StreamExecutionEnvironment.getExecutionEnvironment
1616
val settings = EnvironmentSettings.newInstance.inStreamingMode.build
17-
val table = TableEnvironment.create(settings)
17+
val table = TableEnvironment.create(settings)
1818
val schema = Schema.newBuilder
1919
.column("id", DataTypes.INT())
2020
.column("bid_price", DataTypes.DOUBLE())

modules/examples/scripts/gen-kafka-data.sc

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
1+
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
22
//> using dep "org.apache.flink:flink-clients:1.18.1"
33
//> using dep "org.apache.flink:flink-csv:1.18.1"
44
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -13,9 +13,9 @@ import org.apache.flinkx.api.serializers._
1313

1414
import java.lang.{Long => JLong}
1515

16-
val env = StreamExecutionEnvironment.getExecutionEnvironment
16+
val env = StreamExecutionEnvironment.getExecutionEnvironment
1717
val settings = EnvironmentSettings.newInstance.inStreamingMode.build
18-
val table = TableEnvironment.create(settings)
18+
val table = TableEnvironment.create(settings)
1919
val schema = Schema.newBuilder
2020
.column("id", DataTypes.INT())
2121
.column("bid_price", DataTypes.DOUBLE())
@@ -30,7 +30,7 @@ table.createTemporaryTable(
3030
.option(DataGenConnectorOptions.NUMBER_OF_ROWS, JLong(1000))
3131
.option("fields.id.kind", "sequence")
3232
.option("fields.id.start", "10001")
33-
.option("fields.id.end", "20000")
33+
.option("fields.id.end", "20000")
3434
.build
3535
)
3636

modules/examples/scripts/hybrid-source.sc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
1+
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
22
//> using dep "org.apache.flink:flink-clients:1.18.1"
33
//> using dep "org.apache.flink:flink-csv:1.18.1"
44
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -26,7 +26,7 @@ val fileSource = FileSource
2626
.build
2727

2828
val switchTimestamp = -1L
29-
val brokers = "confluentkafka-cp-kafka:9092"
29+
val brokers = "confluentkafka-cp-kafka:9092"
3030

3131
val kafkaSource = KafkaSource
3232
.builder[String]()

0 commit comments

Comments (0)