Update flink-scala-api to 1.18.1_1.1.7
scala-steward committed Oct 20, 2024
1 parent 1017ae2 commit 3d9efc6
Showing 6 changed files with 24 additions and 21 deletions.
2 changes: 1 addition & 1 deletion modules/examples/scripts/PubSubConnectorWithJson.sc
@@ -1,5 +1,5 @@
//> using toolkit default
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
//> using dep "org.apache.flink:flink-clients:1.18.1"
//> using dep org.apache.flink:flink-connector-gcp-pubsub:3.1.0-1.18

21 changes: 12 additions & 9 deletions modules/examples/scripts/debug-sql.sc
@@ -1,4 +1,4 @@
-import $ivy.`org.flinkextended::flink-scala-api:1.18.1_1.1.6`
+import $ivy.`org.flinkextended::flink-scala-api:1.18.1_1.1.7`

import $ivy.`org.apache.flink:flink-clients:1.18.1`

@@ -18,7 +18,7 @@ import org.apache.flinkx.api.serializers._

import java.lang.{Long => JLong}

-val env = StreamExecutionEnvironment.getExecutionEnvironment
+val env = StreamExecutionEnvironment.getExecutionEnvironment
val tEnv = StreamTableEnvironment.create(env.getJavaEnv)

val settings = EnvironmentSettings.newInstance().inStreamingMode().build()
@@ -39,17 +39,20 @@ table.createTemporaryTable(
)

val tableDescriptor = TableDescriptor
-.forConnector("datagen")
-.schema(
-Schema.newBuilder
-.column("id", DataTypes.INT.notNull)
-.column("a", DataTypes.ROW(DataTypes.FIELD("np", DataTypes.INT.notNull())).notNull())
-.build)
+.forConnector("datagen")
+.schema(
+Schema.newBuilder
+.column("id", DataTypes.INT.notNull)
+.column("a", DataTypes.ROW(DataTypes.FIELD("np", DataTypes.INT.notNull())).notNull())
+.build
+)
.build
table.createTemporaryTable("t1", tableDescriptor)
table.createTemporaryTable("t2", tableDescriptor)
// table.dropTemporaryTable("t1")
// table.dropTemporaryTable("t2")

val res = table.executeSql("EXPLAIN SELECT a.id, COALESCE(a.a.np, b.a.np) c1, IFNULL(a.a.np, b.a.np) c2 FROM t1 a left JOIN t2 b ON a.id=b.id where a.a is null or a.a.np is null")
val res = table.executeSql(
"EXPLAIN SELECT a.id, COALESCE(a.a.np, b.a.np) c1, IFNULL(a.a.np, b.a.np) c2 FROM t1 a left JOIN t2 b ON a.id=b.id where a.a is null or a.a.np is null"
)
res.print
4 changes: 2 additions & 2 deletions modules/examples/scripts/flink-scala-cli.scala
@@ -1,4 +1,4 @@
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
//> using dep "org.apache.flink:flink-clients:1.18.1"

import org.apache.flinkx.api.*
@@ -12,7 +12,7 @@ import java.io.File
.getOrElse(Array.empty[File])
val elems = files.filter(_.isFile).map(_.getAbsolutePath())

-val env = StreamExecutionEnvironment.getExecutionEnvironment
+val env = StreamExecutionEnvironment.getExecutionEnvironment
val text = env.fromElements(elems*)

text.addSink(logger.info(_))
6 changes: 3 additions & 3 deletions modules/examples/scripts/gen-csv-file.sc
@@ -1,4 +1,4 @@
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
//> using dep "org.apache.flink:flink-clients:1.18.1"
//> using dep "org.apache.flink:flink-csv:1.18.1"
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -12,9 +12,9 @@ import org.apache.flinkx.api.serializers._

import java.lang.{Long => JLong}

-val env = StreamExecutionEnvironment.getExecutionEnvironment
+val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance.inStreamingMode.build
-val table = TableEnvironment.create(settings)
+val table = TableEnvironment.create(settings)
val schema = Schema.newBuilder
.column("id", DataTypes.INT())
.column("bid_price", DataTypes.DOUBLE())
8 changes: 4 additions & 4 deletions modules/examples/scripts/gen-kafka-data.sc
@@ -1,4 +1,4 @@
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
//> using dep "org.apache.flink:flink-clients:1.18.1"
//> using dep "org.apache.flink:flink-csv:1.18.1"
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -13,9 +13,9 @@ import org.apache.flinkx.api.serializers._

import java.lang.{Long => JLong}

-val env = StreamExecutionEnvironment.getExecutionEnvironment
+val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance.inStreamingMode.build
-val table = TableEnvironment.create(settings)
+val table = TableEnvironment.create(settings)
val schema = Schema.newBuilder
.column("id", DataTypes.INT())
.column("bid_price", DataTypes.DOUBLE())
@@ -30,7 +30,7 @@ table.createTemporaryTable(
.option(DataGenConnectorOptions.NUMBER_OF_ROWS, JLong(1000))
.option("fields.id.kind", "sequence")
.option("fields.id.start", "10001")
.option("fields.id.end", "20000")
.option("fields.id.end", "20000")
.build
)

4 changes: 2 additions & 2 deletions modules/examples/scripts/hybrid-source.sc
@@ -1,4 +1,4 @@
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.6"
//> using dep "org.flinkextended::flink-scala-api:1.18.1_1.1.7"
//> using dep "org.apache.flink:flink-clients:1.18.1"
//> using dep "org.apache.flink:flink-csv:1.18.1"
//> using dep "org.apache.flink:flink-connector-files:1.18.1"
@@ -26,7 +26,7 @@ val fileSource = FileSource
.build

val switchTimestamp = -1L
-val brokers = "confluentkafka-cp-kafka:9092"
+val brokers = "confluentkafka-cp-kafka:9092"

val kafkaSource = KafkaSource
.builder[String]()
