diff --git a/.github/workflows/build-pr.yaml b/.github/workflows/build-pr.yaml
index f997e08d1..1d4a00d83 100644
--- a/.github/workflows/build-pr.yaml
+++ b/.github/workflows/build-pr.yaml
@@ -2,34 +2,29 @@ name: Build PRs
 on:
   pull_request:
 
 jobs:
-  build-all:
-    runs-on: ubuntu-latest
+  run-scripted-tests:
+    runs-on: ubuntu-20.04
     strategy:
       matrix:
         java: [ '8', '11', '13' ]
     steps:
-      - name: Checkout
-        uses: actions/checkout@v2
+
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
       - name: Setup java
         uses: joschi/setup-jdk@v2
         with:
           java-version: ${{ matrix.java }}
           architecture: x64
+
       - name: Cache SBT and Coursier cache
         uses: coursier/cache-action@v3
-      - name: Scalafmt check
-        run: |
-          cd core
-          sbt "scalafmtCheckAll; scalafmtSbtCheck"
-      - name: Build documentation
-        run: |
-          sudo snap install yq
-          cd docs
-          make html-author-mode
-      - name: Run integration tests
+
+      - name: Run scripted tests
         run: |
           git config --global user.email "cloudflow@lightbend.com"
           git config --global user.name "Cloudflow CI"
           cd core
-          sbt -mem 2048 +test
           sbt -mem 2048 +publishLocal cloudflow-sbt-plugin/scripted
diff --git a/.github/workflows/build-test-tools.yml b/.github/workflows/build-test-tools.yml
index 497fd2b5a..7007a78e4 100644
--- a/.github/workflows/build-test-tools.yml
+++ b/.github/workflows/build-test-tools.yml
@@ -6,14 +6,13 @@ on:
     branches: [main]
 
 jobs:
-  build:
-    runs-on: ubuntu-18.04
+  build-binary:
+    runs-on: ubuntu-20.04
     steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Fetch tags
-        run: git fetch --depth=100 origin +refs/tags/*:refs/tags/*
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
 
       - name: Checkout GitHub merge
         if: github.event.pull_request
diff --git a/.github/workflows/build_and_publish.yaml b/.github/workflows/build_and_publish.yaml
index 554b972cf..943de8e55 100644
--- a/.github/workflows/build_and_publish.yaml
+++ b/.github/workflows/build_and_publish.yaml
@@ -6,10 +6,9 @@ on:
       - main
 jobs:
   build-docs:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
-      - name: Checkout
-        uses: actions/checkout@v2.3.3
+      - uses: actions/checkout@v2
       - run: |
           git fetch --no-tags --prune --depth=1 origin +refs/heads/*:refs/remotes/origin/*
           git fetch --depth=1 origin +refs/tags/*:refs/tags/*
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 4e31f4d11..bd5000803 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -1,3 +1,5 @@
+name: Build and Test
+
 on:
   # Trigger the workflow on push or pull request,
   # push only for the main branch
@@ -7,13 +9,11 @@ on:
       - main
   pull_request:
 
-
-name: Build and Test
 jobs:
   build-and-test-docs:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2.3.3
+      - uses: actions/checkout@v2
         with:
           fetch-depth: 0
       - name: Build documentation
@@ -23,23 +23,62 @@ jobs:
           cd docs
           make html-author-mode
           grep -r "page unresolved" target/staging/docs/ && exit 1 || echo 'ok'
-  build-and-test-scala:
-    runs-on: ubuntu-latest
+
+  build-and-test-core:
+    name: "Cloudflow Core"
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        java: [ '8', '11', '13' ]
     steps:
-      - uses: actions/checkout@v2
-      - name: Setup environment
-        run: |
-          make -C .github/workflows/setup setup-local-all
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
       - name: Cache SBT and Coursier cache
         uses: coursier/cache-action@v3
+      - name: Scalafmt check
+        run: cd core && sbt "scalafmtCheckAll; scalafmtSbtCheck"
+
       - name: build-and-test
         env:
           SBT_OPTS: -Xms512M -Xmx2048M -Xss2M -XX:MaxMetaspaceSize=1024M
-        run: |
-          export PATH="$HOME/opt/sbt/bin:$PATH"
-          export SBT_OPTS="-Xss256M"
-          cd scripts
-          ./build-all.sh +test
+        run: ./scripts/build-core.sh +test
+
+  build-and-test-sbt-examples:
+    name: "sbt samples"
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        java: [ '8' ]
+    steps:
+
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Cache SBT and Coursier cache
+        uses: coursier/cache-action@v3
+
+      - name: test-sbt-examples
+        run: ./scripts/build-sbt-examples.sh test
+
+  build-and-test-mvn-examples:
+    name: "mvn samples"
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        java: [ '8' ]
+    steps:
+
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Cache SBT and Coursier cache
+        uses: coursier/cache-action@v3
+
+      - name: test-maven-examples
+        run: ./scripts/build-mvn-examples.sh test
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index b675d7a46..6c1ee4957 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -8,7 +8,7 @@ on:
 
 jobs:
   update_release_draft:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
       # Drafts your next Release notes as Pull Requests are merged
       # https://github.com/raboof/release-drafter/releases
diff --git a/.github/workflows/setup/Makefile b/.github/workflows/setup/Makefile
deleted file mode 100644
index 75b4771f0..000000000
--- a/.github/workflows/setup/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-all: setup-local-all
-
-setup-local-all: install-newer-sbt
-
-install-newer-sbt:
-	mkdir -p "${HOME}/opt"
-	curl -Lo "${HOME}/opt/sbt.tgz" https://github.com/sbt/sbt/releases/download/v1.3.10/sbt-1.3.10.tgz
-	tar xf "${HOME}/opt/sbt.tgz" -C "${HOME}/opt/"
diff --git a/core/cloudflow-it/swiss-knife/project/cloudflow-plugins.sbt b/core/cloudflow-it/swiss-knife/project/cloudflow-plugins.sbt
index 25de68ea9..4fc707bc5 100644
--- a/core/cloudflow-it/swiss-knife/project/cloudflow-plugins.sbt
+++ b/core/cloudflow-it/swiss-knife/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/call-record-aggregator/akka-cdr-ingestor/src/test/scala/carly/ingestor/CallRecordSplitSpec.scala b/examples/call-record-aggregator/akka-cdr-ingestor/src/test/scala/carly/ingestor/CallRecordSplitSpec.scala
index 26080694a..423e6204a 100644
--- a/examples/call-record-aggregator/akka-cdr-ingestor/src/test/scala/carly/ingestor/CallRecordSplitSpec.scala
+++ b/examples/call-record-aggregator/akka-cdr-ingestor/src/test/scala/carly/ingestor/CallRecordSplitSpec.scala
@@ -43,7 +43,6 @@ class CallRecordSplitSpec extends AnyWordSpec with Matchers with ScalaFutures wi
       val streamlet = new CallRecordSplit
       val instant = Instant.now.toEpochMilli / 1000
-      val past = Instant.now.minus(5000, ChronoUnit.DAYS).toEpochMilli / 1000
 
       val cr1 = CallRecord("user-1", "user-2", "f", 10L, instant)
       val cr2 = CallRecord("user-1", "user-2", "f", 15L, instant)
@@ -51,7 +50,7 @@ class CallRecordSplitSpec extends AnyWordSpec with Matchers with ScalaFutures wi
 
       val source = Source(Vector(cr1, cr2, cr3))
 
-      val in = testkit.inletFromSource(streamlet.in, source)
+      val in    = testkit.inletFromSource(streamlet.in, source)
       val left  = testkit.outletAsTap(streamlet.left)
       val right = testkit.outletAsTap(streamlet.right)
diff --git a/examples/call-record-aggregator/project/cloudflow-plugins.sbt b/examples/call-record-aggregator/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/call-record-aggregator/project/cloudflow-plugins.sbt
+++ b/examples/call-record-aggregator/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/project/build.properties b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/project/build.properties
new file mode 100644
index 000000000..dbae93bcf
--- /dev/null
+++ b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.4.9
diff --git a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/actors/ConnectedCarActor.scala b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/actors/ConnectedCarActor.scala
index 3ed9cf4dc..8408fff9e 100644
--- a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/actors/ConnectedCarActor.scala
+++ b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/actors/ConnectedCarActor.scala
@@ -1,29 +1,31 @@
 package connectedcar.actors
 
-import akka.actor.typed.{ActorRef, Behavior}
+import akka.actor.typed.{ ActorRef, Behavior }
 import akka.actor.typed.scaladsl.Behaviors
-import connectedcar.data.{ConnectedCarAgg, ConnectedCarERecord}
+import connectedcar.data.{ ConnectedCarAgg, ConnectedCarERecord }
 
-case class ConnectedCarERecordWrapper(record: ConnectedCarERecord, sender:ActorRef[ConnectedCarAgg])
+case class ConnectedCarERecordWrapper(record: ConnectedCarERecord, sender: ActorRef[ConnectedCarAgg])
 
 object ConnectedCarActor {
-  def apply(carId:String): Behavior[ConnectedCarERecordWrapper] = {
-    def updated(numberOfRecords: Int, driverName: String, carId:String, averageSpeed: Double, currentSpeed: Double): Behavior[ConnectedCarERecordWrapper] = {
-      Behaviors.receive { (context, message) => {
-        context.log.info(s"Update Received- CarId: ${carId} MessageCarId: ${message.record.carId} From Actor: ${message.sender.path}")
+  def apply(carId: String): Behavior[ConnectedCarERecordWrapper] = {
+    def updated(numberOfRecords: Int,
+                driverName: String,
+                carId: String,
+                averageSpeed: Double,
+                currentSpeed: Double): Behavior[ConnectedCarERecordWrapper] =
+      Behaviors.receive { (context, message) =>
+        context.log.info(s"Update Received- CarId: ${carId} MessageCarId: ${message.record.carId} From Actor: ${message.sender.path}")
 
-        val newAverage = ((averageSpeed * numberOfRecords) + message.record.speed) / (numberOfRecords + 1)
-        val newNumberOfRecords = numberOfRecords+1
+        val newAverage         = ((averageSpeed * numberOfRecords) + message.record.speed) / (numberOfRecords + 1)
+        val newNumberOfRecords = numberOfRecords + 1
 
-        val newAgg = ConnectedCarAgg(message.record.carId, message.record.driver, newAverage, newNumberOfRecords)
+        val newAgg = ConnectedCarAgg(message.record.carId, message.record.driver, newAverage, newNumberOfRecords)
 
-        message.sender ! newAgg
+        message.sender ! newAgg
 
-        updated(newNumberOfRecords, message.record.driver, carId, newAverage, message.record.speed)
-      }
+        updated(newNumberOfRecords, message.record.driver, carId, newAverage, message.record.speed)
       }
-    }
 
     updated(0, "", carId, 0, 0.0)
   }
diff --git a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/streamlets/ConnectedCarCluster.scala b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/streamlets/ConnectedCarCluster.scala
index f8ea69226..afc0fa91e 100644
--- a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/streamlets/ConnectedCarCluster.scala
+++ b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/main/scala/connectedcar/streamlets/ConnectedCarCluster.scala
@@ -1,17 +1,17 @@
 package connectedcar.streamlets
 
-import akka.cluster.sharding.typed.scaladsl.{Entity, EntityTypeKey}
+import akka.cluster.sharding.typed.scaladsl.{ Entity, EntityTypeKey }
 import akka.util.Timeout
 import akka.kafka.ConsumerMessage.CommittableOffset
 import akka.stream.scaladsl.SourceWithContext
-import cloudflow.akkastream.scaladsl.{FlowWithCommittableContext, RunnableGraphStreamletLogic}
+import cloudflow.akkastream.scaladsl.{ FlowWithCommittableContext, RunnableGraphStreamletLogic }
 import cloudflow.streamlets.StreamletShape
-import cloudflow.streamlets.avro.{AvroInlet, AvroOutlet}
-import connectedcar.actors.{ConnectedCarActor, ConnectedCarERecordWrapper}
+import cloudflow.streamlets.avro.{ AvroInlet, AvroOutlet }
+import connectedcar.actors.{ ConnectedCarActor, ConnectedCarERecordWrapper }
 
 import scala.concurrent.duration._
-import cloudflow.akkastream.{AkkaStreamlet, Clustering}
-import connectedcar.data.{ConnectedCarAgg, ConnectedCarERecord}
+import cloudflow.akkastream.{ AkkaStreamlet, Clustering }
+import connectedcar.data.{ ConnectedCarAgg, ConnectedCarERecord }
 
 class ConnectedCarCluster extends AkkaStreamlet with Clustering {
   val in = AvroInlet[ConnectedCarERecord]("in")
@@ -23,9 +23,7 @@ class ConnectedCarCluster extends AkkaStreamlet with Clustering {
 
     val entity = Entity(typeKey)(createBehavior = entityContext => ConnectedCarActor(entityContext.entityId))
 
-    val source:SourceWithContext[
-      ConnectedCarERecord,
-      CommittableOffset, _] = shardedSourceWithCommittableContext(in, entity)
+    val source: SourceWithContext[ConnectedCarERecord, CommittableOffset, _] = shardedSourceWithCommittableContext(in, entity)
 
     val sharding = clusterSharding()
 
@@ -34,9 +32,9 @@ class ConnectedCarCluster extends AkkaStreamlet with Clustering {
     implicit val timeout: Timeout = 3.seconds
     def flow =
       FlowWithCommittableContext[ConnectedCarERecord]
-        .mapAsync(5)(msg ⇒ {
-          val carActor = sharding.entityRefFor(typeKey, msg.carId.toString)
-          carActor.ask[ConnectedCarAgg](ref => ConnectedCarERecordWrapper(msg, ref))
-        })
+        .mapAsync(5) { msg ⇒
+          val carActor = sharding.entityRefFor(typeKey, msg.carId.toString)
+          carActor.ask[ConnectedCarAgg](ref => ConnectedCarERecordWrapper(msg, ref))
+        }
   }
 }
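Editor's note: the refactored `ConnectedCarActor` above keeps its state (record count, running average) in method parameters and "mutates" by returning the next behavior. A trimmed, hypothetical sketch of that pattern, under the assumption of a plain Akka Typed dependency (`RunningAverage` and `Sample` are illustration-only names, not from this PR):

import akka.actor.typed.Behavior
import akka.actor.typed.scaladsl.Behaviors

final case class Sample(value: Double)

object RunningAverage {
  def apply(): Behavior[Sample] = updated(count = 0, average = 0.0)

  // Each message recomputes the average incrementally and swaps in a new
  // behavior holding the new state; no vars, no locks.
  private def updated(count: Int, average: Double): Behavior[Sample] =
    Behaviors.receive { (context, message) =>
      val newAverage = ((average * count) + message.value) / (count + 1)
      context.log.info(s"samples=${count + 1} average=$newAverage")
      updated(count + 1, newAverage)
    }
}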
diff --git a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/test/scala/connectedcar/streamlet/ConnectedCarClusterTest.scala b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/test/scala/connectedcar/streamlet/ConnectedCarClusterTest.scala
index 817db01e3..c47f670e6 100644
--- a/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/test/scala/connectedcar/streamlet/ConnectedCarClusterTest.scala
+++ b/examples/connected-car-cluster-sharding/akka-connected-car-streamlet/src/test/scala/connectedcar/streamlet/ConnectedCarClusterTest.scala
@@ -1,21 +1,14 @@
 package connectedcar.streamlet
 
 import akka.actor._
-import akka.cluster.typed.{Cluster, Join}
-import akka.stream._
+import akka.cluster.typed.{ Cluster, Join }
 import akka.stream.scaladsl._
 import akka.testkit._
 
 import org.scalatest._
 import org.scalatest.wordspec._
 import org.scalatest.matchers.must._
-import org.scalatest.concurrent._
 
-import cloudflow.streamlets._
-import cloudflow.streamlets.avro._
-import cloudflow.akkastream._
-import cloudflow.akkastream.scaladsl._
-import cloudflow.akkastream.testkit._
 import cloudflow.akkastream.testkit.scaladsl._
-import connectedcar.data.{ConnectedCarAgg, ConnectedCarERecord}
+import connectedcar.data._
 import connectedcar.streamlets.ConnectedCarCluster
 import connectedcar.streamlets.RawCarDataGenerator.generateCarERecord
 import akka.actor.typed.scaladsl.adapter._
@@ -23,15 +16,13 @@ import akka.actor.typed.scaladsl.adapter._
 class ConnectedCarClusterTest extends AnyWordSpec with Matchers with BeforeAndAfterAll {
 
   private implicit val system = ActorSystem("AkkaStreamletSpec")
-  private val cluster = Cluster(system.toTyped)
+  private val cluster         = Cluster(system.toTyped)
 
-  override def beforeAll: Unit = {
+  override def beforeAll: Unit =
     cluster.manager ! Join(cluster.selfMember.address)
-  }
 
-  override def afterAll: Unit = {
+  override def afterAll: Unit =
     TestKit.shutdownActorSystem(system)
-  }
 
   "A ConnectedCarCluster streamlet" should {
 
@@ -39,18 +30,16 @@ class ConnectedCarClusterTest extends AnyWordSpec with Matchers with BeforeAndAf
     "Allow for creating a 'flow processor'" in {
       val record = generateCarERecord()
-      val data = Vector(record)
+      val data   = Vector(record)
 
-      val agg = ConnectedCarAgg(record.carId, record.driver, record.speed, 1)
+      val agg          = ConnectedCarAgg(record.carId, record.driver, record.speed, 1)
       val expectedData = Vector(agg)
 
-      val source = Source(data)
-      val proc = new ConnectedCarCluster
-      val in = testkit.inletFromSource(proc.in, source)
-      val out = testkit.outletAsTap(proc.out)
-
-      testkit.run(proc, in, out, () ⇒ {
-        out.probe.receiveN(1) mustBe expectedData.map(d ⇒ proc.out.partitioner(d) -> d)
-      })
+      val source = Source(data)
+      val proc   = new ConnectedCarCluster
+      val in     = testkit.inletFromSource(proc.in, source)
+      val out    = testkit.outletAsTap(proc.out)
+
+      testkit.run(proc, in, out, () ⇒ out.probe.receiveN(1) mustBe expectedData.map(d ⇒ proc.out.partitioner(d) -> d))
 
       out.probe.expectMsg(Completed)
     }
diff --git a/examples/connected-car-cluster-sharding/project/cloudflow-plugins.sbt b/examples/connected-car-cluster-sharding/project/cloudflow-plugins.sbt
index 25de68ea9..4fc707bc5 100644
--- a/examples/connected-car-cluster-sharding/project/cloudflow-plugins.sbt
+++ b/examples/connected-car-cluster-sharding/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
    sbtdynver.DynVer(None, "-", "v")
      .getGitDescribeOutput(new java.util.Date())
      .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/examples.yaml b/examples/examples.yaml
deleted file mode 100644
index 0ec9940f1..000000000
--- a/examples/examples.yaml
+++ /dev/null
@@ -1,63 +0,0 @@
-examples:
-  - id: "spark-sensors"
-    path: "spark-sensors"
-    description: |
-      This example shows how sensor data can be aggregated using Spark.
-  - id: "call-record-aggregator"
-    path: "call-record-aggregator"
-    description: |
-      This multi-project example shows the combination of Akka/Scala, Akka/Java and Spark projects into an end-to-end application.
-  - id: "taxi-ride"
-    path: "taxi-ride"
-    description: This app shows the combination of multi-project streamlets using Flink and Akka
-  - id: "tensorflow-akka"
-    path: "tensorflow-akka"
-    description: This app shows how to serve a Tensorflow model using Akka
-  - id: "sensor-data-scala-snippets"
-    path: "snippets/modules/ROOT/examples/sensor-data-scala"
-    description: |
-      Copy of sensor-data-scala to be able to include the files in the documentation
-  - id: "akkastreams-scala-snippets"
-    path: "snippets/modules/ROOT/examples/akkastreams-scala"
-    description: |
-      Set of files containing the Scala Akka streams code snippets used in the documentation
-  - id: "akkastreams-java-snippets"
-    path: "snippets/modules/ROOT/examples/akkastreams-java"
-    description: |
-      Set of files containing the Java Akka streams code snippets used in the documentation
-  - id: "akkastreams-grpc-scala"
-    path: "snippets/modules/ROOT/examples/akkastreams-grpc-scala"
-    description: |
-      Set of files containing the Scala Akka streams code snippets used in the documentation for providing gRPC services
-  - id: "spark-scala-snippets"
-    path: "snippets/modules/ROOT/examples/spark-scala"
-    description: |
-      Set of files containing the Scala Spark code snippets used in the documentation
-  - id: "build-akka-streamlets-java-snippets"
-    path: "snippets/modules/ROOT/examples/build-akka-streamlets-java"
-    description: |
-      Set of files containing the Java code snippets used in the build-akka-streamlets documentation
-  - id: "build-akka-streamlets-scala-snippets"
-    path: "snippets/modules/ROOT/examples/build-akka-streamlets-scala"
-    description: |
-      Set of files containing the Scala code snippets used in the build-akka-streamlets documentation
-  - id: "build-flink-streamlets-java-snippets"
-    path: "snippets/modules/ROOT/examples/build-flink-streamlets-java"
-    description: |
-      Set of files containing the Java code snippets used in the build-flink-streamlets documentation
-  - id: "build-flink-streamlets-scala-snippets"
-    path: "snippets/modules/ROOT/examples/build-flink-streamlets-scala"
-    description: |
-      Set of files containing the Scala code snippets used in the build-flink-streamlets documentation
-  - id: "build-spark-streamlets-scala-snippets"
-    path: "snippets/modules/ROOT/examples/build-spark-streamlets-scala"
-    description: |
-      Set of files containing the Scala code snippets used in the build-spark-streamlets documentation
-  - id: "template-scala"
-    path: "templates/single-backend-scala"
-    description: |
-      A getting started template containing a minimal Scala project that can be used as a starting point
-  - id: "template-java"
-    path: "templates/single-backend-java"
-    description: |
-      A getting started template containing a minimal Java project that can be used as a starting point
diff --git a/examples/mvn-call-record-aggregator/akka-cdr-ingestor/pom.xml b/examples/mvn-call-record-aggregator/akka-cdr-ingestor/pom.xml
index d21789f34..f89ff4076 100644
--- a/examples/mvn-call-record-aggregator/akka-cdr-ingestor/pom.xml
+++ b/examples/mvn-call-record-aggregator/akka-cdr-ingestor/pom.xml
@@ -127,7 +127,7 @@
       <plugin>
         <groupId>net.alchim31.maven</groupId>
         <artifactId>scala-maven-plugin</artifactId>
-        <version>4.5.1</version>
+        <version>4.5.3</version>
         <executions>
           <execution>
             <id>add-generated-sources</id>
diff --git a/examples/mvn-call-record-aggregator/akka-java-aggregation-output/pom.xml b/examples/mvn-call-record-aggregator/akka-java-aggregation-output/pom.xml
index 7b33b781b..bb80b0e71 100644
--- a/examples/mvn-call-record-aggregator/akka-java-aggregation-output/pom.xml
+++ b/examples/mvn-call-record-aggregator/akka-java-aggregation-output/pom.xml
@@ -127,7 +127,7 @@
       <plugin>
         <groupId>net.alchim31.maven</groupId>
         <artifactId>scala-maven-plugin</artifactId>
-        <version>4.5.1</version>
+        <version>4.5.3</version>
         <executions>
           <execution>
             <id>add-generated-sources</id>
diff --git a/examples/mvn-call-record-aggregator/spark-aggregation/pom.xml b/examples/mvn-call-record-aggregator/spark-aggregation/pom.xml
index faa3a4594..fce53a508 100644
--- a/examples/mvn-call-record-aggregator/spark-aggregation/pom.xml
+++ b/examples/mvn-call-record-aggregator/spark-aggregation/pom.xml
@@ -176,7 +176,7 @@
       <plugin>
         <groupId>net.alchim31.maven</groupId>
         <artifactId>scala-maven-plugin</artifactId>
-        <version>4.5.1</version>
+        <version>4.5.3</version>
         <executions>
           <execution>
             <id>add-generated-sources</id>
diff --git a/examples/mvn-hello-world/hello-world/pom.xml b/examples/mvn-hello-world/hello-world/pom.xml
index 7fda3ccf2..0ba2edf88 100644
--- a/examples/mvn-hello-world/hello-world/pom.xml
+++ b/examples/mvn-hello-world/hello-world/pom.xml
@@ -129,17 +129,17 @@
     <dependency>
       <groupId>com.lightbend.cloudflow</groupId>
-      <artifactId>cloudflow-akka_2.12</artifactId>
+      <artifactId>cloudflow-akka_${scala.binary.version}</artifactId>
      <version>${cloudflow.version}</version>
     </dependency>
     <dependency>
       <groupId>com.lightbend.cloudflow</groupId>
-      <artifactId>cloudflow-runner_2.12</artifactId>
+      <artifactId>cloudflow-runner_${scala.binary.version}</artifactId>
      <version>${cloudflow.version}</version>
     </dependency>
     <dependency>
       <groupId>com.lightbend.cloudflow</groupId>
-      <artifactId>cloudflow-localrunner_2.12</artifactId>
+      <artifactId>cloudflow-localrunner_${scala.binary.version}</artifactId>
      <version>${cloudflow.version}</version>
     </dependency>
diff --git a/examples/mvn-hello-world/pom.xml b/examples/mvn-hello-world/pom.xml
index 1fc865390..ff4c16079 100644
--- a/examples/mvn-hello-world/pom.xml
+++ b/examples/mvn-hello-world/pom.xml
@@ -1,5 +1,5 @@
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
   4.0.0
   com.lightbend.cloudflow
@@ -7,13 +7,15 @@
   0.1.0-SNAPSHOT
   pom
+  mvn-hello-world
   http://cloudflow.io
 
-  0.36.0
   UTF-8
   UTF-8
   2.12.12
+  2.12
+  0.36.0
   ${env.CLOUDFLOW_VERSION}
   ${env.CLOUDFLOW_VERSION}
diff --git a/examples/scripts/build-all.sh b/examples/scripts/build-all.sh
deleted file mode 100755
index adc0d7cf1..000000000
--- a/examples/scripts/build-all.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (C) 2016-2021 Lightbend Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )"
-TARGET=$1
-
-echo "========================================================================="
-echo "Runs 'sbt $TARGET' for examples"
-echo "========================================================================="
-
-# Obtain current project list from examples.yaml
-PROJECTS=$(cat $DIR/../examples.yaml | grep "path" | cut -d\" -f2)
-
-for prj in $PROJECTS; do
-  echo "========================================================================="
-  echo "${TARGET}: $prj"
-  echo "========================================================================="
-
-  cd $DIR/../$prj
-  sbt $TARGET
-  RETVAL=$?
-  [ $RETVAL -ne 0 ] && echo "Failure in project $prj" && exit -1
-  cd ../..
-done
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/project/cloudflow-plugins.sbt
index b42b6e58b..76a5428cc 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/project/cloudflow-plugins.sbt
@@ -1,6 +1,6 @@
 // Needs cloudflow from https://github.com/lightbend/cloudflow/pull/642
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/Logger.java b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/Logger.java
index 26e3c33c6..17f66f95f 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/Logger.java
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/Logger.java
@@ -5,6 +5,7 @@
 import cloudflow.akkastream.AkkaStreamlet;
 import cloudflow.akkastream.AkkaStreamletLogic;
 import cloudflow.akkastream.javadsl.RunnableGraphStreamletLogic;
+import cloudflow.streamlets.CodecInlet;
 import cloudflow.streamlets.StreamletShape;
 import cloudflow.streamlets.proto.javadsl.ProtoInlet;
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/SensorDataIngress.java b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/SensorDataIngress.java
index 1db07bdb1..e5a1fdfd4 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/SensorDataIngress.java
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-java/src/main/java/sensordata/SensorDataIngress.java
@@ -5,7 +5,7 @@
 //end::logic[]
 import akka.http.javadsl.model.HttpRequest;
 import akka.http.javadsl.model.HttpResponse;
-import akka.japi.Function;
+import akka.japi.function.Function;
 import cloudflow.akkastream.*;
 
 //tag::logic[]
@@ -48,4 +48,4 @@ public List<Function<HttpRequest, CompletionStage<HttpResponse>>> handlers() {
     };
   }
 }
-//end::logic[]
\ No newline at end of file
+//end::logic[]
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-scala/project/cloudflow-plugins.sbt
index b42b6e58b..76a5428cc 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-grpc-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-grpc-scala/project/cloudflow-plugins.sbt
@@ -1,6 +1,6 @@
 // Needs cloudflow from https://github.com/lightbend/cloudflow/pull/642
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-java/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/akkastreams-java/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-java/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-java/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/akkastreams-scala/project/cloudflow-plugins.sbt
index 0444ce6fe..71b5daaeb 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataFileIngress.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataFileIngress.scala
index fc571b1a6..15876a674 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataFileIngress.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataFileIngress.scala
@@ -53,13 +53,13 @@ class DataFileIngress extends AkkaStreamlet {
   // *) Note that reading and deserializing the file content is done in separate steps for readability only, in production they should be merged into one step for performance reasons.
   override def createLogic = new RunnableGraphStreamletLogic() {
-    val listFiles: NotUsed ⇒ Source[file.Path, NotUsed] = { _ ⇒
+    val listFiles: NotUsed => Source[file.Path, NotUsed] = { _ =>
       Directory.ls(getMountedPath(sourceData))
     }
-    val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒
+    val readFile: Path => Source[ByteString, Future[IOResult]] = { path: Path =>
       FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue))
     }
-    val parseFile: ByteString ⇒ Data = { jsonByteString ⇒
+    val parseFile: ByteString => Data = { jsonByteString =>
       JsonParser(jsonByteString.utf8String).convertTo[Data]
     }
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataHttpIngressCustomRoute.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataHttpIngressCustomRoute.scala
index 89b7773ef..46449f643 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataHttpIngressCustomRoute.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataHttpIngressCustomRoute.scala
@@ -21,8 +21,8 @@ class DataHttpIngressCustomRoute extends AkkaServerStreamlet {
     val writer = sinkRef(out)
     override def route(): Route =
       put {
-        entity(as[Data]) { data ⇒
-          onSuccess(writer.write(data)) { _ ⇒
+        entity(as[Data]) { data =>
+          onSuccess(writer.write(data)) { _ =>
             complete(StatusCodes.OK)
           }
         }
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataMerge.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataMerge.scala
index c2ba69375..e9c01caf6 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataMerge.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataMerge.scala
@@ -11,7 +11,7 @@ class DataMerge extends AkkaStreamlet {
   val in0 = AvroInlet[Data]("in-0")
   val in1 = AvroInlet[Data]("in-1")
-  val out = AvroOutlet[Data]("out", d ⇒ d.key)
+  val out = AvroOutlet[Data]("out", d => d.key)
 
   final override val shape = StreamletShape.withInlets(in0, in1).withOutlets(out)
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataSplitter.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataSplitter.scala
index 4e340975c..7b1b5c711 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataSplitter.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/cloudflow/akkastreamsdoc/DataSplitter.scala
@@ -10,7 +10,7 @@ import cloudflow.streamlets.avro._
 
 class DataSplitter extends AkkaStreamlet {
   val in = AvroInlet[Data]("in")
-  val invalid = AvroOutlet[DataInvalid]("invalid").withPartitioner(data ⇒ data.key)
+  val invalid = AvroOutlet[DataInvalid]("invalid").withPartitioner(data => data.key)
   val valid = AvroOutlet[Data]("valid").withPartitioner(RoundRobinPartitioner)
   val shape = StreamletShape(in).withOutlets(invalid, valid)
@@ -18,7 +18,7 @@ class DataSplitter extends AkkaStreamlet {
     def runnableGraph = sourceWithCommittableContext(in).to(Splitter.sink(flow, invalid, valid))
     def flow = FlowWithCommittableContext[Data]
-      .map { data ⇒
+      .map { data =>
         if (data.value < 0) Left(DataInvalid(data.key, data.value, "All data must be positive numbers!"))
         else Right(data)
       }
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsEgress.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsEgress.scala
index 24a8deddd..18ca64411 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsEgress.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsEgress.scala
@@ -14,7 +14,7 @@ object MetricsEgress extends AkkaStreamlet {
   final override def createLogic = new RunnableGraphStreamletLogic {
     override final def runnableGraph =
       sourceWithCommittableContext(in)
-        .map { i ⇒
+        .map { i =>
           println(s"Int: ${i.value}"); i
         }
         .to(committableSink(defaultCommitterSettings))
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsValidation.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsValidation.scala
index 99fd318fb..734813fa7 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsValidation.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/MetricsValidation.scala
@@ -1,23 +1,21 @@
 package com.example
 
+import akka.stream.scaladsl.RunnableGraph
 import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
-
 import cloudflow.akkastreamsdoc._
 
 object MetricsValidation extends AkkaStreamlet {
-  val in = AvroInlet[Data]("in")
-  val out = AvroOutlet[Data]("out-0")
-  final override val shape = StreamletShape.withInlets(in).withOutlets(out)
+  val in: CodecInlet[Data] = AvroInlet[Data]("in")
+  val out: CodecOutlet[Data] = AvroOutlet[Data]("out-0")
+  override val shape: StreamletShape = StreamletShape.withInlets(in).withOutlets(out)
 
-  final override def createLogic = new RunnableGraphStreamletLogic {
-    override final def runnableGraph =
-      sourceWithOffsetContext(in)
-        .map { i ⇒
-          i
-        }
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic {
+    override final def runnableGraph(): RunnableGraph[_] =
+      sourceWithCommittableContext(in)
+        .map(i => i)
         .to(committableSink(out))
   }
 }
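Editor's note: `DataSplitter` and `MetricsValidation` above both rely on the same contract: the validation step emits `Left` for the "invalid" outlet and `Right` for the "valid" one, and `Splitter.sink` routes accordingly. An illustrative, dependency-free sketch of that contract (the names `Reading`, `Invalid`, and `validate` are hypothetical, not from this PR; `partitionMap` stands in for `Splitter.sink`):

object EitherRoutingSketch extends App {
  final case class Reading(key: String, value: Double)
  final case class Invalid(reading: Reading, reason: String)

  // Left goes to the "invalid" outlet, Right to the "valid" one.
  def validate(r: Reading): Either[Invalid, Reading] =
    if (r.value < 0) Left(Invalid(r, "All data must be positive numbers!"))
    else Right(r)

  // One pass over the input, two outputs, mirroring the two outlets.
  val (invalid, valid) =
    Vector(Reading("a", 1.0), Reading("b", -2.0), Reading("c", 3.0)).partitionMap(validate)

  println(s"valid=$valid")
  println(s"invalid=$invalid")
}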
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/SensorDataToMetrics.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/SensorDataToMetrics.scala
index 0dfe2bc84..88a4b6851 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/SensorDataToMetrics.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/main/scala/com/example/SensorDataToMetrics.scala
@@ -1,23 +1,21 @@
 package com.example
 
+import akka.stream.scaladsl.RunnableGraph
 import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
-
 import cloudflow.akkastreamsdoc._
 
 object SensorDataToMetrics extends AkkaStreamlet {
-  val in = AvroInlet[Data]("in")
-  val out = AvroOutlet[Data]("out")
-  final override val shape = StreamletShape.withInlets(in).withOutlets(out)
+  val in: CodecInlet[Data] = AvroInlet[Data]("in")
+  val out: CodecOutlet[Data] = AvroOutlet[Data]("out")
+  override val shape: StreamletShape = StreamletShape.withInlets(in).withOutlets(out)
 
-  final override def createLogic = new RunnableGraphStreamletLogic {
-    override final def runnableGraph =
-      sourceWithOffsetContext(in)
-        .map { i ⇒
-          i
-        }
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic {
+    override final def runnableGraph: RunnableGraph[_] =
+      sourceWithCommittableContext(in)
+        .map(i => i)
         .to(committableSink(out))
   }
 }
diff --git a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/test/scala/com/example/SampleSpec.scala b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/test/scala/com/example/SampleSpec.scala
index dc615a9d6..411c6ee0f 100644
--- a/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/test/scala/com/example/SampleSpec.scala
+++ b/examples/snippets/modules/ROOT/examples/akkastreams-scala/src/test/scala/com/example/SampleSpec.scala
@@ -1,20 +1,17 @@
 package com.example
 
 import akka.actor._
-import akka.stream._
 import akka.testkit._
 
 import org.scalatest._
 import org.scalatest.wordspec._
 import org.scalatest.matchers.must._
 
-import cloudflow.akkastream.testkit._
 import cloudflow.akkastream.testkit.scaladsl._
 
 import cloudflow.akkastreamsdoc.RecordSumFlow
 
 class SampleSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
 
   private implicit val system = ActorSystem("AkkaStreamletSpec")
-  private implicit val mat = ActorMaterializer()
 
   override def afterAll: Unit =
     TestKit.shutdownActorSystem(system)
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/.scalafmt.conf b/examples/snippets/modules/ROOT/examples/base-images-override/.scalafmt.conf
new file mode 100644
index 000000000..66c627ef0
--- /dev/null
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/.scalafmt.conf
@@ -0,0 +1,44 @@
+version = "2.3.2"
+
+style = defaultWithAlign
+
+align = more
+align.openParenCallSite = true
+align.openParenDefnSite = true
+rewrite.rules = [AvoidInfix,RedundantBraces,RedundantParens,SortImports]
+danglingParentheses = true
+docstrings = JavaDoc
+maxColumn = 140
+unindentTopLevelOperators = true
+rewrite.neverInfix.excludeFilters = [
+  and
+  min
+  max
+  until
+  to
+  by
+  eq
+  ne
+  "should.*"
+  "contain.*"
+  "must.*"
+  in
+  ignore
+  be
+  taggedAs
+  thrownBy
+  synchronized
+  have
+  when
+  size
+  only
+  noneOf
+  oneElementOf
+  noElementsOf
+  atLeastOneElementOf
+  atMostOneElementOf
+  allElementsOf
+  inOrderElementsOf
+  theSameElementsAs
+]
+spaces.inImportCurlyBraces = true
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/build.sbt b/examples/snippets/modules/ROOT/examples/base-images-override/build.sbt
index 92f17be50..d8d3dd9da 100644
--- a/examples/snippets/modules/ROOT/examples/base-images-override/build.sbt
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/build.sbt
@@ -3,8 +3,9 @@ import sbt.Keys._
 
 //tag::docs-projectSetup-example[]
 lazy val sampleApp = (project in file("."))
+  .enablePlugins(CloudflowApplicationPlugin)
   .settings(
-    cloudflowAkkaBaseImage := "myRepositoryUrl/myRepositoryPath:2.0.10-cloudflow-akka-2.6.6-scala-2.12",
+    cloudflowDockerBaseImage := "myRepositoryUrl/myRepositoryPath:2.0.10-cloudflow-akka-2.6.6-scala-2.12",
     //end::docs-projectSetup-example[]
     name := "sample-app",
     organization := "com.lightbend.cloudflow",
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/project/build.properties b/examples/snippets/modules/ROOT/examples/base-images-override/project/build.properties
new file mode 100644
index 000000000..efae80e9a
--- /dev/null
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/project/build.properties
@@ -0,0 +1 @@
+sbt.version = 1.4.4
\ No newline at end of file
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/base-images-override/project/cloudflow-plugins.sbt
new file mode 100644
index 000000000..36ce4cc23
--- /dev/null
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/project/cloudflow-plugins.sbt
@@ -0,0 +1,10 @@
+// Needs cloudflow from https://github.com/lightbend/cloudflow/pull/642
+val latestVersion = {
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
+    sbtdynver.DynVer(None, "-", "v")
+      .getGitDescribeOutput(new java.util.Date())
+      .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
+  )(identity)
+}
+
+addSbtPlugin("com.lightbend.cloudflow" % "sbt-cloudflow" % latestVersion)
\ No newline at end of file
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/project/plugins.sbt b/examples/snippets/modules/ROOT/examples/base-images-override/project/plugins.sbt
new file mode 100644
index 000000000..0a46db08b
--- /dev/null
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/project/plugins.sbt
@@ -0,0 +1 @@
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1")
diff --git a/examples/snippets/modules/ROOT/examples/base-images-override/project/project/plugins.sbt b/examples/snippets/modules/ROOT/examples/base-images-override/project/project/plugins.sbt
new file mode 100644
index 000000000..7ff034b80
--- /dev/null
+++ b/examples/snippets/modules/ROOT/examples/base-images-override/project/project/plugins.sbt
@@ -0,0 +1,2 @@
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1")
+addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.1.1")
\ No newline at end of file
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-java/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-java/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-java/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-java/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step0/src/main/scala/com/example/ReportPrinter.scala b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step0/src/main/scala/com/example/ReportPrinter.scala
index 83464af08..51d4c8474 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step0/src/main/scala/com/example/ReportPrinter.scala
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step0/src/main/scala/com/example/ReportPrinter.scala
@@ -1,19 +1,13 @@
 package com.example
 
-import akka.stream.scaladsl.Sink
-
-import cloudflow.streamlets._
-import cloudflow.streamlets.avro._
-import cloudflow.streamlets.StreamletShape
-
 import cloudflow.akkastream._
-import cloudflow.akkastream.scaladsl._
+import cloudflow.streamlets.StreamletShape
 
 //TODO rename to ReportPrinter
 object ReportPrinterStep0 extends AkkaStreamlet {
   // 1. TODO Create inlets and outlets
 
   // 2. TODO Define the shape of the streamlet
-  val shape = StreamletShape.empty
+  override val shape: StreamletShape = StreamletShape.empty
 
   // 3. TODO Override createLogic to provide StreamletLogic
-  def createLogic = new AkkaStreamletLogic() { def run = () }
+  override def createLogic: AkkaStreamletLogic = new AkkaStreamletLogic() { override def run: Unit = () }
 }
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step1/src/main/scala/com/example/ReportPrinter.scala b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step1/src/main/scala/com/example/ReportPrinter.scala
index 9b0af293f..be6521278 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step1/src/main/scala/com/example/ReportPrinter.scala
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step1/src/main/scala/com/example/ReportPrinter.scala
@@ -1,21 +1,18 @@
 package com.example
 
-import akka.stream.scaladsl.Sink
-
-import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 import cloudflow.streamlets.StreamletShape
 
 import cloudflow.akkastream._
-import cloudflow.akkastream.scaladsl._
+import cloudflow.streamlets.Inlet
 
 //TODO rename to ReportPrinter
 object ReportPrinterStep1 extends AkkaStreamlet {
   // 1. Create inlets and outlets
-  val inlet = AvroInlet[Report]("report-in")
+  val inlet: Inlet = AvroInlet[Report]("report-in")
 
   // 2. TODO Define the shape of the streamlet
-  val shape = StreamletShape.empty
+  override val shape: StreamletShape = StreamletShape.empty
 
   // 3. TODO Override createLogic to provide StreamletLogic
-  def createLogic = new AkkaStreamletLogic() { def run = () }
+  override def createLogic: AkkaStreamletLogic = new AkkaStreamletLogic() { override def run: Unit = () }
 }
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step2/src/main/scala/com/example/ReportPrinter.scala b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step2/src/main/scala/com/example/ReportPrinter.scala
index 372ace818..ef82bf824 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step2/src/main/scala/com/example/ReportPrinter.scala
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step2/src/main/scala/com/example/ReportPrinter.scala
@@ -1,22 +1,19 @@
 package com.example
 
-import akka.stream.scaladsl.Sink
-
-import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 import cloudflow.streamlets.StreamletShape
 
 import cloudflow.akkastream._
-import cloudflow.akkastream.scaladsl._
+import cloudflow.streamlets.Inlet
 
 //TODO rename to ReportPrinter
 object ReportPrinterStep2 extends AkkaStreamlet {
   // 1. Create inlets and outlets
-  val inlet = AvroInlet[Report]("report-in")
+  val inlet: Inlet = AvroInlet[Report]("report-in")
 
   // 2. Define the shape of the streamlet
-  val shape = StreamletShape.withInlets(inlet)
+  override val shape: StreamletShape = StreamletShape.withInlets(inlet)
 
   // 3. TODO Override createLogic to provide StreamletLogic
-  def createLogic = new AkkaStreamletLogic() { def run = () }
+  override def createLogic: AkkaStreamletLogic = new AkkaStreamletLogic() { override def run: Unit = () }
 }
diff --git a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step3/src/test/scala/com/example/TestProcessorSpec.scala b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step3/src/test/scala/com/example/TestProcessorSpec.scala
index f749f2bab..e500eaff0 100644
--- a/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step3/src/test/scala/com/example/TestProcessorSpec.scala
+++ b/examples/snippets/modules/ROOT/examples/build-akka-streamlets-scala/step3/src/test/scala/com/example/TestProcessorSpec.scala
@@ -2,20 +2,13 @@ package com.example
 
 //tag::imports[]
 import akka.actor._
-import akka.stream._
 import akka.stream.scaladsl._
 import akka.testkit._
 
 import org.scalatest._
 import org.scalatest.wordspec._
 import org.scalatest.matchers.must._
-import org.scalatest.concurrent._
 
-import cloudflow.streamlets._
-import cloudflow.streamlets.avro._
-import cloudflow.akkastream._
-import cloudflow.akkastream.scaladsl._
-import cloudflow.akkastream.testkit._
 import cloudflow.akkastream.testkit.scaladsl._
 //end::imports[]
 
@@ -23,7 +16,6 @@ import cloudflow.akkastream.testkit.scaladsl._
 class TestProcessorSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
 
   private implicit val system = ActorSystem("AkkaStreamletSpec")
-  private implicit val mat = ActorMaterializer()
 
   //tag::afterAll[]
   override def afterAll: Unit =
diff --git a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-java/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-java/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-java/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-java/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/step3/src/test/scala/com/example/FlinkProcessorSpec.scala b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/step3/src/test/scala/com/example/FlinkProcessorSpec.scala
index 23f93b848..979006217 100644
--- a/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/step3/src/test/scala/com/example/FlinkProcessorSpec.scala
+++ b/examples/snippets/modules/ROOT/examples/build-flink-streamlets-scala/step3/src/test/scala/com/example/FlinkProcessorSpec.scala
@@ -11,15 +11,12 @@ import org.scalatest._
 
 //tag::test[]
 // 1. Extend from the abstract class FlinkTestkit
-class FlinkProcessorSpec extends FlinkTestkit
-  with WordSpecLike
-  with Matchers
-  with BeforeAndAfterAll {
+class FlinkProcessorSpec extends FlinkTestkit with WordSpecLike with Matchers with BeforeAndAfterAll {
 
   "FlinkProcessor" should {
     "process streaming data" in {
       @transient lazy val env = StreamExecutionEnvironment.getExecutionEnvironment
-      
+
       // 2. Create the FlinkStreamlet to test
       val processor = new FlinkProcessor
 
@@ -27,9 +24,7 @@ class FlinkProcessorSpec extends FlinkTestkit with WordSpecLike with Matchers wi
       val data = (1 to 10).map(i ⇒ new Data(i, s"name$i"))
 
       // 4. Setup inlet taps that tap the inlet ports of the streamlet
-      val in: FlinkInletTap[Data] = inletAsTap[Data](
-        processor.in,
-        env.addSource(FlinkSource.CollectionSourceFunction(data)))
+      val in: FlinkInletTap[Data] = inletAsTap[Data](processor.in, env.addSource(FlinkSource.CollectionSourceFunction(data)))
 
       // 5. Setup outlet taps for outlet ports
       val out: FlinkOutletTap[Data] = outletAsTap[Data](processor.out)
diff --git a/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/project/cloudflow-plugins.sbt
index 11d649007..680a1df07 100644
--- a/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/step3/src/test/scala/com/example/SparkProcessorSpec.scala b/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/step3/src/test/scala/com/example/SparkProcessorSpec.scala
index b4e1c0ce1..1fb4296ad 100644
--- a/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/step3/src/test/scala/com/example/SparkProcessorSpec.scala
+++ b/examples/snippets/modules/ROOT/examples/build-spark-streamlets-scala/step3/src/test/scala/com/example/SparkProcessorSpec.scala
@@ -39,7 +39,7 @@ class SparkProcessorSpec extends SparkScalaTestSupport { // 1. Extend SparkScala
       // 8. Assert that actual matches expectation
       results must contain(Data(2, "name2"))
       results.size must be(5)
-      run.totalRows must be (10)
+      run.totalRows must be(10)
     }
   }
 }
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/sensor-data-scala/project/cloudflow-plugins.sbt
index d87969192..5597f7d05 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/project/cloudflow-plugins.sbt
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/project/cloudflow-plugins.sbt
@@ -1,5 +1,5 @@
 val latestVersion = {
-  sys.env.get("CLOUDFLOW_VERSION").fold(
+  sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold(
     sbtdynver.DynVer(None, "-", "v")
       .getGitDescribeOutput(new java.util.Date())
       .fold(throw new Exception("Failed to retrieve version"))(_.version("-"))
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/InvalidMetricLogger.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/InvalidMetricLogger.scala
index 5e077446e..626af6184 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/InvalidMetricLogger.scala
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/InvalidMetricLogger.scala
@@ -16,23 +16,24 @@
 //tag::code[]
 package sensordata
 
+import akka.stream.scaladsl._
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
 import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 
 class InvalidMetricLogger extends AkkaStreamlet {
-  val inlet = AvroInlet[InvalidMetric]("in")
-  val shape = StreamletShape.withInlets(inlet)
+  val inlet: CodecInlet[InvalidMetric] = AvroInlet[InvalidMetric]("in")
+  override val shape: StreamletShape = StreamletShape.withInlets(inlet)
 
-  override def createLogic = new RunnableGraphStreamletLogic() {
-    val flow = FlowWithCommittableContext[InvalidMetric]
-      .map { invalidMetric ⇒
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() {
+    val flow = FlowWithCommittableContext[InvalidMetric]()
+      .map { invalidMetric =>
         system.log.warning(s"Invalid metric detected! $invalidMetric")
         invalidMetric
       }
 
-    def runnableGraph =
+    override def runnableGraph(): RunnableGraph[_] =
       sourceWithCommittableContext(inlet).via(flow).to(committableSink)
   }
 }
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/JsonFormats.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/JsonFormats.scala
index 11f2cad04..3da13e98b 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/JsonFormats.scala
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/JsonFormats.scala
@@ -28,8 +28,8 @@ trait UUIDJsonSupport extends DefaultJsonProtocol {
     def write(uuid: UUID) = JsString(uuid.toString)
 
     def read(json: JsValue): UUID = json match {
-      case JsString(uuid) ⇒ Try(UUID.fromString(uuid)).getOrElse(deserializationError(s"Expected valid UUID but got '$uuid'."))
-      case other          ⇒ deserializationError(s"Expected UUID as JsString, but got: $other")
+      case JsString(uuid) => Try(UUID.fromString(uuid)).getOrElse(deserializationError(s"Expected valid UUID but got '$uuid'."))
+      case other          => deserializationError(s"Expected UUID as JsString, but got: $other")
     }
   }
 }
@@ -39,8 +39,8 @@ trait InstantJsonSupport extends DefaultJsonProtocol {
     def write(instant: Instant) = JsNumber(instant.toEpochMilli)
 
     def read(json: JsValue): Instant = json match {
-      case JsNumber(value) ⇒ Instant.ofEpochMilli(value.toLong)
-      case other           ⇒ deserializationError(s"Expected Instant as JsNumber, but got: $other")
+      case JsNumber(value) => Instant.ofEpochMilli(value.toLong)
+      case other           => deserializationError(s"Expected Instant as JsNumber, but got: $other")
     }
   }
 }
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/MetricsValidation.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/MetricsValidation.scala
index 2ef585d74..e51600841 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/MetricsValidation.scala
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/MetricsValidation.scala
@@ -16,6 +16,7 @@
 //tag::code[]
 package sensordata
 
+import akka.stream.scaladsl.RunnableGraph
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
 import cloudflow.akkastream.util.scaladsl._
@@ -23,16 +24,16 @@ import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 
 class MetricsValidation extends AkkaStreamlet {
-  val in = AvroInlet[Metric]("in")
-  val invalid = AvroOutlet[InvalidMetric]("invalid").withPartitioner(metric ⇒ metric.metric.deviceId.toString)
-  val valid = AvroOutlet[Metric]("valid").withPartitioner(RoundRobinPartitioner)
-  val shape = StreamletShape(in).withOutlets(invalid, valid)
+  val in: CodecInlet[Metric] = AvroInlet[Metric]("in")
+  val invalid: CodecOutlet[InvalidMetric] = AvroOutlet[InvalidMetric]("invalid").withPartitioner(metric => metric.metric.deviceId.toString)
+  val valid: CodecOutlet[Metric] = AvroOutlet[Metric]("valid").withPartitioner(RoundRobinPartitioner)
+  override val shape: StreamletShape = StreamletShape(in).withOutlets(invalid, valid)
 
-  override def createLogic = new RunnableGraphStreamletLogic() {
-    def runnableGraph = sourceWithCommittableContext(in).to(Splitter.sink(flow, invalid, valid))
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() {
+    override def runnableGraph(): RunnableGraph[_] = sourceWithCommittableContext(in).to(Splitter.sink(flow, invalid, valid))
     def flow =
-      FlowWithCommittableContext[Metric]
-        .map { metric ⇒
+      FlowWithCommittableContext[Metric]()
+        .map { metric =>
           if (!SensorDataUtils.isValidMetric(metric)) Left(InvalidMetric(metric, "All measurements must be positive numbers!"))
           else Right(metric)
         }
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorSpeedFilter.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorSpeedFilter.scala
index 1354459fa..d8c30d1be 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorSpeedFilter.scala
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorSpeedFilter.scala
@@ -16,18 +16,20 @@
 package sensordata
 
+import akka.stream.scaladsl.RunnableGraph
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
 import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 
 class RotorSpeedFilter extends AkkaStreamlet {
-  val in = AvroInlet[Metric]("in")
-  val out = AvroOutlet[Metric]("out").withPartitioner(RoundRobinPartitioner)
-  val shape = StreamletShape(in, out)
+  val in: CodecInlet[Metric] = AvroInlet[Metric]("in")
+  val out: CodecOutlet[Metric] = AvroOutlet[Metric]("out").withPartitioner(RoundRobinPartitioner)
+  override val shape: StreamletShape = StreamletShape(in, out)
 
-  override def createLogic = new RunnableGraphStreamletLogic() {
-    def runnableGraph = sourceWithCommittableContext(in).via(flow).to(committableSink(out))
-    def flow = FlowWithCommittableContext[Metric].filter(_.name == "rotorSpeed")
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() {
+    def flow = FlowWithCommittableContext[Metric]().filter(_.name == "rotorSpeed")
+
+    override def runnableGraph(): RunnableGraph[_] = sourceWithCommittableContext(in).via(flow).to(committableSink(out))
   }
 }
diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorspeedWindowLogger.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorspeedWindowLogger.scala
index 670963e8c..a52e3e7f6 100644
--- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorspeedWindowLogger.scala
+++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/RotorspeedWindowLogger.scala
@@ -16,21 +16,22 @@
 package sensordata
 
+import akka.stream.scaladsl.RunnableGraph
 import cloudflow.akkastream._
 import cloudflow.akkastream.scaladsl._
 import cloudflow.streamlets._
 import cloudflow.streamlets.avro._
 
 class RotorspeedWindowLogger extends AkkaStreamlet {
-  val in = AvroInlet[Metric]("in")
-  val shape = StreamletShape(in)
-  override def createLogic = new RunnableGraphStreamletLogic() {
-    def runnableGraph = sourceWithCommittableContext(in).via(flow).to(committableSink)
+  val in: CodecInlet[Metric] = AvroInlet[Metric]("in")
+  override val shape: StreamletShape = StreamletShape(in)
+  override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() {
+    override def runnableGraph(): RunnableGraph[_] = sourceWithCommittableContext(in).via(flow).to(committableSink)
     def flow =
-      FlowWithCommittableContext[Metric]
+      FlowWithCommittableContext[Metric]()
         .grouped(5)
-        .map { rotorSpeedWindow ⇒
-          val (avg, _) = rotorSpeedWindow.map(_.value).foldLeft((0.0, 1)) { case ((avg, idx), next) ⇒ (avg + (next - avg) / idx, idx + 1) }
+ .map { rotorSpeedWindow => + val (avg, _) = rotorSpeedWindow.map(_.value).foldLeft((0.0, 1)) { case ((avg, idx), next) => (avg + (next - avg) / idx, idx + 1) } system.log.info(s"Average rotorspeed is: $avg") diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataFileIngress.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataFileIngress.scala index 980ba62f3..aebbc2f0c 100644 --- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataFileIngress.scala +++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataFileIngress.scala @@ -36,8 +36,8 @@ class SensorDataFileIngress extends AkkaStreamlet { import SensorDataJsonSupport._ - val out = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner) - def shape = StreamletShape.withOutlets(out) + val out: CodecOutlet[SensorData] = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner) + override def shape(): StreamletShape = StreamletShape.withOutlets(out) //tag::volume-mount1[] private val sourceData = VolumeMount("source-data-mount", "/mnt/data", ReadWriteMany) @@ -52,16 +52,16 @@ class SensorDataFileIngress extends AkkaStreamlet { // *) Note that reading and deserializing the file content is done in separate steps for readability only, in production they should be merged into one step for performance reasons. - override def createLogic = new RunnableGraphStreamletLogic() { + override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() { //tag::volume-mount2[] - val listFiles: NotUsed ⇒ Source[Path, NotUsed] = { _ ⇒ + val listFiles: NotUsed => Source[Path, NotUsed] = { _ => Directory.ls(getMountedPath(sourceData)) } //end::volume-mount2[] - val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒ + val readFile: Path => Source[ByteString, Future[IOResult]] = { path: Path => FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue)) } - val parseFile: ByteString ⇒ SensorData = { jsonByteString ⇒ + val parseFile: ByteString => SensorData = { jsonByteString => JsonParser(jsonByteString.utf8String).convertTo[SensorData] } @@ -70,7 +70,7 @@ class SensorDataFileIngress extends AkkaStreamlet { .flatMapConcat(listFiles) .flatMapConcat(readFile) .map(parseFile) - def runnableGraph = emitFromFilesContinuously.to(plainSink(out)) + override def runnableGraph(): RunnableGraph[_] = emitFromFilesContinuously.to(plainSink(out)) } // example of what not to do diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataHttpIngress.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataHttpIngress.scala index 8fc38a1d9..131e79721 100644 --- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataHttpIngress.scala +++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataHttpIngress.scala @@ -25,8 +25,8 @@ import cloudflow.streamlets.avro._ import SensorDataJsonSupport._ class SensorDataHttpIngress extends AkkaServerStreamlet { - val out = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner) - def shape = StreamletShape.withOutlets(out) - override def createLogic = HttpServerLogic.default(this, out) + val out: CodecOutlet[SensorData] = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner) + override def 
shape(): StreamletShape = StreamletShape.withOutlets(out) + override def createLogic(): AkkaStreamletLogic = HttpServerLogic.default(this, out) } //end::code[] diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataStreamingIngress.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataStreamingIngress.scala index 6e921f19f..750f4db13 100644 --- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataStreamingIngress.scala +++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataStreamingIngress.scala @@ -20,15 +20,15 @@ import akka.http.scaladsl.common.EntityStreamingSupport import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import SensorDataJsonSupport._ -import cloudflow.akkastream.AkkaServerStreamlet -import cloudflow.akkastream.util.scaladsl._ -import cloudflow.streamlets.{ RoundRobinPartitioner, StreamletShape } +import cloudflow.akkastream._ +import cloudflow.streamlets._ import cloudflow.streamlets.avro._ +import cloudflow.akkastream.util.scaladsl._ class SensorDataStreamingIngress extends AkkaServerStreamlet { - val out = AvroOutlet[SensorData]("out", RoundRobinPartitioner) - def shape = StreamletShape.withOutlets(out) + val out: CodecOutlet[SensorData] = AvroOutlet[SensorData]("out", RoundRobinPartitioner) + override def shape(): StreamletShape = StreamletShape.withOutlets(out) - implicit val entityStreamingSupport = EntityStreamingSupport.json() - override def createLogic = HttpServerLogic.defaultStreaming(this, out) + implicit val entityStreamingSupport = EntityStreamingSupport.json() + override def createLogic(): AkkaStreamletLogic = HttpServerLogic.defaultStreaming(this, out) } diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataToMetrics.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataToMetrics.scala index b92770f5d..cfc8c1679 100644 --- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataToMetrics.scala +++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/SensorDataToMetrics.scala @@ -16,26 +16,27 @@ //tag::code[] package sensordata +import akka.stream.scaladsl._ import cloudflow.akkastream._ import cloudflow.akkastream.scaladsl._ -import cloudflow.streamlets.{ RoundRobinPartitioner, StreamletShape } +import cloudflow.streamlets._ import cloudflow.streamlets.avro._ class SensorDataToMetrics extends AkkaStreamlet { - val in = AvroInlet[SensorData]("in") - val out = AvroOutlet[Metric]("out").withPartitioner(RoundRobinPartitioner) - val shape = StreamletShape(in, out) + val in: CodecInlet[SensorData] = AvroInlet[SensorData]("in") + val out: CodecOutlet[Metric] = AvroOutlet[Metric]("out").withPartitioner(RoundRobinPartitioner) + override val shape: StreamletShape = StreamletShape(in, out) def flow = - FlowWithCommittableContext[SensorData] - .mapConcat { data ⇒ + FlowWithCommittableContext[SensorData]() + .mapConcat { data => List( Metric(data.deviceId, data.timestamp, "power", data.measurements.power), Metric(data.deviceId, data.timestamp, "rotorSpeed", data.measurements.rotorSpeed), Metric(data.deviceId, data.timestamp, "windSpeed", data.measurements.windSpeed) ) } - override def createLogic = new RunnableGraphStreamletLogic() { - def runnableGraph = 
sourceWithCommittableContext(in).via(flow).to(committableSink(out)) + override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() { + override def runnableGraph(): RunnableGraph[_] = sourceWithCommittableContext(in).via(flow).to(committableSink(out)) } } //end::code[] diff --git a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/ValidMetricLogger.scala b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/ValidMetricLogger.scala index 23babdf74..b2887a665 100644 --- a/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/ValidMetricLogger.scala +++ b/examples/snippets/modules/ROOT/examples/sensor-data-scala/src/main/scala/sensordata/ValidMetricLogger.scala @@ -16,6 +16,7 @@ //tag::all[] package sensordata +import akka.stream.scaladsl._ import cloudflow.akkastream._ import cloudflow.akkastream.scaladsl._ import cloudflow.streamlets._ @@ -25,8 +26,8 @@ import cloudflow.streamlets.avro._ class ValidMetricLogger extends AkkaStreamlet { //end::config-parameter1[] - val inlet = AvroInlet[Metric]("in") - val shape = StreamletShape.withInlets(inlet) + val inlet: CodecInlet[Metric] = AvroInlet[Metric]("in") + override val shape: StreamletShape = StreamletShape.withInlets(inlet) val LogLevel = RegExpConfigParameter( "log-level", @@ -41,27 +42,27 @@ class ValidMetricLogger extends AkkaStreamlet { override def configParameters = Vector(LogLevel, MsgPrefix) - override def createLogic = new RunnableGraphStreamletLogic() { - val logF: String ⇒ Unit = LogLevel.value.toLowerCase match { - case "debug" ⇒ system.log.debug _ - case "info" ⇒ system.log.info _ - case "warning" ⇒ system.log.warning _ - case "error" ⇒ system.log.error _ + override def createLogic(): AkkaStreamletLogic = new RunnableGraphStreamletLogic() { + val logF: String => Unit = LogLevel.value.toLowerCase match { + case "debug" => system.log.debug _ + case "info" => system.log.info _ + case "warning" => system.log.warning _ + case "error" => system.log.error _ } val msgPrefix = MsgPrefix.value - def log(metric: Metric) = + def log(metric: Metric): Unit = logF(s"$msgPrefix $metric") def flow = - FlowWithCommittableContext[Metric] - .map { validMetric ⇒ + FlowWithCommittableContext[Metric]() + .map { validMetric => log(validMetric) validMetric } - def runnableGraph = + override def runnableGraph(): RunnableGraph[_] = sourceWithCommittableContext(inlet).via(flow).to(committableSink) } } diff --git a/examples/snippets/modules/ROOT/examples/spark-scala/project/cloudflow-plugins.sbt b/examples/snippets/modules/ROOT/examples/spark-scala/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/snippets/modules/ROOT/examples/spark-scala/project/cloudflow-plugins.sbt +++ b/examples/snippets/modules/ROOT/examples/spark-scala/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/snippets/modules/ROOT/examples/spark-scala/src/test/scala/com/example/SampleSpec.scala b/examples/snippets/modules/ROOT/examples/spark-scala/src/test/scala/com/example/SampleSpec.scala index 12113e71a..f101e9457 100644 --- a/examples/snippets/modules/ROOT/examples/spark-scala/src/test/scala/com/example/SampleSpec.scala +++ 
b/examples/snippets/modules/ROOT/examples/spark-scala/src/test/scala/com/example/SampleSpec.scala @@ -7,7 +7,10 @@ class SampleSpec extends SparkScalaTestSupport { "An TestProcessor" should { //tag::config-value[] - val testKit = SparkStreamletTestkit(session).withConfigParameterValues(ConfigParameterValue(RecordSumFlow.recordsInWindowParameter, "20")) + val testKit = SparkStreamletTestkit(session) + .withConfigParameterValues( + ConfigParameterValue(RecordSumFlow.recordsInWindowParameter, "20") + ) //end::config-value[] "Allow for creating a 'flow processor'" in { diff --git a/examples/spark-sensors-proto/project/cloudflow-plugins.sbt b/examples/spark-sensors-proto/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/spark-sensors-proto/project/cloudflow-plugins.sbt +++ b/examples/spark-sensors-proto/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/spark-sensors/project/cloudflow-plugins.sbt b/examples/spark-sensors/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/spark-sensors/project/cloudflow-plugins.sbt +++ b/examples/spark-sensors/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/taxi-ride-proto/processor/src/main/scala/taxiride/processor/TaxiRideProcessor.scala b/examples/taxi-ride-proto/processor/src/main/scala/taxiride/processor/TaxiRideProcessor.scala index 57f4548e6..2d65b5bdd 100644 --- a/examples/taxi-ride-proto/processor/src/main/scala/taxiride/processor/TaxiRideProcessor.scala +++ b/examples/taxi-ride-proto/processor/src/main/scala/taxiride/processor/TaxiRideProcessor.scala @@ -35,7 +35,7 @@ package taxiride.processor import cloudflow.flink._ import org.apache.flink.streaming.api.scala._ import org.apache.flink.streaming.api.functions.co._ -import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor} +import org.apache.flink.api.common.state.{ ValueState, ValueStateDescriptor } import org.apache.flink.util.Collector import cloudflow.streamlets.StreamletShape import cloudflow.streamlets.proto._ diff --git a/examples/taxi-ride/project/cloudflow-plugins.sbt b/examples/taxi-ride/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/taxi-ride/project/cloudflow-plugins.sbt +++ b/examples/taxi-ride/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/templates/single-backend-java/project/cloudflow-plugins.sbt b/examples/templates/single-backend-java/project/cloudflow-plugins.sbt index 25de68ea9..4fc707bc5 100644 --- a/examples/templates/single-backend-java/project/cloudflow-plugins.sbt +++ b/examples/templates/single-backend-java/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - 
sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/templates/single-backend-scala/project/cloudflow-plugins.sbt b/examples/templates/single-backend-scala/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/templates/single-backend-scala/project/cloudflow-plugins.sbt +++ b/examples/templates/single-backend-scala/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/examples/tensorflow-akka/project/cloudflow-plugins.sbt b/examples/tensorflow-akka/project/cloudflow-plugins.sbt index 11d649007..680a1df07 100644 --- a/examples/tensorflow-akka/project/cloudflow-plugins.sbt +++ b/examples/tensorflow-akka/project/cloudflow-plugins.sbt @@ -1,5 +1,5 @@ val latestVersion = { - sys.env.get("CLOUDFLOW_VERSION").fold( + sys.env.get("CLOUDFLOW_VERSION").filter(_.nonEmpty).fold( sbtdynver.DynVer(None, "-", "v") .getGitDescribeOutput(new java.util.Date()) .fold(throw new Exception("Failed to retrieve version"))(_.version("-")) diff --git a/scripts/build-all.sh b/scripts/build-all.sh deleted file mode 100755 index fb39a40ef..000000000 --- a/scripts/build-all.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (C) 2016-2021 Lightbend Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -x - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )" -TARGET=$1 -if [ "$TARGET" == "" ]; -then - set +x - echo "===================================================================================" - echo "Error: 'target' missing." - echo "Usage: build-all.sh # where is an sbt target: compile, test, ..." - echo "===================================================================================" - exit -1 -fi -echo "=========================================================================" -echo "Runs 'sbt $TARGET' for core and examples" -echo "=========================================================================" - -cd $DIR/../core -sbt -mem 2048 --supershell=false "; scalafmtCheck ; $TARGET ; +publishLocal" -RETVAL=$? -[ $RETVAL -ne 0 ] && echo "Failure in building of core" && exit -1 - -echo "Core streamlet libraries built, tested and published to local" - -echo "Now starting building of examples..." 
- -# Following section has been commented - will uncomment when we have a way -# to publish artifacts since we need to specify the plugin version for each example - -cd ../examples - -# Obtain current project list from examples.yaml -PROJECTS=$(cat $DIR/../examples/examples.yaml | grep "path" | cut -d\" -f2) - -for prj in $PROJECTS; do - echo "=========================================================================" - echo "${TARGET}: $prj" - echo "=========================================================================" - - cd $prj - case "$prj" in - *-java) - sbt -mem 2048 --supershell=false "; $TARGET ; verifyBlueprint " - ;; - *) - sbt -mem 2048 --supershell=false "; scalafmtCheck ; $TARGET ; verifyBlueprint " - ;; - esac - RETVAL=$? - [ $RETVAL -ne 0 ] && echo "Failure in project $prj" && exit -1 - cd - -done diff --git a/scripts/build-core.sh b/scripts/build-core.sh new file mode 100755 index 000000000..3642e71ad --- /dev/null +++ b/scripts/build-core.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash + +# Copyright (C) 2016-2021 Lightbend Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )" +ROOT_DIR=$(dirname "$SCRIPTS_DIR") + +source "$SCRIPTS_DIR"/functions.sh + +TARGET="${1:-test}" +check_argument "$1" "build-core.sh # where is a sbt task: compile, publishM2, ..." + +show_message "Runs 'sbt $TARGET' for core" + +cd $ROOT_DIR/core + +sbt -mem 4096 --supershell=false "; scalafmtCheckAll ; $TARGET" +RETVAL=$? +[ $RETVAL -ne 0 ] && echo "Failure in building of core" && exit 1 + +show_message "Successfully ran $TARGET for Core streamlet libraries" diff --git a/scripts/build-mvn-examples.sh b/scripts/build-mvn-examples.sh new file mode 100755 index 000000000..8537cdaf2 --- /dev/null +++ b/scripts/build-mvn-examples.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +# Copyright (C) 2016-2021 Lightbend Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )" +ROOT_DIR=$(dirname "$SCRIPTS_DIR") + +source "$SCRIPTS_DIR"/functions.sh + +TARGET="${1:-test}" +check_argument "$1" "build-mvn-examples.sh # where is a mvn task: compile, test, ..." + +CLOUDFLOW_VERSION=$(echo_cf_version) +show_message "Cloudflow version is $CLOUDFLOW_VERSION" +export CLOUDFLOW_VERSION + +# Publish a local version that will be used by the examples. This shouldn't change +# any files, so it is safe to run after reading the version. 
+"$SCRIPTS_DIR/build-core.sh" +publishM2 + +# Find all directories under `examples` with a `pom.xml` file. +# `sort` so that the parent directories are shown on top. +PROJECTS=$(find "$ROOT_DIR/examples" -name pom.xml -exec dirname {} \; | sort) + +# Avoid to execute the target for subprojects, which also have a `pom.xml` file +PARENT_PROJECTS=() +current_dir="not-a-project" +for item in $PROJECTS; do + if [[ "${item}" == "${current_dir}"* ]]; then + echo "$item is under $current_dir, filtering out" + else + current_dir="$item" + PARENT_PROJECTS+=("${current_dir}") + fi +done + +for prj in "${PARENT_PROJECTS[@]}"; do + show_message "mvn ${TARGET}: $prj" + cd "$prj" + + # Only run `cloudflow:verify-blueprint` task if there are blueprint files in the project + if [[ -n $(find . -name "blueprint.conf") ]]; then + # For more details about Maven workflow, see the docs: + # https://developer.lightbend.com/docs/cloudflow/shared-content/2.1.2/develop/maven-support.html + if ! mvn package cloudflow:extract-streamlets -DskipTests; then + show_message "Failed to extract streamlet for project $prj" + exit 1 + fi + + if ! mvn cloudflow:verify-blueprint; then + show_message "Failed to verify blueprint for project $prj" + exit 1 + fi + fi + + if ! mvn "$TARGET"; then + show_message "Failed to run $TARGET for project $prj" + exit 1 + fi +done diff --git a/scripts/build-sbt-examples.sh b/scripts/build-sbt-examples.sh new file mode 100755 index 000000000..9ade3325d --- /dev/null +++ b/scripts/build-sbt-examples.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +# Copyright (C) 2016-2021 Lightbend Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )" +ROOT_DIR=$(dirname "$SCRIPTS_DIR") + +source "$SCRIPTS_DIR"/functions.sh + +TARGET="${1:-test}" +check_argument "$1" "build-sbt-examples.sh # where is a sbt task: compile, test, ..." + +CLOUDFLOW_VERSION=$(echo_cf_version) +show_message "Cloudflow version is $CLOUDFLOW_VERSION" +export CLOUDFLOW_VERSION + +# Publish a local version that will be used by the examples. This shouldn't change +# any files, so it is safe to run after reading the version. +"$SCRIPTS_DIR/build-core.sh" +publishLocal + +# Find all directories under `examples` with a `build.sbt` file +PROJECTS=$(find "$ROOT_DIR/examples" -name build.sbt -exec dirname {} \;) + +for prj in $PROJECTS; do + show_message "sbt ${TARGET}: $prj" + cd "$prj" + + # Only run `scalafmtCheckAll` task if there is a scalafmt config file in the project + scalafmtTask="" + if [[ -n $(find . -name ".scalafmt.conf") ]]; then + scalafmtTask="; scalafmtCheckAll" + fi + + # Only run `verifyBlueprint` task if there are blueprint files in the project + blueprintTask="" + if [[ -n $(find . 
-name "blueprint.conf") ]]; then + blueprintTask="; verifyBlueprint" + fi + + sbt -mem 4096 --supershell=false "${scalafmtTask} $blueprintTask ; $TARGET" +done diff --git a/scripts/functions.sh b/scripts/functions.sh new file mode 100644 index 000000000..6edddc6a2 --- /dev/null +++ b/scripts/functions.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P )" +ROOT_DIR=$(dirname "$SCRIPTS_DIR") + +function echo_cf_version() { + cd "$ROOT_DIR/core" + # Get all sbt dependencies and jars ready if they aren't arlready. + sbt exit > /dev/null 2>&1 + # Scoping to cloudflow-akka because we only want to get the version once. + sbt --supershell=false --no-colors --error "print cloudflow-akka/version" +} + +function show_message() { + message="$1" + echo "-------------------------------------------------------------------------" + echo "${message}" + echo "-------------------------------------------------------------------------" +} + +function check_argument() { + target=$1 + message=$2 + if [ -z "$target" ]; + then + echo "===================================================================================" + echo "Using 'test' as the default target. If you want to run another target, run:" + echo " $message" + echo "===================================================================================" + fi +} \ No newline at end of file