Commit

Add latest kafka versions
danieltoader committed Mar 1, 2022
1 parent c1868b6 commit 204288b
Showing 11 changed files with 143 additions and 17 deletions.
20 changes: 20 additions & 0 deletions app/controllers/Logkafka.scala
@@ -115,6 +115,16 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
LogkafkaNewConfigs.configMaps(Kafka_2_5_1).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_6_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_6_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_7_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_7_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_8_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_8_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_8_1_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_8_1).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_3_0_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_3_0_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_3_1_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_3_1_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)

val defaultCreateForm = Form(
mapping(
@@ -184,6 +194,11 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
case Kafka_2_5_0 => (defaultCreateForm.fill(kafka_2_5_0_Default), clusterContext)
case Kafka_2_5_1 => (defaultCreateForm.fill(kafka_2_5_1_Default), clusterContext)
case Kafka_2_6_0 => (defaultCreateForm.fill(kafka_2_6_0_Default), clusterContext)
case Kafka_2_7_0 => (defaultCreateForm.fill(kafka_2_7_0_Default), clusterContext)
case Kafka_2_8_0 => (defaultCreateForm.fill(kafka_2_8_0_Default), clusterContext)
case Kafka_2_8_1 => (defaultCreateForm.fill(kafka_2_8_1_Default), clusterContext)
case Kafka_3_0_0 => (defaultCreateForm.fill(kafka_3_0_0_Default), clusterContext)
case Kafka_3_1_0 => (defaultCreateForm.fill(kafka_3_1_0_Default), clusterContext)
}
}
}
@@ -297,6 +312,11 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
case Kafka_2_5_0 => LogkafkaNewConfigs.configNames(Kafka_2_5_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_5_1 => LogkafkaNewConfigs.configNames(Kafka_2_5_1).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_6_0 => LogkafkaNewConfigs.configNames(Kafka_2_6_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_7_0 => LogkafkaNewConfigs.configNames(Kafka_2_7_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_8_0 => LogkafkaNewConfigs.configNames(Kafka_2_8_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_8_1 => LogkafkaNewConfigs.configNames(Kafka_2_8_1).map(n => (n,LKConfig(n,None))).toMap
case Kafka_3_0_0 => LogkafkaNewConfigs.configNames(Kafka_3_0_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_3_1_0 => LogkafkaNewConfigs.configNames(Kafka_3_1_0).map(n => (n,LKConfig(n,None))).toMap
}
val identityOption = li.identityMap.get(log_path)
if (identityOption.isDefined) {
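These hunks follow one mechanical pattern: every place that pattern-matches on a KafkaVersion gains one case per new version, otherwise the match no longer covers the full hierarchy (Topic.scala, next, extends the same kind of matches). Below is a minimal, self-contained sketch of that dispatch shape; the simplified LKConfig/CreateLogkafka types, the "valid" config key, and the assumption that KafkaVersion is sealed (as the model.scala hunks further down suggest) are illustrations, not the project's real definitions.

// Sketch only - simplified stand-ins for the model and controller types.
sealed trait KafkaVersion
case object Kafka_3_0_0 extends KafkaVersion { override def toString = "3.0.0" }
case object Kafka_3_1_0 extends KafkaVersion { override def toString = "3.1.0" }

case class LKConfig(name: String, value: Option[String])
case class CreateLogkafka(logkafkaId: String, logPath: String, configs: List[LKConfig])

object VersionDispatchSketch {
  // Hypothetical per-version default config maps.
  val configMaps: Map[KafkaVersion, Map[String, String]] = Map(
    Kafka_3_0_0 -> Map("valid" -> "true"),
    Kafka_3_1_0 -> Map("valid" -> "true")
  )

  // With a sealed KafkaVersion, a missing case shows up as a non-exhaustive
  // match at compile time - which is why each new version touches every match.
  def defaultsFor(version: KafkaVersion): CreateLogkafka = version match {
    case Kafka_3_0_0 => CreateLogkafka("", "", configMaps(Kafka_3_0_0).map { case (k, v) => LKConfig(k, Some(v)) }.toList)
    case Kafka_3_1_0 => CreateLogkafka("", "", configMaps(Kafka_3_1_0).map { case (k, v) => LKConfig(k, Some(v)) }.toList)
  }
}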
15 changes: 15 additions & 0 deletions app/controllers/Topic.scala
@@ -76,6 +76,11 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
val kafka_2_5_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_5_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_5_1_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_5_1).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_6_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_6_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_7_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_7_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_8_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_8_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_8_1_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_8_1).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_3_0_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_3_0_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_3_1_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_3_1_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)

val defaultCreateForm = Form(
mapping(
@@ -187,6 +192,11 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
case Kafka_2_5_0 => (defaultCreateForm.fill(kafka_2_5_0_Default), clusterContext)
case Kafka_2_5_1 => (defaultCreateForm.fill(kafka_2_5_1_Default), clusterContext)
case Kafka_2_6_0 => (defaultCreateForm.fill(kafka_2_6_0_Default), clusterContext)
case Kafka_2_7_0 => (defaultCreateForm.fill(kafka_2_7_0_Default), clusterContext)
case Kafka_2_8_0 => (defaultCreateForm.fill(kafka_2_8_0_Default), clusterContext)
case Kafka_2_8_1 => (defaultCreateForm.fill(kafka_2_8_1_Default), clusterContext)
case Kafka_3_0_0 => (defaultCreateForm.fill(kafka_3_0_0_Default), clusterContext)
case Kafka_3_1_0 => (defaultCreateForm.fill(kafka_3_1_0_Default), clusterContext)
}
}
}
@@ -446,6 +456,11 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
case Kafka_2_5_0 => TopicConfigs.configNamesAndDoc(Kafka_2_5_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_5_1 => TopicConfigs.configNamesAndDoc(Kafka_2_5_1).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_6_0 => TopicConfigs.configNamesAndDoc(Kafka_2_6_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_7_0 => TopicConfigs.configNamesAndDoc(Kafka_2_7_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_8_0 => TopicConfigs.configNamesAndDoc(Kafka_2_8_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_8_1 => TopicConfigs.configNamesAndDoc(Kafka_2_8_1).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_3_0_0 => TopicConfigs.configNamesAndDoc(Kafka_3_0_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_3_1_0 => TopicConfigs.configNamesAndDoc(Kafka_3_1_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
}
val updatedConfigMap = ti.config.toMap
val updatedConfigList = defaultConfigs.map {
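In the last Topic.scala hunk, the per-version defaults built from configNamesAndDoc are then overlaid with the topic's current values via the updatedConfigMap / updatedConfigList lines visible above. A rough sketch of that overlay, with a simplified TConfig assumed for illustration:

// Sketch only - simplified stand-in for the controller's TConfig.
case class TConfig(name: String, value: Option[String], help: Option[String])

object TopicDefaultsSketch {
  // Overlay a topic's existing config values onto the per-version defaults,
  // so the edit form lists every known key but keeps whatever is already set.
  def merged(defaults: Map[String, TConfig], existing: Map[String, String]): List[TConfig] =
    defaults.map { case (name, d) =>
      existing.get(name) match {
        case Some(value) => d.copy(value = Some(value))
        case None        => d
      }
    }.toList
}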
2 changes: 1 addition & 1 deletion app/kafka/manager/actor/cluster/KafkaStateActor.scala
@@ -186,7 +186,7 @@ class KafkaAdminClient(context: => ActorContext, adminClientActorPath: ActorPath


object KafkaManagedOffsetCache {
val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1, Kafka_2_2_0, Kafka_2_2_1, Kafka_2_2_2, Kafka_2_3_0, Kafka_2_2_1, Kafka_2_4_0, Kafka_2_4_1, Kafka_2_5_0, Kafka_2_5_1, Kafka_2_6_0)
val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1, Kafka_2_2_0, Kafka_2_2_1, Kafka_2_2_2, Kafka_2_3_0, Kafka_2_2_1, Kafka_2_4_0, Kafka_2_4_1, Kafka_2_5_0, Kafka_2_5_1, Kafka_2_6_0, Kafka_2_7_0, Kafka_2_8_0, Kafka_2_8_1, Kafka_3_0_0, Kafka_3_1_0)
val ConsumerOffsetTopic = "__consumer_offsets"

def isSupported(version: KafkaVersion) : Boolean = {
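Widening supportedVersions is what allows the consumer-offset cache to read __consumer_offsets for clusters declared as 2.7.0 through 3.1.0. The isSupported body is cut off in this hunk; presumably it is just set membership, along these lines (reusing the KafkaVersion sketch types from the earlier block):

object OffsetCacheSupportSketch {
  // Assumed: membership in supportedVersions decides whether the managed
  // offset cache is used for a cluster's declared Kafka version.
  val supportedVersions: Set[KafkaVersion] = Set(Kafka_3_0_0, Kafka_3_1_0) // abbreviated
  def isSupported(version: KafkaVersion): Boolean = supportedVersions(version)
}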
27 changes: 26 additions & 1 deletion app/kafka/manager/model/model.scala
@@ -136,6 +136,26 @@ case object Kafka_2_6_0 extends KafkaVersion {
override def toString = "2.6.0"
}

case object Kafka_2_7_0 extends KafkaVersion {
override def toString = "2.7.0"
}

case object Kafka_2_8_0 extends KafkaVersion {
override def toString = "2.8.0"
}

case object Kafka_2_8_1 extends KafkaVersion {
override def toString = "2.8.1"
}

case object Kafka_3_0_0 extends KafkaVersion {
override def toString = "3.0.0"
}

case object Kafka_3_1_0 extends KafkaVersion {
override def toString = "3.1.0"
}

object KafkaVersion {
val supportedVersions: Map[String,KafkaVersion] = Map(
"0.8.1.1" -> Kafka_0_8_1_1,
@@ -169,7 +189,12 @@ object KafkaVersion {
"2.4.1" -> Kafka_2_4_1,
"2.5.0" -> Kafka_2_5_0,
"2.5.1" -> Kafka_2_5_1,
"2.6.0" -> Kafka_2_6_0
"2.6.0" -> Kafka_2_6_0,
"2.7.0" -> Kafka_2_7_0,
"2.8.0" -> Kafka_2_8_0,
"2.8.1" -> Kafka_2_8_1,
"3.0.0" -> Kafka_3_0_0,
"3.1.0" -> Kafka_3_1_0
)

val formSelectList : IndexedSeq[(String,String)] = supportedVersions.toIndexedSeq.filterNot(_._1.contains("beta")).map(t => (t._1,t._2.toString)).sortWith((a, b) => sortVersion(a._1, b._1))
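formSelectList, at the bottom of this hunk, orders the version strings with a sortVersion helper that the diff does not show. For intuition only, a plausible segment-by-segment numeric comparison might look like the following (an assumption, not the project's actual implementation); numeric comparison keeps, for example, "0.10.0.0" after "0.9.0.0", which plain string ordering would not.

object VersionSortSketch {
  // Compare dotted version strings numerically, segment by segment.
  def sortVersion(a: String, b: String): Boolean = {
    def parts(v: String): Seq[Int] =
      v.split('.').toSeq.map(s => s.takeWhile(_.isDigit)).map(s => if (s.isEmpty) 0 else s.toInt)
    val (pa, pb) = (parts(a), parts(b))
    val len = math.max(pa.length, pb.length)
    pa.padTo(len, 0).zip(pb.padTo(len, 0)).find { case (x, y) => x != y } match {
      case Some((x, y)) => x < y   // true when a sorts before b
      case None         => false   // equal versions
    }
  }
}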
7 changes: 6 additions & 1 deletion app/kafka/manager/utils/LogkafkaNewConfigs.scala
@@ -48,7 +48,12 @@ object LogkafkaNewConfigs {
Kafka_2_4_1 -> logkafka82.LogConfig,
Kafka_2_5_0 -> logkafka82.LogConfig,
Kafka_2_5_1 -> logkafka82.LogConfig,
Kafka_2_6_0 -> logkafka82.LogConfig
Kafka_2_6_0 -> logkafka82.LogConfig,
Kafka_2_7_0 -> logkafka82.LogConfig,
Kafka_2_8_0 -> logkafka82.LogConfig,
Kafka_2_8_1 -> logkafka82.LogConfig,
Kafka_3_0_0 -> logkafka82.LogConfig,
Kafka_3_1_0 -> logkafka82.LogConfig
)

def configNames(version: KafkaVersion) : Set[String] = {
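Every new entry points at the same logkafka82.LogConfig, i.e. the logkafka config schema is unchanged since the 0.8.2 era, so supporting another version is just one more map key. configNames (whose signature is visible above) presumably dereferences that map, roughly as sketched below; the map's name and the sample keys are assumptions.

object LogkafkaConfigLookupSketch {
  // Hypothetical stand-in for logkafka82.LogConfig - only the piece needed here.
  trait LogConfigLike { def configNames: Set[String] }
  object Logkafka82Like extends LogConfigLike { val configNames = Set("valid", "log_path") } // made-up keys

  // The diff shows this map's new entries but not its declaration; name assumed.
  val logConfigMap: Map[KafkaVersion, LogConfigLike] = Map(
    Kafka_3_0_0 -> Logkafka82Like,
    Kafka_3_1_0 -> Logkafka82Like
  )

  def configNames(version: KafkaVersion): Set[String] =
    logConfigMap.get(version) match {
      case Some(cfg) => cfg.configNames
      case None      => throw new IllegalArgumentException(s"Unsupported Kafka version: $version")
    }
}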
7 changes: 6 additions & 1 deletion app/kafka/manager/utils/TopicConfigs.scala
@@ -51,7 +51,12 @@ object TopicConfigs {
Kafka_2_4_1 -> two40.LogConfig,
Kafka_2_5_0 -> two40.LogConfig,
Kafka_2_5_1 -> two40.LogConfig,
Kafka_2_6_0 -> two40.LogConfig
Kafka_2_6_0 -> two40.LogConfig,
Kafka_2_7_0 -> two40.LogConfig,
Kafka_2_8_0 -> two40.LogConfig,
Kafka_2_8_1 -> two40.LogConfig,
Kafka_3_0_0 -> two40.LogConfig,
Kafka_3_1_0 -> two40.LogConfig
)

def configNames(version: KafkaVersion): Seq[String] = {
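Similarly, the new versions all reuse two40.LogConfig, so the set of topic-config keys surfaced in the UI stays at the 2.4.0-era list. If a later broker release introduced topic configs of its own, the change would presumably be a dedicated config object plus one more map entry; a purely hypothetical sketch (three10 and the extra key are not part of this commit):

// Hypothetical only - this commit deliberately keeps mapping to two40.LogConfig.
object three10 {
  object LogConfig {
    // Stand-in for two40's key list, extended with a made-up new key.
    val baseConfigNames: Seq[String] = Seq("cleanup.policy", "retention.ms")
    val configNames: Seq[String] = baseConfigNames :+ "hypothetical.new.key"
  }
}
// TopicConfigs would then add: Kafka_3_1_0 -> three10.LogConfig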
2 changes: 1 addition & 1 deletion build.sbt
@@ -5,7 +5,7 @@
name := """cmak"""

/* For packaging purposes, -SNAPSHOT MUST contain a digit */
version := "3.0.0.5"
version := "3.0.0.6"

scalaVersion := "2.12.10"

10 changes: 8 additions & 2 deletions sbt
@@ -9,8 +9,10 @@ set -o pipefail
declare -r sbt_release_version="1.3.8"
declare -r sbt_unreleased_version="1.3.8"

declare -r latest_213="2.13.1"
declare -r latest_212="2.12.10"
declare -r latest_31="3.1.0"
declare -r latest_30="3.0.3"
declare -r latest_213="2.13.8"
declare -r latest_212="2.12.15"
declare -r latest_211="2.11.12"
declare -r latest_210="2.10.7"
declare -r latest_29="2.9.3"
@@ -396,6 +398,8 @@ are not special.
-211 use $latest_211
-212 use $latest_212
-213 use $latest_213
-30 use $latest_30
-31 use $latest_31
-scala-home <path> use the scala build at the specified directory
-scala-version <version> use the specified version of scala
-binary-version <version> use the specified scala version when searching for dependencies
@@ -469,6 +473,8 @@ process_args() {
-211) setScalaVersion "$latest_211" && shift ;;
-212) setScalaVersion "$latest_212" && shift ;;
-213) setScalaVersion "$latest_213" && shift ;;
-30) setScalaVersion "$latest_30" && shift ;;
-31) setScalaVersion "$latest_31" && shift ;;

-scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;;
-binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;;
14 changes: 7 additions & 7 deletions test/kafka/manager/TestKafkaManagerActor.scala
@@ -69,7 +69,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("add cluster") {
val cc = ClusterConfig("dev","2.4.1",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc = ClusterConfig("dev","3.1.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -80,7 +80,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster zkhost") {
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -112,7 +112,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster version") {
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -139,7 +139,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
println(result)
result.msg.contains("dev")
}
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -156,7 +156,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster logkafka enabled") {
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -168,7 +168,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {

test("update cluster tuning") {
val newTuning = getClusterTuning(3, 101, 11, 10000, 10000, 1)
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
tuning = Option(newTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None
)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
@@ -185,7 +185,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster security protocol") {
val cc2 = ClusterConfig("dev","2.4.1",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val cc2 = ClusterConfig("dev","3.1.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
14 changes: 12 additions & 2 deletions test/kafka/manager/model/KafkaVersionTest.scala
@@ -43,7 +43,12 @@ class KafkaVersionTest extends FunSuite {
"2.4.1" -> Kafka_2_4_1,
"2.5.0" -> Kafka_2_5_0,
"2.5.1" -> Kafka_2_5_1,
"2.6.0" -> Kafka_2_6_0
"2.6.0" -> Kafka_2_6_0,
"2.7.0" -> Kafka_2_7_0,
"2.8.0" -> Kafka_2_8_0,
"2.8.1" -> Kafka_2_8_1,
"3.0.0" -> Kafka_3_0_0,
"3.1.0" -> Kafka_3_1_0
)

test("apply method: supported version.") {
@@ -93,7 +98,12 @@ class KafkaVersionTest extends FunSuite {
("2.4.1","2.4.1"),
("2.5.0","2.5.0"),
("2.5.1","2.5.1"),
("2.6.0","2.6.0")
("2.6.0","2.6.0"),
("2.7.0","2.7.0"),
("2.8.0","2.8.0"),
("2.8.1","2.8.1"),
("3.0.0","3.0.0"),
("3.1.0","3.1.0")
)
assertResult(expected)(KafkaVersion.formSelectList)
}
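The two updated fixtures keep the existing parsing and dropdown tests covering the new versions. If extra coverage of the apply path were wanted, a test in the same FunSuite style might look like this (a sketch that would live inside KafkaVersionTest, assuming the companion's apply resolves strings via supportedVersions as the existing "apply method" test implies):

  test("apply method: latest versions resolve to their case objects") {
    assertResult(Kafka_2_8_1)(KafkaVersion("2.8.1"))
    assertResult(Kafka_3_0_0)(KafkaVersion("3.0.0"))
    assertResult(Kafka_3_1_0)(KafkaVersion("3.1.0"))
  }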