Merge pull request yahoo#720 from arenadata/2.4.0
Add support for Kafka 2.4.0
patelh authored Feb 18, 2020
2 parents 20cbcf0 + d09508e commit f2175fc
Showing 11 changed files with 42 additions and 18 deletions.
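
In outline, the change follows the project's existing pattern for registering a new broker version: define a new `KafkaVersion` case object and register it in every version-keyed map (supported versions, topic configs, logkafka configs, controller defaults), then bump the version string used by the tests. The sketch below is a condensed, self-contained model of that pattern, not a verbatim copy of the project's source; only the names shown in the hunks further down are taken from the diff.

```scala
// Simplified, self-contained model of the registration pattern this commit follows.
// KafkaVersion is modeled here as a sealed trait; the real definitions live in
// app/kafka/manager/model/model.scala.
sealed trait KafkaVersion

case object Kafka_2_2_0 extends KafkaVersion { override def toString = "2.2.0" }
case object Kafka_2_4_0 extends KafkaVersion { override def toString = "2.4.0" } // added by this PR

object KafkaVersion {
  // Every supported broker version is registered here. The UI drop-down, the
  // per-version config maps and the tests are all keyed off this registry,
  // which is why the diff touches 11 files for one new version string.
  val supportedVersions: Map[String, KafkaVersion] = Map(
    "2.2.0" -> Kafka_2_2_0,
    "2.4.0" -> Kafka_2_4_0
  )
}

object RegistrationDemo extends App {
  // Resolves the new version string through the registry;
  // prints "2.4.0" (the overridden toString of Kafka_2_4_0).
  println(KafkaVersion.supportedVersions("2.4.0"))
}
```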
4 changes: 4 additions & 0 deletions app/controllers/Logkafka.scala
@@ -97,6 +97,8 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
LogkafkaNewConfigs.configMaps(Kafka_2_1_1).map{case(k,v) => LKConfig(k,Some(v))}.toList)
val kafka_2_2_0_Default = CreateLogkafka("","",
LogkafkaNewConfigs.configMaps(Kafka_2_2_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)
+ val kafka_2_4_0_Default = CreateLogkafka("","",
+ LogkafkaNewConfigs.configMaps(Kafka_2_4_0).map{case(k,v) => LKConfig(k,Some(v))}.toList)

val defaultCreateForm = Form(
mapping(
@@ -157,6 +159,7 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
case Kafka_2_1_0 => (defaultCreateForm.fill(kafka_2_1_0_Default), clusterContext)
case Kafka_2_1_1 => (defaultCreateForm.fill(kafka_2_1_1_Default), clusterContext)
case Kafka_2_2_0 => (defaultCreateForm.fill(kafka_2_2_0_Default), clusterContext)
+ case Kafka_2_4_0 => (defaultCreateForm.fill(kafka_2_4_0_Default), clusterContext)
}
}
}
@@ -261,6 +264,7 @@ class Logkafka (val cc: ControllerComponents, val kafkaManagerContext: KafkaMana
case Kafka_2_1_0 => LogkafkaNewConfigs.configNames(Kafka_2_1_0).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_1_1 => LogkafkaNewConfigs.configNames(Kafka_2_1_1).map(n => (n,LKConfig(n,None))).toMap
case Kafka_2_2_0 => LogkafkaNewConfigs.configNames(Kafka_2_2_0).map(n => (n,LKConfig(n,None))).toMap
+ case Kafka_2_4_0 => LogkafkaNewConfigs.configNames(Kafka_2_4_0).map(n => (n,LKConfig(n,None))).toMap
}
val identityOption = li.identityMap.get(log_path)
if (identityOption.isDefined) {
5 changes: 4 additions & 1 deletion app/controllers/Topic.scala
@@ -67,6 +67,7 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
val kafka_2_1_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_1_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_1_1_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_1_1).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
val kafka_2_2_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_2_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)
+ val kafka_2_4_0_Default = CreateTopic("",1,1,TopicConfigs.configNamesAndDoc(Kafka_2_4_0).map{ case (n, h) => TConfig(n,None,Option(h))}.toList)

val defaultCreateForm = Form(
mapping(
@@ -168,7 +169,8 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
case Kafka_2_0_0 => (defaultCreateForm.fill(kafka_2_0_0_Default), clusterContext)
case Kafka_2_1_0 => (defaultCreateForm.fill(kafka_2_1_0_Default), clusterContext)
case Kafka_2_1_1 => (defaultCreateForm.fill(kafka_2_1_1_Default), clusterContext)
- case Kafka_2_2_0 => (defaultCreateForm.fill(kafka_2_1_1_Default), clusterContext)
+ case Kafka_2_2_0 => (defaultCreateForm.fill(kafka_2_2_0_Default), clusterContext)
+ case Kafka_2_4_0 => (defaultCreateForm.fill(kafka_2_4_0_Default), clusterContext)
}
}
}
@@ -419,6 +421,7 @@ class Topic (val cc: ControllerComponents, val kafkaManagerContext: KafkaManager
case Kafka_2_1_0 => TopicConfigs.configNamesAndDoc(Kafka_2_1_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_1_1 => TopicConfigs.configNamesAndDoc(Kafka_2_1_1).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
case Kafka_2_2_0 => TopicConfigs.configNamesAndDoc(Kafka_2_2_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
+ case Kafka_2_4_0 => TopicConfigs.configNamesAndDoc(Kafka_2_4_0).map { case (n, h) => (n,TConfig(n,None, Option(h))) }
}
val updatedConfigMap = ti.config.toMap
val updatedConfigList = defaultConfigs.map {
2 changes: 1 addition & 1 deletion app/kafka/manager/actor/cluster/KafkaStateActor.scala
@@ -176,7 +176,7 @@ class KafkaAdminClient(context: => ActorContext, adminClientActorPath: ActorPath


object KafkaManagedOffsetCache {
- val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1, Kafka_2_2_0)
+ val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1, Kafka_2_2_0, Kafka_2_4_0)
val ConsumerOffsetTopic = "__consumer_offsets"

def isSupported(version: KafkaVersion) : Boolean = {
7 changes: 6 additions & 1 deletion app/kafka/manager/model/model.scala
@@ -100,6 +100,10 @@ case object Kafka_2_2_0 extends KafkaVersion {
override def toString = "2.2.0"
}

+ case object Kafka_2_4_0 extends KafkaVersion {
+ override def toString = "2.4.0"
+ }

object KafkaVersion {
val supportedVersions: Map[String,KafkaVersion] = Map(
"0.8.1.1" -> Kafka_0_8_1_1,
@@ -124,7 +128,8 @@ object KafkaVersion {
"2.0.0" -> Kafka_2_0_0,
"2.1.0" -> Kafka_2_1_0,
"2.1.1" -> Kafka_2_1_1,
"2.2.0" -> Kafka_2_2_0
"2.2.0" -> Kafka_2_2_0,
"2.4.0" -> Kafka_2_4_0
)

val formSelectList : IndexedSeq[(String,String)] = supportedVersions.toIndexedSeq.filterNot(_._1.contains("beta")).map(t => (t._1,t._2.toString)).sortWith((a, b) => sortVersion(a._1, b._1))
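
For context, a brief usage fragment (not part of the diff) showing what this registry change means downstream; it assumes the project's model package (`kafka.manager.model`) is available on the classpath, and mirrors the assertion added to KafkaVersionTest further down in this commit.

```scala
// Hedged usage sketch: resolve the new version string through the registry
// and check that the UI drop-down list (formSelectList) gains the new entry.
val resolved = KafkaVersion.supportedVersions("2.4.0")           // Kafka_2_4_0
assert(resolved.toString == "2.4.0")
assert(KafkaVersion.formSelectList.contains(("2.4.0", "2.4.0"))) // new drop-down entry
```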
3 changes: 2 additions & 1 deletion app/kafka/manager/utils/LogkafkaNewConfigs.scala
@@ -39,7 +39,8 @@ object LogkafkaNewConfigs {
Kafka_2_0_0 -> logkafka82.LogConfig,
Kafka_2_1_0 -> logkafka82.LogConfig,
Kafka_2_1_1 -> logkafka82.LogConfig,
- Kafka_2_2_0 -> logkafka82.LogConfig
+ Kafka_2_2_0 -> logkafka82.LogConfig,
+ Kafka_2_4_0 -> logkafka82.LogConfig
)

def configNames(version: KafkaVersion) : Set[String] = {
3 changes: 2 additions & 1 deletion app/kafka/manager/utils/TopicConfigs.scala
@@ -42,7 +42,8 @@ object TopicConfigs {
Kafka_2_0_0 -> two00.LogConfig,
Kafka_2_1_0 -> two00.LogConfig,
Kafka_2_1_1 -> two00.LogConfig,
- Kafka_2_2_0 -> two00.LogConfig
+ Kafka_2_2_0 -> two00.LogConfig,
+ Kafka_2_4_0 -> two00.LogConfig
)

def configNames(version: KafkaVersion): Seq[String] = {
2 changes: 1 addition & 1 deletion test/controller/api/TestKafkaStateCheck.scala
@@ -78,7 +78,7 @@ class TestKafkaStateCheck extends CuratorAwareTest with KafkaServerInTest with M

private[this] def createCluster() = {
val future = kafkaManagerContext.get.getKafkaManager.addCluster(
testClusterName, "2.2.0", kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManagerContext.get.getKafkaManager.defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None
testClusterName, "2.4.0", kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManagerContext.get.getKafkaManager.defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None
)
val result = Await.result(future, duration)
result.toEither.left.foreach(apiError => sys.error(apiError.msg))
6 changes: 3 additions & 3 deletions test/kafka/manager/TestKafkaManager.scala
@@ -125,7 +125,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("add cluster") {
val future = kafkaManager.addCluster("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val future = kafkaManager.addCluster("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(kafkaManager.defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)
Thread.sleep(2000)
@@ -392,7 +392,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("update cluster zkhost") {
val future = kafkaManager.updateCluster("dev","2.2.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxSsl = false, jmxPass = None, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val future = kafkaManager.updateCluster("dev","2.4.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxSsl = false, jmxPass = None, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)

@@ -449,7 +449,7 @@ class TestKafkaManager extends CuratorAwareTest with BaseTest {
}

test("update cluster logkafka enabled and activeOffsetCache enabled") {
val future = kafkaManager.updateCluster("dev","2.2.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val future = kafkaManager.updateCluster("dev","2.4.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol = "PLAINTEXT", saslMechanism = None, jaasConfig = None)
val result = Await.result(future,duration)
assert(result.isRight === true)

14 changes: 7 additions & 7 deletions test/kafka/manager/TestKafkaManagerActor.scala
@@ -69,7 +69,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("add cluster") {
val cc = ClusterConfig("dev","2.2.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc = ClusterConfig("dev","2.4.0",testServer.getConnectString, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -80,7 +80,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster zkhost") {
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -112,7 +112,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster version") {
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -139,7 +139,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
println(result)
result.msg.contains("dev")
}
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMAddCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(1000)
@@ -156,7 +156,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster logkafka enabled") {
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
@@ -168,7 +168,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {

test("update cluster tuning") {
val newTuning = getClusterTuning(3, 101, 11, 10000, 10000, 1)
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
tuning = Option(newTuning), securityProtocol="PLAINTEXT", saslMechanism = None, jaasConfig = None
)
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
@@ -185,7 +185,7 @@ class TestKafkaManagerActor extends CuratorAwareTest with BaseTest {
}

test("update cluster security protocol") {
val cc2 = ClusterConfig("dev","2.2.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val cc2 = ClusterConfig("dev","2.4.0",kafkaServerZkPath, jmxEnabled = false, pollConsumers = true, filterConsumers = true, logkafkaEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = Option(defaultTuning), securityProtocol="SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
result.result.get
Thread.sleep(3000)
6 changes: 4 additions & 2 deletions test/kafka/manager/model/KafkaVersionTest.scala
@@ -34,7 +34,8 @@ class KafkaVersionTest extends FunSuite {
"2.0.0" -> Kafka_2_0_0,
"2.1.0" -> Kafka_2_1_0,
"2.1.1" -> Kafka_2_1_1,
"2.2.0" -> Kafka_2_2_0
"2.2.0" -> Kafka_2_2_0,
"2.4.0" -> Kafka_2_4_0
)

test("apply method: supported version.") {
@@ -75,7 +76,8 @@ class KafkaVersionTest extends FunSuite {
("2.0.0","2.0.0"),
("2.1.0","2.1.0"),
("2.1.1","2.1.1"),
("2.2.0","2.2.0")
("2.2.0","2.2.0"),
("2.4.0","2.4.0")
)
assertResult(expected)(KafkaVersion.formSelectList)
}
8 changes: 8 additions & 0 deletions test/kafka/manager/utils/TestClusterConfig.scala
@@ -229,4 +229,12 @@ class TestClusterConfig extends FunSuite with Matchers {
assert(cc == deserialize.get)
}

test("serialize and deserialize 2.4.0") {
val cc = ClusterConfig("qa", "2.4.0", "localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None, securityProtocol = "SASL_PLAINTEXT", saslMechanism = Option("PLAIN"), jaasConfig = Option("blah"))
val serialize: String = ClusterConfig.serialize(cc)
val deserialize = ClusterConfig.deserialize(serialize)
assert(deserialize.isSuccess === true)
assert(cc == deserialize.get)
}

}
