
Commit c2139df

minor clean up, optimize imports, disable inventory mysql test for now

1 parent 790aa7d commit c2139df

File tree

35 files changed: +96 -237 lines changed

.idea/inspectionProfiles/ktlint.xml (+2)

Some generated files are not rendered by default.

buildSrc/build.gradle.kts (-2)

@@ -1,5 +1,3 @@
-import org.gradle.kotlin.dsl.`kotlin-dsl`
-
 plugins {
     `kotlin-dsl`
     id("org.jetbrains.kotlin.jvm").version("1.4.20")

floodplain-direct/src/main/java/org/apache/kafka/streams/TestOutputTopic.java (+1 -6)

@@ -19,12 +19,7 @@
 import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.streams.test.TestRecord;

-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.StringJoiner;
+import java.util.*;

 /**
  * {@code TestOutputTopic} is used to read records from a topic in {@link TopologyTestDriver}.

floodplain-direct/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java (+6 -54)

@@ -16,12 +16,7 @@
  */
 package org.apache.kafka.streams;

-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.MockConsumer;
-import org.apache.kafka.clients.consumer.OffsetAndMetadata;
-import org.apache.kafka.clients.consumer.OffsetResetStrategy;
+import org.apache.kafka.clients.consumer.*;
 import org.apache.kafka.clients.producer.MockProducer;
 import org.apache.kafka.clients.producer.Producer;
 import org.apache.kafka.clients.producer.ProducerRecord;

@@ -44,41 +39,11 @@
 import org.apache.kafka.streams.errors.TopologyException;
 import org.apache.kafka.streams.internals.KeyValueStoreFacade;
 import org.apache.kafka.streams.internals.WindowStoreFacade;
-import org.apache.kafka.streams.processor.ProcessorContext;
-import org.apache.kafka.streams.processor.PunctuationType;
-import org.apache.kafka.streams.processor.Punctuator;
-import org.apache.kafka.streams.processor.StateRestoreListener;
-import org.apache.kafka.streams.processor.StateStore;
-import org.apache.kafka.streams.processor.TaskId;
-import org.apache.kafka.streams.processor.internals.ChangelogRegister;
-import org.apache.kafka.streams.processor.internals.ClientUtils;
-import org.apache.kafka.streams.processor.internals.GlobalProcessorContextImpl;
-import org.apache.kafka.streams.processor.internals.GlobalStateManager;
-import org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl;
-import org.apache.kafka.streams.processor.internals.GlobalStateUpdateTask;
-import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
-import org.apache.kafka.streams.processor.internals.InternalTopologyBuilder;
-import org.apache.kafka.streams.processor.internals.ProcessorContextImpl;
-import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
-import org.apache.kafka.streams.processor.internals.ProcessorTopology;
-import org.apache.kafka.streams.processor.internals.RecordCollector;
-import org.apache.kafka.streams.processor.internals.RecordCollectorImpl;
-import org.apache.kafka.streams.processor.internals.StateDirectory;
-import org.apache.kafka.streams.processor.internals.StreamTask;
-import org.apache.kafka.streams.processor.internals.StreamThread;
-import org.apache.kafka.streams.processor.internals.Task;
-import org.apache.kafka.streams.processor.internals.TestDriverProducer;
+import org.apache.kafka.streams.processor.*;
+import org.apache.kafka.streams.processor.internals.*;
 import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
 import org.apache.kafka.streams.processor.internals.metrics.TaskMetrics;
-import org.apache.kafka.streams.state.KeyValueStore;
-import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
-import org.apache.kafka.streams.state.ReadOnlySessionStore;
-import org.apache.kafka.streams.state.ReadOnlyWindowStore;
-import org.apache.kafka.streams.state.SessionStore;
-import org.apache.kafka.streams.state.TimestampedKeyValueStore;
-import org.apache.kafka.streams.state.TimestampedWindowStore;
-import org.apache.kafka.streams.state.ValueAndTimestamp;
-import org.apache.kafka.streams.state.WindowStore;
+import org.apache.kafka.streams.state.*;
 import org.apache.kafka.streams.state.internals.ThreadCache;
 import org.apache.kafka.streams.test.TestRecord;
 import org.slf4j.Logger;

@@ -88,26 +53,13 @@
 import java.io.IOException;
 import java.time.Duration;
 import java.time.Instant;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Objects;
-import java.util.Properties;
-import java.util.Queue;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Supplier;
 import java.util.regex.Pattern;

-import static org.apache.kafka.streams.processor.internals.StreamThread.ProcessingMode.AT_LEAST_ONCE;
-import static org.apache.kafka.streams.processor.internals.StreamThread.ProcessingMode.EXACTLY_ONCE_ALPHA;
-import static org.apache.kafka.streams.processor.internals.StreamThread.ProcessingMode.EXACTLY_ONCE_BETA;
+import static org.apache.kafka.streams.processor.internals.StreamThread.ProcessingMode.*;

 /**
  * This class makes it easier to write tests to verify the behavior of topologies created with {@link Topology} or

floodplain-direct/src/main/java/org/apache/kafka/streams/processor/MockProcessorContext.java (+2 -10)

@@ -22,11 +22,7 @@
 import org.apache.kafka.common.metrics.Sensor;
 import org.apache.kafka.common.serialization.Serde;
 import org.apache.kafka.common.utils.Time;
-import org.apache.kafka.streams.KeyValue;
-import org.apache.kafka.streams.StreamsConfig;
-import org.apache.kafka.streams.StreamsMetrics;
-import org.apache.kafka.streams.Topology;
-import org.apache.kafka.streams.TopologyTestDriver;
+import org.apache.kafka.streams.*;
 import org.apache.kafka.streams.internals.ApiUtils;
 import org.apache.kafka.streams.kstream.Transformer;
 import org.apache.kafka.streams.kstream.ValueTransformer;

@@ -38,11 +34,7 @@

 import java.io.File;
 import java.time.Duration;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
+import java.util.*;

 /**
  * {@link MockProcessorContext} is a mock of {@link ProcessorContext} for users to test their {@link Processor},
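A note on direction: the three floodplain-direct files above keep the org.apache.kafka.streams package and appear to be forked copies of Kafka's test-support classes, so their long import lists are collapsed into wildcards; in Floodplain's own sources below, the change runs the other way, with wildcard imports expanded into explicit ones.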

floodplain-dsl/src/main/java/io/floodplain/sink/LogSinkConnector.java (+4 -1)

@@ -24,7 +24,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;

 public class LogSinkConnector extends SinkConnector {

floodplain-dsl/src/main/java/io/floodplain/sink/LogSinkTask.java (+3 -1)

@@ -22,7 +22,9 @@
 import org.apache.kafka.connect.sink.SinkTask;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.*;
+
+import java.util.Collection;
+import java.util.Map;

 public class LogSinkTask extends SinkTask {

floodplain-dsl/src/main/kotlin/io/floodplain/kotlindsl/Floodplain.kt (-1)

@@ -46,7 +46,6 @@ import io.floodplain.streams.remotejoin.TopologyConstructor
 import org.apache.kafka.connect.sink.SinkConnector
 import org.apache.kafka.connect.sink.SinkRecord
 import org.apache.kafka.connect.sink.SinkTask
-import java.lang.RuntimeException
 import java.time.Duration
 import java.util.Optional
 import java.util.concurrent.atomic.AtomicLong

floodplain-dsl/src/main/kotlin/io/floodplain/kotlindsl/PostgresSource.kt (-1)

@@ -25,7 +25,6 @@ import io.floodplain.streams.api.TopologyContext
 import io.floodplain.streams.remotejoin.TopologyConstructor
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.collect
-import java.lang.IllegalArgumentException

 private val logger = mu.KotlinLogging.logger {}

floodplain-dsl/src/main/kotlin/io/floodplain/kotlindsl/Stream.kt (+33 -73)

@@ -29,17 +29,8 @@ import kotlinx.coroutines.runBlocking
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.ProducerConfig
 import org.apache.kafka.common.serialization.Serdes
-import org.apache.kafka.streams.KafkaStreams
-import org.apache.kafka.streams.StreamsConfig
-import org.apache.kafka.streams.Topology
-import org.apache.kafka.streams.processor.WallclockTimestampExtractor
-import java.io.InputStream
-import java.lang.StringBuilder
-import java.net.URL
-import java.util.Properties
-import java.util.Stack
-import java.util.UUID
 import org.apache.kafka.common.utils.Time
+import org.apache.kafka.common.utils.Utils
 import org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy
 import org.apache.kafka.connect.runtime.Connect
 import org.apache.kafka.connect.runtime.Herder

@@ -51,10 +42,16 @@ import org.apache.kafka.connect.runtime.standalone.StandaloneConfig
 import org.apache.kafka.connect.runtime.standalone.StandaloneHerder
 import org.apache.kafka.connect.storage.FileOffsetBackingStore
 import org.apache.kafka.connect.util.ConnectUtils
+import org.apache.kafka.streams.KafkaStreams
+import org.apache.kafka.streams.StreamsConfig
+import org.apache.kafka.streams.Topology
+import org.apache.kafka.streams.processor.WallclockTimestampExtractor
+import java.io.InputStream
 import java.net.URI
-
-
-
+import java.net.URL
+import java.util.Properties
+import java.util.Stack
+import java.util.UUID

 private val logger = mu.KotlinLogging.logger {}

@@ -169,9 +166,7 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
     fun renderAndSchedule(connectorURL: URL?, settings: InputStream, force: Boolean = false): KafkaStreams {
         val prop = Properties()
         prop.load(settings)
-        val propMap = mutableMapOf<String, String>()
-        prop.forEach { (k, v) -> propMap.put(k as String, v as String) }
-        return renderAndSchedule(connectorURL, prop[StreamsConfig.BOOTSTRAP_SERVERS_CONFIG] as String, force, propMap)
+        return renderAndSchedule(connectorURL, prop[StreamsConfig.BOOTSTRAP_SERVERS_CONFIG] as String, force, Utils.propsToStringMap(prop))
     }

     fun renderAndSchedule(connectorURL: URL?, kafkaHosts: String, kafkaUsername: String, kafkaPassword: String, replicationFactor: Int, force: Boolean = false): KafkaStreams {
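The hand-rolled Properties-to-map copy is replaced by Kafka's own helper, Utils.propsToStringMap (newly imported in the first hunk). A minimal Kotlin sketch of the equivalence; the sample entry is an assumption for illustration:

import org.apache.kafka.common.utils.Utils
import java.util.Properties

fun demo() {
    val prop = Properties()
    prop["bootstrap.servers"] = "localhost:9092" // assumed example entry

    // Removed approach: copy entry by entry, casting keys and values to String.
    val manual = mutableMapOf<String, String>()
    prop.forEach { (k, v) -> manual[k as String] = v as String }

    // Replacement: Utils.propsToStringMap produces the same String-to-String map.
    val viaUtils: Map<String, String> = Utils.propsToStringMap(prop)

    check(manual == viaUtils)
}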
@@ -186,41 +181,6 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
         return renderAndSchedule(connectorURL, kafkaHosts, force, properties)
     }

-    /**
-     * Will create an executable definition of the str
-     * eam (@see render), then will start the topology by starting a streams
-     * instance pointing at the kafka cluster at kafkaHosts, using the supplied clientId.
-     * Finally, it will POST the supplied
-     */
-    fun renderAndRun(kafkaHosts: String, replicationFactor: Int, extraSettings: Map<String, Any>? = null, kafkaUsername: String? = null, kafkaPassword: String? = null): KafkaStreams {
-        val (topology, materializedConnectors) = renderLocal()
-        val properties = mutableMapOf<String,Any>(
-            StreamsConfig.BOOTSTRAP_SERVERS_CONFIG to kafkaHosts,
-            "acks" to "all",
-            StreamsConfig.REPLICATION_FACTOR_CONFIG to replicationFactor
-        )
-        if(kafkaUsername!=null && kafkaPassword!=null) {
-            properties["security.protocol"] = "SASL_SSL"
-            properties["sasl.jaas.config"] = "\"org.apache.kafka.common.security.plain.PlainLoginModule required username='$kafkaUsername' password='$kafkaPassword';\""
-            properties["sasl.mechanism"] = "PLAIN"
-        }
-
-        topologyConstructor.createTopicsAsNeeded(properties)
-
-        val appId = topologyContext.applicationId()
-        val extra: MutableMap<String, Any> = mutableMapOf()
-        if (extraSettings != null) {
-            extra.putAll(extraSettings)
-        }
-        val streams = runTopology(topology, appId, kafkaHosts, "storagePath", extra)
-        materializedConnectors.forEach {
-            logger.info("Starting connector named: ${it.name} to settings: ${it.settings}")
-        }
-        logger.info { "Topology running!" }
-        return streams
-    }
-
     /**
      * Will create an executable definition of the str
      * eam (@see render), then will start the topology by starting a streams

@@ -241,41 +201,45 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
                 startConstructor(name, topologyContext, it, json, force)
             }
         }
+        instantiateLocalSinks(settings)
+        val appId = topologyContext.topicName("@applicationId")
+        val extra: MutableMap<String, Any> = mutableMapOf()
+        extra.putAll(settings)
+        val streams = runTopology(topology, appId, kafkaHosts, "storagePath", extra)
+        logger.info { "Topology running!" }
+        runBlocking {
+            monitor?.invoke(this@Stream, streams)
+        }
+        return streams
+    }
+
+    private fun instantiateLocalSinks(settings: Map<String, String>) {
         var herder: Herder? = null
-        if(localSinkConfigurations.isNotEmpty()) {
+        if (localSinkConfigurations.isNotEmpty()) {
            herder = startLocalConnect(settings)
         }
         var count = 0
         localSinkConfigurations.flatMap {
             it.instantiateSinkElements()
         }.forEach {
-            val localSettings = mutableMapOf<String,String>()
+            val localSettings = mutableMapOf<String, String>()
             localSettings.putAll(it)
             val name = "conn-${count++}"
             localSettings["name"] = name
-            herder?.putConnectorConfig(name,localSettings,true) { err,created ->
-                if(err!=null) {
-                    logger.error("Error creating connector:",err)
+            herder?.putConnectorConfig(name, localSettings, true) { err, created ->
+                if (err != null) {
+                    logger.error("Error creating connector:", err)
                 }
-                logger.info("something happened")
+                logger.info("Instantiated: ${created?.created()} result: ${created?.result()}")
             }
         }
-        val appId = topologyContext.topicName("@applicationId")
-        val extra: MutableMap<String, Any> = mutableMapOf()
-        extra.putAll(settings)
-        val streams = runTopology(topology, appId, kafkaHosts, "storagePath", extra)
-        logger.info { "Topology running!" }
-        runBlocking {
-            monitor?.invoke(this@Stream,streams)
-        }
-        return streams
     }

     private fun startLocalConnect(initialWorkerProps: Map<String, String>): Herder {
         val workerProps = mutableMapOf<String,String>()
         workerProps.putAll(initialWorkerProps)
-        workerProps.put("key.converter","org.apache.kafka.connect.json.JsonConverter")
-        workerProps.put("value.converter","org.apache.kafka.connect.json.JsonConverter")
+        workerProps["key.converter"] = "org.apache.kafka.connect.json.JsonConverter"
+        workerProps["value.converter"] = "org.apache.kafka.connect.json.JsonConverter"
         workerProps["offset.storage.file.filename"] = "offset"
         val plugins = Plugins(workerProps)
         val config = StandaloneConfig(workerProps)
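The connector-registration callback now logs a real outcome instead of "something happened". For context, Herder.putConnectorConfig completes with a Herder.Created<ConnectorInfo> whose created() flags whether the connector was newly made and whose result() carries its ConnectorInfo. A minimal sketch of that contract; the register helper is hypothetical, not part of this commit:

import org.apache.kafka.connect.runtime.Herder

// Hypothetical helper mirroring the callback shape used in instantiateLocalSinks above.
fun register(herder: Herder, name: String, config: Map<String, String>) {
    herder.putConnectorConfig(name, config, true) { err, created ->
        if (err != null) {
            // err describes why the connector could not be created
            println("Error creating connector: ${err.message}")
        } else {
            // created.created(): true if newly created; created.result(): the ConnectorInfo
            println("Instantiated: ${created.created()} result: ${created.result()}")
        }
    }
}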
@@ -290,7 +254,7 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
         val rest = RestServer(config)
         rest.initializeServer()
         val advertisedUrl: URI = rest.advertisedUrl()
-        val workerId: String = advertisedUrl.getHost().toString() + ":" + advertisedUrl.getPort()
+        val workerId: String = advertisedUrl.host.toString() + ":" + advertisedUrl.port

         val worker = Worker(
             workerId, time, plugins, config, FileOffsetBackingStore(),

@@ -384,7 +348,6 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
         // Give the Streams application a unique name. The name must be unique in the Kafka cluster
         // against which the application is run.
         streamsConfiguration.putAll(extra)
-        // logger.info("Starting instance in storagePath: {}", storagePath)
         streamsConfiguration.putIfAbsent(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().javaClass)
         streamsConfiguration.putIfAbsent(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().javaClass)
         streamsConfiguration.putIfAbsent(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StreamOperators.replicationSerde.javaClass)

@@ -395,10 +358,8 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
         streamsConfiguration.putIfAbsent(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 7200000)
         streamsConfiguration.putIfAbsent(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100)
         streamsConfiguration.putIfAbsent(ProducerConfig.COMPRESSION_TYPE_CONFIG, "lz4")
-        // streamsConfiguration.putIfAbsent(StreamsConfig.STATE_DIR_CONFIG,storagePath)
         streamsConfiguration.putIfAbsent(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1)
         streamsConfiguration.putIfAbsent(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 0)
-        streamsConfiguration.putIfAbsent(StreamsConfig.RETRIES_CONFIG, 50)
         streamsConfiguration.putIfAbsent(StreamsConfig.REPLICATION_FACTOR_CONFIG, CoreOperators.topicReplicationCount())
         streamsConfiguration.putIfAbsent(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor::class.java)

@@ -423,7 +384,6 @@ class Stream(override val topologyContext: TopologyContext, val topologyConstruc
         runBlocking {
             io.floodplain.runtime.run(this@Stream, args, { after(it) }, { _, topologyContext -> after(topologyContext) })
         }
-        // io.floodplain.runtime.runWithArguments(this@Stream, arrayOf(*args), { after(it) }, { after() })
     }

     override val rootTopology: Stream
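With renderAndRun removed, renderAndSchedule remains the public entry point. A hypothetical usage sketch; the resource name and the pre-built Stream are assumptions, not from this commit:

import io.floodplain.kotlindsl.Stream

// Hypothetical: schedule a previously built Stream from a classpath properties
// file that contains at least bootstrap.servers.
fun launch(stream: Stream) {
    stream.javaClass.getResourceAsStream("/kafka.properties")?.use { input ->
        stream.renderAndSchedule(connectorURL = null, settings = input, force = false)
    }
}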

floodplain-dsl/src/main/kotlin/runtime/Runtime.kt (-1)

@@ -24,7 +24,6 @@ import io.floodplain.kotlindsl.LocalContext
 import io.floodplain.kotlindsl.Stream
 import io.floodplain.streams.api.TopologyContext
 import java.io.OutputStreamWriter
-import java.lang.RuntimeException
 import java.net.URL
 import java.nio.charset.StandardCharsets

floodplain-dsl/src/test/kotlin/io/floodplain/kotlindsl/TestTopology.kt (-1)

@@ -27,7 +27,6 @@ import io.floodplain.streams.api.Topic
 import io.floodplain.streams.remotejoin.StoreStateProcessor
 import kotlinx.coroutines.delay
 import org.apache.kafka.streams.state.KeyValueStore
-import java.lang.IllegalArgumentException
 import java.math.BigDecimal
 import java.time.Duration
 import kotlin.test.Test

floodplain-example/src/main/kotlin/io/floodplain/kotlindsl/example/CreatePublicIdTransformer.java (-1)

@@ -1,7 +1,6 @@
 package io.floodplain.kotlindsl.example;

 import java.math.BigInteger;
-import java.util.Map;

 public class CreatePublicIdTransformer {
     private static final int MAX_INT = Integer.MAX_VALUE;
