
Commit 368a52a

Merge branch '2.0.1' into splunk_connector

andmarios committed Jan 2, 2019
2 parents: e01bdb7 + 2995703
Showing 8 changed files with 60 additions and 20 deletions.
22 changes: 11 additions & 11 deletions Dockerfile
@@ -18,14 +18,14 @@ WORKDIR /
ARG DEVARCH_USER
ARG DEVARCH_PASS
ARG ARCHIVE_SERVER=https://archive.landoop.com
-ARG LKD_VERSION=2.0.0
+ARG LKD_VERSION=2.0.1

############
# Add kafka/
############

# Add Apache Kafka (includes Connect and Zookeeper)
-ARG KAFKA_VERSION=2.0.0
+ARG KAFKA_VERSION=2.0.1
ARG KAFKA_LVERSION="${KAFKA_VERSION}-L0"
ARG KAFKA_URL="${ARCHIVE_SERVER}/lkd/packages/kafka/kafka-2.12-${KAFKA_LVERSION}-lkd.tar.gz"

@@ -35,13 +35,13 @@ RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_URL" -O /opt/kafka.tar.gz \
&& rm -rf /opt/kafka.tar.gz

# Add Schema Registry and REST Proxy
-ARG REGISTRY_VERSION=5.0.0-lkd-r0
+ARG REGISTRY_VERSION=5.0.1-lkd-r0
ARG REGISTRY_URL="${ARCHIVE_SERVER}/lkd/packages/schema-registry/schema-registry-${REGISTRY_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$REGISTRY_URL" -O /opt/registry.tar.gz \
&& tar --no-same-owner -xzf /opt/registry.tar.gz -C /opt/ \
&& rm -rf /opt/registry.tar.gz

-ARG REST_VERSION=5.0.0-lkd-r0
+ARG REST_VERSION=5.0.1-lkd-r0
ARG REST_URL="${ARCHIVE_SERVER}/lkd/packages/rest-proxy/rest-proxy-${REST_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$REST_URL" -O /opt/rest.tar.gz \
&& tar --no-same-owner -xzf /opt/rest.tar.gz -C /opt/ \
@@ -59,8 +59,8 @@ RUN echo -e 'access.control.allow.methods=GET,POST,PUT,DELETE,OPTIONS\naccess.co
#################

# Add Stream Reactor and needed components
-ARG STREAM_REACTOR_VERSION=1.1.0
-ARG KAFKA_VERSION_4SR=1.1.0
+ARG STREAM_REACTOR_VERSION=1.2.0
+ARG KAFKA_VERSION_4SR=2.0.0
ARG STREAM_REACTOR_URL="https://archive.landoop.com/lkd/packages/connectors/stream-reactor/stream-reactor-${STREAM_REACTOR_VERSION}_connect${KAFKA_VERSION_4SR}.tar.gz"
ARG ELASTICSEARCH_2X_VERSION=2.4.6
ARG ACTIVEMQ_VERSION=5.12.3
@@ -92,7 +92,7 @@ RUN wget $DEVARCH_USER $DEVARCH_PASS "${STREAM_REACTOR_URL}" -O /stream-reactor.
done \
&& rm /calcite-linq4j-${CALCITE_LINQ4J_VERSION}.jar \
&& mkdir -p /opt/landoop/kafka/share/java/landoop-common \
-&& for file in $(find /opt/landoop/connectors/stream-reactor -maxdepth 2 -type f -exec basename {} \; | grep -Ev "scala-logging|kafka-connect-common" | sort | uniq -c | grep -E "^\s+21 " | awk '{print $2}' ); do \
+&& for file in $(find /opt/landoop/connectors/stream-reactor -maxdepth 2 -type f -exec basename {} \; | grep -Ev "scala-logging|kafka-connect-common|scala-" | sort | uniq -c | grep -E "^\s+22 " | awk '{print $2}' ); do \
cp /opt/landoop/connectors/stream-reactor/kafka-connect-elastic/$file /opt/landoop/kafka/share/java/landoop-common/; \
rm -f /opt/landoop/connectors/stream-reactor/kafka-connect-*/$file; \
done \
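Two things change in the jar de-duplication loop above: names matching scala- join the exclusion list, and the duplicate count the loop looks for rises from 21 to 22. Our reading, not stated in the commit: the count must equal the number of Stream Reactor connector directories, and release 1.2.0 adds one more connector (the new Hive connector mentioned in the release notes below), so a jar is only treated as common — kept once in landoop-common and deleted from each connector — if all 22 connectors ship it. A sketch of the detection pipeline under that assumption:

    find /opt/landoop/connectors/stream-reactor -maxdepth 2 -type f -exec basename {} \; |
      grep -Ev "scala-logging|kafka-connect-common|scala-" |  # jars handled separately
      sort | uniq -c |                                        # count copies of each jar name
      grep -E "^\s+22 " |                                     # keep names present in all 22 connectors
      awk '{print $2}'                                        # print just the jar file name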
@@ -116,7 +116,7 @@ RUN mkdir -p /opt/landoop/connectors/third-party/kafka-connect-twitter \
&& wget "$TWITTER_CONNECTOR_URL" -P /opt/landoop/connectors/third-party/kafka-connect-twitter

## Kafka Connect JDBC
-ARG KAFKA_CONNECT_JDBC_VERSION=5.0.0-lkd-r0
+ARG KAFKA_CONNECT_JDBC_VERSION=5.0.1-lkd-r0
ARG KAFKA_CONNECT_JDBC_URL="${ARCHIVE_SERVER}/lkd/packages/connectors/third-party/kafka-connect-jdbc/kafka-connect-jdbc-${KAFKA_CONNECT_JDBC_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_JDBC_URL" \
-O /opt/kafka-connect-jdbc.tar.gz \
@@ -126,7 +126,7 @@ RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_JDBC_URL" \
&& rm -rf /opt/kafka-connect-jdbc.tar.gz

## Kafka Connect ELASTICSEARCH
-ARG KAFKA_CONNECT_ELASTICSEARCH_VERSION=5.0.0-lkd-r0
+ARG KAFKA_CONNECT_ELASTICSEARCH_VERSION=5.0.1-lkd-r0
ARG KAFKA_CONNECT_ELASTICSEARCH_URL="${ARCHIVE_SERVER}/lkd/packages/connectors/third-party/kafka-connect-elasticsearch/kafka-connect-elasticsearch-${KAFKA_CONNECT_ELASTICSEARCH_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_ELASTICSEARCH_URL" \
-O /opt/kafka-connect-elasticsearch.tar.gz \
@@ -136,7 +136,7 @@ RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_ELASTICSEARCH_URL" \
&& rm -rf /opt/kafka-connect-elasticsearch.tar.gz

## Kafka Connect HDFS
-ARG KAFKA_CONNECT_HDFS_VERSION=5.0.0-lkd-r0
+ARG KAFKA_CONNECT_HDFS_VERSION=5.0.1-lkd-r0
ARG KAFKA_CONNECT_HDFS_URL="${ARCHIVE_SERVER}/lkd/packages/connectors/third-party/kafka-connect-hdfs/kafka-connect-hdfs-${KAFKA_CONNECT_HDFS_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_HDFS_URL" \
-O /opt/kafka-connect-hdfs.tar.gz \
@@ -146,7 +146,7 @@ RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_HDFS_URL" \
&& rm -rf /opt/kafka-connect-hdfs.tar.gz

# Kafka Connect S3
-ARG KAFKA_CONNECT_S3_VERSION=5.0.0-lkd-r0
+ARG KAFKA_CONNECT_S3_VERSION=5.0.1-lkd-r0
ARG KAFKA_CONNECT_S3_URL="${ARCHIVE_SERVER}/lkd/packages/connectors/third-party/kafka-connect-s3/kafka-connect-s3-${KAFKA_CONNECT_S3_VERSION}.tar.gz"
RUN wget $DEVARCH_USER $DEVARCH_PASS "$KAFKA_CONNECT_S3_URL" \
-O /opt/kafka-connect-s3.tar.gz \
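Every version bumped above is a Dockerfile ARG, so a rebuild can pin different package versions without editing the file itself. A hypothetical invocation (the image tag is our own choice):

    docker build \
      --build-arg LKD_VERSION=2.0.1 \
      --build-arg KAFKA_VERSION=2.0.1 \
      --build-arg STREAM_REACTOR_VERSION=1.2.0 \
      -t fast-data-dev:2.0.1 .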
14 changes: 11 additions & 3 deletions filesystem/usr/local/share/landoop/sample-data/put.sh
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
for key in 0 1 2 3 4 5; do
# Create topic with x partitions and a retention time of 10 years.
@@ -19,9 +21,11 @@ done

# Insert data with keys
for key in 0 1 4 5; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r 5000 "${DATA[key]}" | \
kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property key.schema="$(cat "${KEYS[key]}")" \
@@ -32,9 +36,11 @@ done
# Insert data without keys
# shellcheck disable=SC2043
for key in 2; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r 5000 "${DATA[key]}" | \
kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property value.schema="$(cat "${VALUES[key]}")" \
--property schema.registry.url=http://localhost:${REGISTRY_PORT}
@@ -43,10 +49,12 @@ done
# Insert json data with text keys converted to json keys
# shellcheck disable=SC2043
for key in 3; do
+unset KAFKA_OPTS
+unset KAFKA_JMX_OPTS
/usr/local/bin/normcat -r 5000 "${DATA[key]}" | \
sed -r -e 's/([A-Z0-9-]*):/{"serial_number":"\1"}#/' | \
kafka-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property "key.separator=#"
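The ${GENERATOR_BROKER:-localhost} default keeps put.sh's old behaviour when the variable is unset, while letting the data generators target a broker other than the local one. A hedged usage example (the hostname is hypothetical):

    ./put.sh                                     # unchanged: produces to localhost:${BROKER_PORT}
    GENERATOR_BROKER=broker-1.internal ./put.sh  # produces to broker-1.internal:${BROKER_PORT}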
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
# shellcheck disable=SC2043
for key in 0; do
@@ -22,9 +24,11 @@ done
# Insert data with key
# shellcheck disable=SC2043
for key in 0; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r "${RATES[key]}" -j "${JITTER[key]}" -p "${PERIOD[key]}" -c -v "${DATA[key]}" | \
SCHEMA_REGISTRY_HEAP_OPTS="-Xmx50m" kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property key.schema="$(cat "${KEYS[key]}")" \
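The unset SCHEMA_REGISTRY_OPTS / SCHEMA_REGISTRY_JMX_OPTS pairs added across these generator scripts likely share one motive (our inference; the commit message does not say): kafka-avro-console-producer starts a JVM through the Schema Registry run scripts, so JMX options exported for the long-running registry — a fixed JMX port in particular — would be inherited by every producer, and the second JVM to bind that port dies. A sketch of the failure mode, with a hypothetical port and topic:

    export SCHEMA_REGISTRY_JMX_OPTS="-Dcom.sun.management.jmxremote.port=9581"
    kafka-avro-console-producer --broker-list localhost:9092 --topic demo \
      --property value.schema='{"type":"string"}' &  # first JVM binds port 9581
    kafka-avro-console-producer --broker-list localhost:9092 --topic demo \
      --property value.schema='{"type":"string"}'    # second JVM: port already in use
    unset SCHEMA_REGISTRY_JMX_OPTS                   # the guard these scripts now apply first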
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
# shellcheck disable=SC2043
for key in 1; do
@@ -22,9 +24,11 @@ done
# Insert data with key
# shellcheck disable=SC2043
for key in 1; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r "${RATES[key]}" -j "${JITTER[key]}" -p "${PERIOD[key]}" -c -v "${DATA[key]}" | \
SCHEMA_REGISTRY_HEAP_OPTS="-Xmx50m" kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property key.schema="$(cat "${KEYS[key]}")" \
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
# shellcheck disable=SC2043
for key in 3; do
@@ -22,10 +24,12 @@ done
# Insert data with text key converted to json key
# shellcheck disable=SC2043
for key in 3; do
+unset KAFKA_OPTS
+unset KAFKA_JMX_OPTS
/usr/local/bin/normcat -r "${RATES[key]}" -j "${JITTER[key]}" -p "${PERIOD[key]}" -c -v "${DATA[key]}" | \
sed -r -e 's/([A-Z0-9-]*):/{"serial_number":"\1"}#/' | \
KAFKA_HEAP_OPTS="-Xmx50m" kafka-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property "key.separator=#"
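The sed stage above rewrites the plain-text key emitted by normcat into a JSON key and appends the # that kafka-console-producer is told to treat as its key separator (key.separator=#). A quick illustration with a made-up record:

    echo 'XB-1234:42.7' | sed -r -e 's/([A-Z0-9-]*):/{"serial_number":"\1"}#/'
    # output: {"serial_number":"XB-1234"}#42.7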
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
# shellcheck disable=SC2043
for key in 2; do
@@ -22,9 +24,11 @@ done
# Insert data without key
# shellcheck disable=SC2043
for key in 2; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r "${RATES[key]}" -j "${JITTER[key]}" -p "${PERIOD[key]}" -c -v "${DATA[key]}" | \
SCHEMA_REGISTRY_HEAP_OPTS="-Xmx50m" kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property value.schema="$(cat "${VALUES[key]}")" \
--property schema.registry.url=http://localhost:${REGISTRY_PORT}
@@ -3,6 +3,8 @@
# shellcheck source=variables.env
source variables.env

+GENERATOR_BROKER=${GENERATOR_BROKER:-localhost}

# Create Topics
# shellcheck disable=SC2043
for key in 4 5; do
@@ -23,9 +25,11 @@ done
# Insert Grid Data
# shellcheck disable=SC2043
for key in 5; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -v "${DATA[key]}" | \
SCHEMA_REGISTRY_HEAP_OPTS="-Xmx50m" kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property key.schema="$(cat "${KEYS[key]}")" \
@@ -36,9 +40,11 @@ done
# Insert data with key
# shellcheck disable=SC2043
for key in 4; do
+unset SCHEMA_REGISTRY_OPTS
+unset SCHEMA_REGISTRY_JMX_OPTS
/usr/local/bin/normcat -r "${RATES[key]}" -j "${JITTER[key]}" -p "${PERIOD[key]}" -c -v "${DATA[key]}" | \
SCHEMA_REGISTRY_HEAP_OPTS="-Xmx50m" kafka-avro-console-producer \
-  --broker-list localhost:${BROKER_PORT} \
+  --broker-list ${GENERATOR_BROKER}:${BROKER_PORT} \
--topic "${TOPICS[key]}" \
--property parse.key=true \
--property key.schema="$(cat "${KEYS[key]}")" \
10 changes: 10 additions & 0 deletions filesystem/var/www/index.html
@@ -280,6 +280,16 @@ <h5>Running...</h5>
<md-card-content>
<div flex style="font-family:Tahoma;font-size:12px;color:#666;padding-left:10px;margin:10px;font-weight:normal;overflow-y:auto;max-height:300px;">
<ul style="padding-left:10px;">
+<li>
+<h4>Kafka 2.0 and Stream Reactor 1.2.0 take the latest tag</h4>
+<p>
+The latest tag will now bring Kafka 2.0.1. It will also bring the latest Stream Reactor release, 1.2.0, which
+includes enhancements, fixes and a brand new connector we know you will love: the Hive connector.
+</p>
+<p>
+If you are eager to test Kafka 2.1.0, you may use the 2.1 and 2.1.0 tags.
+</p>
+</li>
<li>
<h4>Kafka 1.1 earns the latest tag</h4>
<p>
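In concrete terms, the tag scheme described in the release note above maps to pulls like these (assuming the image is published as landoop/fast-data-dev, this repository's usual image name):

    docker pull landoop/fast-data-dev:latest  # Kafka 2.0.1 + Stream Reactor 1.2.0
    docker pull landoop/fast-data-dev:2.1     # Kafka 2.1.0, per the note above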
