
Commit

Parent pom (#4)
* Corrected license header. Refactored to use the new parent pom.

* Change to use the common pipeline.
jcustenborder authored Jan 24, 2017
1 parent 632aa72 commit 6b04934
Showing 33 changed files with 157 additions and 508 deletions.
24 changes: 3 additions & 21 deletions Jenkinsfile
@@ -1,23 +1,5 @@
#!groovy
-node {
-  def mvnBuildNumber = "0.1.${env.BUILD_NUMBER}"
+@Library('jenkins-pipeline') import com.github.jcustenborder.jenkins.pipeline.KafkaConnectPipeline

-  def mvnHome = tool 'M3'
-
-  checkout scm
-
-  if (env.BRANCH_NAME == 'master') {
-    stage 'versioning'
-    sh "${mvnHome}/bin/mvn -B versions:set -DgenerateBackupPoms=false -DnewVersion=${mvnBuildNumber}"
-  }
-
-  stage 'build'
-  sh "${mvnHome}/bin/mvn -B -P maven-central clean verify package"
-
-  junit '**/target/surefire-reports/TEST-*.xml'
-
-  if (env.BRANCH_NAME == 'master') {
-    stage 'publishing'
-    sh "${mvnHome}/bin/mvn -B -P github,maven-central deploy"
-  }
-}
+def pipe = new KafkaConnectPipeline()
+pipe.execute()
10 changes: 8 additions & 2 deletions README.md
@@ -1,5 +1,11 @@
-This connector provides support for receiving messages via syslog. The 0.0.2 version did break compatibility with existing schemas.
-A namespace was added to the connect schema.
+## Introduction
+
+This connector provides support for receiving messages via syslog.
+
+## Important
+
+The 0.2 release breaks compatibility with the existing schema.
+

| Name | Description | Type | Default | Valid Values | Importance |
|------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------|---------|--------------|------------|
11 changes: 8 additions & 3 deletions bin/debug.sh
@@ -15,8 +15,13 @@
# limitations under the License.
#

-mvn clean package

-export CLASSPATH="$(find target/ -type f -name '*.jar'| grep '\-package' | tr '\n' ':')"
+: ${SUSPEND:='n'}
+
+set -e
+
+mvn clean package
+export KAFKA_JMX_OPTS="-Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=${SUSPEND},address=5005"
+export CLASSPATH="$(find target/kafka-connect-target/share/java -type f -name '*.jar' | tr '\n' ':')"

-$CONFLUENT_HOME/bin/connect-standalone connect/connect-avro-docker.properties config/udpsyslog.properties
+$CONFLUENT_HOME/bin/connect-standalone config/connect-avro-docker.properties config/udpsyslog.properties
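
The rewritten script exposes a SUSPEND toggle so the worker JVM can wait for a remote debugger on port 5005 before starting. A minimal usage sketch, assuming a local Confluent Platform install at /opt/confluent (the path and the IDE step are illustrative, not part of this commit):

# Launch the standalone worker, holding the JVM until a debugger attaches on port 5005.
# CONFLUENT_HOME=/opt/confluent is an assumed install location for illustration only.
export CONFLUENT_HOME=/opt/confluent
SUSPEND=y ./bin/debug.sh
# Then attach a remote JVM debug session to localhost:5005 from the IDE.
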
86 changes: 0 additions & 86 deletions checkstyle/checkstyle.xml

This file was deleted.

config/connect-avro-docker.properties
@@ -39,4 +39,4 @@ offset.storage.file.filename=/tmp/connect.offsets
# that will report audit data that can be displayed and analyzed in Confluent Control Center
# producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
# consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
-rest.port=10001
+rest.port=10002
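
With the worker's REST listener moved to port 10002, the Kafka Connect REST API gives a quick health check once a connector is loaded; a sketch, assuming the standalone worker from bin/debug.sh is running locally:

# List the connectors registered on the standalone worker (rest.port=10002 above).
curl -s http://localhost:10002/connectors
# Inspect the connector and task state for the udpsyslog connector it loads.
curl -s http://localhost:10002/connectors/udpsyslog/status
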
42 changes: 42 additions & 0 deletions config/connect-json-docker.properties
@@ -0,0 +1,42 @@
#
# Copyright © 2016 Jeremy Custenborder ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Sample configuration for a standalone Kafka Connect worker that uses JSON serialization and
# integrates with the Schema Registry. This sample configuration assumes a local installation of
# Confluent Platform with all services running on their default ports.
# Bootstrap Kafka servers. If multiple servers are specified, they should be comma-separated.
bootstrap.servers=confluent:9092
# The converters specify the format of data in Kafka and how to translate it into Connect data.
# Every Connect user will need to configure these based on the format they want their data in
# when loaded from or stored into Kafka
key.converter=org.apache.kafka.connect.json.JsonConverter
key.converter.schema.registry.url=http://confluent:8081
value.converter=org.apache.kafka.connect.json.JsonConverter
value.converter.schema.registry.url=http://confluent:8081
# The internal converter used for offsets and config data is configurable and must be specified,
# but most users will always want to use the built-in default. Offset and config data is never
# visible outside of Connect in this format.
internal.key.converter=org.apache.kafka.connect.json.JsonConverter
internal.value.converter=org.apache.kafka.connect.json.JsonConverter
internal.key.converter.schemas.enable=false
internal.value.converter.schemas.enable=false
# Local storage file for offset data
offset.storage.file.filename=/tmp/connect.offsets
# Confluent Control Center Integration -- uncomment these lines to enable Kafka client interceptors
# that will report audit data that can be displayed and analyzed in Confluent Control Center
# producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
# consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
rest.port=10002
2 changes: 1 addition & 1 deletion config/ssltcpsyslog.properties
@@ -16,7 +16,7 @@

name=tcpsyslog
tasks.max=2
-connector.class=io.confluent.kafka.connect.syslog.source.SSLTCPSyslogSourceConnector
+connector.class=com.github.jcustenborder.kafka.connect.syslog.SSLTCPSyslogSourceConnector
kafka.topic=syslog-tcp
syslog.port=5514
syslog.keystore=/etc/security/keystore.tks
2 changes: 1 addition & 1 deletion config/tcpsyslog.properties
@@ -16,7 +16,7 @@

name=tcpsyslog
tasks.max=1
-connector.class=io.confluent.kafka.connect.syslog.source.TCPSyslogSourceConnector
+connector.class=com.github.jcustenborder.kafka.connect.syslog.TCPSyslogSourceConnector
kafka.topic=syslog-tcp
syslog.port=5514
syslog.reverse.dns.remote.ip=true
2 changes: 1 addition & 1 deletion config/udpsyslog.properties
@@ -16,7 +16,7 @@

name=udpsyslog
tasks.max=1
-connector.class=io.confluent.kafka.connect.syslog.source.UDPSyslogSourceConnector
+connector.class=com.github.jcustenborder.kafka.connect.syslog.UDPSyslogSourceConnector
kafka.topic=syslog-udp
syslog.port=5514
syslog.reverse.dns.remote.ip=true
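
Once the UDP connector is running, a quick way to exercise it is to send a syslog message at the configured port; a sketch, assuming the util-linux logger utility is available:

# Send one test message over UDP to the port configured above (syslog.port=5514).
# The connector should write the parsed record to the syslog-udp topic.
logger --udp --server localhost --port 5514 "test message for kafka-connect-syslog"
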
13 changes: 6 additions & 7 deletions bin/suspend.sh → config/udpsyslog2.properties
100755 → 100644
@@ -1,4 +1,3 @@
-#!/usr/bin/env bash
#
# Copyright © 2016 Jeremy Custenborder ([email protected])
#
@@ -15,9 +14,9 @@
# limitations under the License.
#

-mvn clean package
-
-export CLASSPATH="$(find target/ -type f -name '*.jar'| grep '\-package' | tr '\n' ':')"
-export KAFKA_JMX_OPTS='-Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005'
-
-$CONFLUENT_HOME/bin/connect-standalone connect/connect-avro-docker.properties config/salesforce.properties
+name=udpsyslog-json
+tasks.max=1
+connector.class=com.github.jcustenborder.kafka.connect.syslog.UDPSyslogSourceConnector
+kafka.topic=syslog-udp-json
+syslog.port=5514
+syslog.reverse.dns.remote.ip=true
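
The renamed properties file pairs with the new JSON worker configuration added above; a possible standalone invocation, mirroring the pattern used in bin/debug.sh and assuming CONFLUENT_HOME points at a Confluent Platform install:

# Run the JSON-converter worker together with the JSON-flavored UDP syslog connector,
# both of which are introduced (or renamed into place) by this commit.
$CONFLUENT_HOME/bin/connect-standalone \
  config/connect-json-docker.properties \
  config/udpsyslog2.properties
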

0 comments on commit 6b04934
