
Commit 0bfbba6

Author: Donald Tregonning

Merge pull request splunk#113 from splunk/develop
Merge for Release branch creation

2 parents 77cd5ed + 27dd8eb commit 0bfbba6

File tree

89 files changed, +10131 -2 lines changed


.gitignore

Lines changed: 7 additions & 0 deletions
@@ -20,3 +20,10 @@
 
 # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
 hs_err_pid*
+target/*
+.idea/*
+
+kafka-connect-splunk/
+pom.xml.versionsBackup
+.classpath
+.project

README.md

Lines changed: 652 additions & 2 deletions
Large diffs are not rendered by default.

build.sh

Lines changed: 87 additions & 0 deletions
@@ -0,0 +1,87 @@
#!/bin/bash

# variables
kafkaversion=0.11.0.2
builddir=/tmp/kafka-connect-splunk-build/kafka-connect-splunk

githash=`git rev-parse --short HEAD 2>/dev/null | sed "s/\(.*\)/@\1/"` # get current git hash
gitbranch=`git rev-parse --abbrev-ref HEAD` # get current git branch
gitversion=`git describe --abbrev=0 --tags 2>/dev/null` # returns the latest tag from current commit
jarversion=${gitversion}

# if no version found from git tag, it is a dev build
if [[ -z "$gitversion" ]]; then
    gitversion="dev"
    jarversion=${gitversion}-SNAPSHOT
fi

packagename=kafka-connect-splunk-${gitversion}.tar.gz

# record git info in version.properties file under resources folder
resourcedir='src/main/resources'
/bin/rm -f ${resourcedir}/version.properties
echo githash=${githash} >> ${resourcedir}/version.properties
echo gitbranch=${gitbranch} >> ${resourcedir}/version.properties
echo gitversion=${gitversion} >> ${resourcedir}/version.properties


curdir=`pwd`

/bin/rm -rf ${builddir}
mkdir -p ${builddir}/connectors
mkdir -p ${builddir}/bin
mkdir -p ${builddir}/config
mkdir -p ${builddir}/libs

# Build the package
echo "Building the connector package ..."
mvn versions:set -DnewVersion=${jarversion}
mvn package > /dev/null

# Copy over the package
echo "Copy over kafka-connect-splunk jar ..."
cp target/kafka-connect-splunk-${jarversion}.jar ${builddir}/connectors
cp config/* ${builddir}/config
cp README.md ${builddir}
cp LICENSE ${builddir}

# Download kafka
echo "Downloading kafka_2.11-${kafkaversion} ..."
wget -q https://archive.apache.org/dist/kafka/${kafkaversion}/kafka_2.11-${kafkaversion}.tgz -P ${builddir}
cd ${builddir} && tar xzf kafka_2.11-${kafkaversion}.tgz

# Copy over kafka connect runtime
echo "Copy over kafka connect runtime ..."
cp kafka_2.11-${kafkaversion}/bin/connect-distributed.sh ${builddir}/bin
cp kafka_2.11-${kafkaversion}/bin/kafka-run-class.sh ${builddir}/bin
cp kafka_2.11-${kafkaversion}/config/connect-log4j.properties ${builddir}/config
cp kafka_2.11-${kafkaversion}/libs/*.jar ${builddir}/libs

# Download commons-logging jar
echo "Downloading commons-logging jar"
wget -q http://central.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar -P ${builddir}/libs/

# Clean up
echo "Clean up ..."
/bin/rm -rf kafka_2.11-${kafkaversion}
/bin/rm -f kafka_2.11-${kafkaversion}.tgz

# Package up
echo "Package ${packagename} ..."
cd .. && tar czf ${packagename} kafka-connect-splunk

echo "Copy package ${packagename} to ${curdir} ..."
cp ${packagename} ${curdir}

/bin/rm -rf kafka-connect-splunk ${packagename}
echo "Done with build & packaging"

echo

cat << EOP
To run kafka-connect-splunk, do the following steps:
1. Untar the package: tar xzf kafka-connect-splunk.tar.gz
2. Configure config/connect-distributed.properties according to your env
3. Run: bash bin/connect-distributed.sh config/connect-distributed.properties
4. Use the Kafka Connect REST API to create data collection tasks
EOP
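
Step 4 of the usage note printed by build.sh refers to the Kafka Connect REST API served by the distributed worker. The following is a minimal sketch of that step, assuming the worker listens on the default localhost:8083 and that the connector class and splunk.hec.* config keys match what this package's README documents; the topic name, task count, and HEC endpoint/token are placeholders for your environment.

# Hedged example for step 4: create a data collection task via the Connect REST API.
# Worker address, connector class, and config keys are assumptions -- verify them
# against the README and config files shipped in the package.
curl -s -X POST http://localhost:8083/connectors \
     -H "Content-Type: application/json" \
     -d '{
           "name": "splunk-sink-example",
           "config": {
             "connector.class": "com.splunk.kafka.connect.SplunkSinkConnector",
             "topics": "my-kafka-topic",
             "tasks.max": "3",
             "splunk.hec.uri": "https://my-splunk-hec:8088",
             "splunk.hec.token": "<HEC_TOKEN>"
           }
         }'

Once created, the connector's state can be checked with a GET against the standard Connect endpoint, e.g. http://localhost:8083/connectors/splunk-sink-example/status.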

ci/Jenkinsfile

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
@Library('jenkinstools@master') _

import com.splunk.jenkins.DockerRequest;
import com.splunk.tool.plugin.docker.extension.BadDockerExitCode;

def dockerReq = new DockerRequest(steps,
                                  currentBuild,
                                  env,
                                  [imageName: "repo.splunk.com/splunk/products/splact",
                                   userId: "10777",
                                   repoName: "[email protected]:splunk/kafka-connect-splunk.git",
                                   runner: "yarn",
                                   remotePath: "/build"])


withSplunkWrapNode("master") {
    try {
        stage("run orca") {
            withCredentials([file(credentialsId: 'srv_releases_orca', variable: 'ORCA_CREDENTIALS')]) {
                sh "tar -ovxf $ORCA_CREDENTIALS";
                splunkPrepareAndCheckOut request: dockerReq,
                                         files: "${WORKSPACE}/.orca, ${WORKSPACE}/.ssh";
            }
            splunkRunScript request: dockerReq,
                            script:
                            """
                            pip install splunk_orca==0.8.0 -i https://repo.splunk.com/artifactory/api/pypi/pypi-virtual/simple
                            cd ci
                            python kafka_orca_gen.py --data_gen_size 1 --data_gen_eps 100000 --broker_size 3 --zookeeper_size 3 --kafka_connect_size 1 --kafka_connect_max_tasks 20 --indexer_size 3 --default_partitions 10 --perf 0
                            splunk_orca create --sc kafka-connect
                            """;
        }
    }
    catch (BadDockerExitCode e) {
        currentBuild.result = "FAILURE";
        echo "Exception Caught: ${e.getMessage()}";
        echo "Stack Trace: ${e.printStackTrace()}";
    }
    catch (Exception e) {
        currentBuild.result = "FAILURE";
        echo "Exception Caught: ${e.getMessage()}";
        echo "Stack Trace: ${e.printStackTrace()}";
    }
    finally {
        steps.cleanWs();
    }
}
