Diffstat (limited to 'src/main/docker')
-rw-r--r--  src/main/docker/Dockerfile | 39
-rw-r--r--  src/main/docker/broker-list.sh | 5
-rw-r--r--  src/main/docker/cadi.properties | 20
-rw-r--r--  src/main/docker/consumer.properties | 41
-rw-r--r--  src/main/docker/create-topics.sh | 36
-rw-r--r--  src/main/docker/download-kafka.sh | 3
-rw-r--r--  src/main/docker/include/etc/confluent/docker/ensure | 11
-rw-r--r--  src/main/docker/include/etc/confluent/docker/kafka.properties.template | 19
-rw-r--r--  src/main/docker/include/etc/confluent/docker/log4j.properties.template | 7
-rw-r--r--  src/main/docker/include/etc/confluent/docker/run | 2
-rw-r--r--  src/main/docker/kafka-run-class.sh | 245
-rw-r--r--  src/main/docker/kafka_server_jaas.conf | 12
-rw-r--r--  src/main/docker/mmagent.config | 5
-rw-r--r--  src/main/docker/producer.properties | 70
-rw-r--r--  src/main/docker/start-kafka.sh | 149
-rw-r--r--  src/main/docker/start-kafkaOrMirrorMaker.sh | 7
-rw-r--r--  src/main/docker/start-mirrormaker.sh | 150
17 files changed, 41 insertions, 780 deletions
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
index e3becb8..930f5ba 100644
--- a/src/main/docker/Dockerfile
+++ b/src/main/docker/Dockerfile
@@ -1,39 +1,26 @@
-FROM confluentinc/cp-base:5.3.1
-
-# allow arg override of required env params
-ARG KAFKA_ZOOKEEPER_CONNECT
-ENV KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT}
-ARG KAFKA_ADVERTISED_LISTENERS
-ENV KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS}
+FROM confluentinc/cp-kafka:6.2.0
ENV COMPONENT=kafka \
- KAFKA_USER=mrkafka
-
-RUN echo "===> installing ${COMPONENT}..." \
- && wget -qO - http://packages.confluent.io/deb/3.0/archive.key | apt-key add - \
- && echo "deb [arch=amd64] http://packages.confluent.io/deb/3.0 stable main" | tee -a /etc/apt/sources.list \
- && apt-key update && apt-get update && apt-get install -y confluent-kafka-2.11 --force-yes \
- \
- && echo "===> clean up ..." \
- && apt-get autoremove -y && apt-get clean && rm -rf /tmp/* /var/lib/apt/lists/* \
- \
- && echo "===> Setting up ${COMPONENT} dirs..." \
- && mkdir -p /var/lib/${COMPONENT}/data /etc/${COMPONENT}/secrets/cert /etc/${COMPONENT}/secrets/jaas /etc/${COMPONENT}/data /var/log/kafka /var/log/confluent \
- && chmod -R ag+w /etc/${COMPONENT} /var/lib/${COMPONENT}/data /etc/${COMPONENT}/secrets /etc/${COMPONENT}/data /var/log/kafka /var/log/confluent \
- && chown -R root:root /var/log/kafka /var/log/confluent /var/lib/kafka /var/lib/zookeeper
-
-COPY include/etc/confluent/docker /etc/confluent/docker
-RUN chmod -R +x /etc/confluent/docker
+ KAFKA_USER=mrkafka \
+ KAFKA_GROUP=onap
COPY org.onap.dmaap.mr.trust.jks \
org.onap.dmaap.mr.p12 \
org.onap.dmaap.mr.keyfile \
/etc/${COMPONENT}/secrets/cert/
-COPY kafka11aaf-jar-with-dependencies.jar /usr/share/java/${COMPONENT}/
+USER root
+
+RUN userdel -r appuser && groupadd $KAFKA_GROUP && useradd $KAFKA_USER -u 1000 -G 1000,$KAFKA_GROUP
+
+WORKDIR /home/$KAFKA_USER
+COPY include/etc/confluent/docker/* /etc/confluent/docker/
+RUN chmod -R +x /etc/confluent/docker \
+&& mkdir -p /etc/${COMPONENT}/data /etc/${COMPONENT}/secrets \
+&& chown -R $KAFKA_USER:$KAFKA_GROUP /var/lib/${COMPONENT} /etc/${COMPONENT} /etc/confluent/docker /var/log/${COMPONENT} /var/lib/${COMPONENT} /var/log/confluent
-RUN useradd -u 1000 -g 0 $KAFKA_USER
+COPY kafka11aaf.jar /usr/share/java/${COMPONENT}/
USER $KAFKA_USER
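
The rebased Dockerfile drops the hand-rolled Confluent 3.0 package install in favour of the stock confluentinc/cp-kafka:6.2.0 base image and runs the broker as a dedicated non-root user. A minimal smoke test, assuming the secret material named in the COPY step is staged in the build context; the local tag is illustrative:

    # Build and confirm the container runs as mrkafka (uid 1000) in the onap group.
    docker build -t dmaap-kafka:local src/main/docker
    docker run --rm --entrypoint id dmaap-kafka:local
    # expected output includes uid=1000(mrkafka) and the onap group
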
diff --git a/src/main/docker/broker-list.sh b/src/main/docker/broker-list.sh
deleted file mode 100644
index 7f04639..0000000
--- a/src/main/docker/broker-list.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
-BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
-echo $BROKERS | sed -e 's/ /,/g'
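
For reference, the deleted helper mapped every local container publishing port 9092 to a host:port pair and joined them with commas. A sketch of its behaviour, assuming the caller exports HOST_IP:

    # Illustrative run of the removed script (output depends on local containers).
    export HOST_IP=10.0.0.5
    ./broker-list.sh
    # e.g. 10.0.0.5:32768,10.0.0.5:32769
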
diff --git a/src/main/docker/cadi.properties b/src/main/docker/cadi.properties
deleted file mode 100644
index 15dcb4d..0000000
--- a/src/main/docker/cadi.properties
+++ /dev/null
@@ -1,20 +0,0 @@
-#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095
-aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
-aaf_env=DEV
-aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-
-cadi_truststore=/etc/kafka/secrets/cert/org.onap.dmaap.mr.trust.jks
-cadi_truststore_password=enc:7U4uOSdXQblnjiDsrqyjXugG4nChBXBBjqZ5amRaCq5yeYzbC9hQpH7BwUzYTa59
-
-cadi_keyfile=/etc/kafka/secrets/cert/org.onap.dmaap.mr.keyfile
-
-cadi_alias=dmaapmr@mr.dmaap.onap.org
-cadi_keystore=/etc/kafka/secrets/cert/org.onap.dmaap.mr.p12
-cadi_keystore_password=enc:NHmvDrri9DSkZJ_-GLuOM0e-UGi_RpVgj9xYdpAamEILHm7I2E6rjbOif2G94UYW
-cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
-
-
-cadi_loglevel=INFO
-cadi_protocols=TLSv1.1,TLSv1.2
-cadi_latitude=37.78187
-cadi_longitude=-122.26147
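
The deleted CADI config pointed at a JKS truststore and a PKCS#12 keystore under /etc/kafka/secrets/cert, with passwords stored in CADI-encrypted (enc:) form. A hedged way to check that mounted stores match those paths, using the stock JDK keytool (it prompts for the store password):

    keytool -list -keystore /etc/kafka/secrets/cert/org.onap.dmaap.mr.trust.jks -storetype JKS
    keytool -list -keystore /etc/kafka/secrets/cert/org.onap.dmaap.mr.p12 -storetype PKCS12
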
diff --git a/src/main/docker/consumer.properties b/src/main/docker/consumer.properties
deleted file mode 100644
index 5ec6df2..0000000
--- a/src/main/docker/consumer.properties
+++ /dev/null
@@ -1,41 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# see kafka.consumer.ConsumerConfig for more details
-
-# Zookeeper connection string
-# comma separated host:port pairs, each corresponding to a zk
-# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002"
-#zookeeper.connect=127.0.0.1:2181
-
-# timeout in ms for connecting to zookeeper
-#zookeeper.connection.timeout.ms=6000
-
-#consumer group id
-group.id=test-consumer-group
-
-#New MirrorMaker properties for Kafka 0.11 version
-#Kafka 0.11 uses Kafka to manage consumers instead of ZK.
-bootstrap.servers=127.0.0.1:9092
-client.id=mirror_maker_consumer
-
-#Following properties are required as MR 1.2 will use Kafka 0.11 with AAF Auth wrapper.
-security.protocol=SASL_PLAINTEXT
-sasl.mechanism=PLAIN
-#java.security.auth.login.config=/opt/app/dmaap/mmagent/etc/kafka_client_jaas.conf
-sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin_secret";
-
-
-#consumer timeout:
-#consumer.timeout.ms=5000
diff --git a/src/main/docker/create-topics.sh b/src/main/docker/create-topics.sh
deleted file mode 100644
index 34945b3..0000000
--- a/src/main/docker/create-topics.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-
-if [[ -z "$START_TIMEOUT" ]]; then
- START_TIMEOUT=600
-fi
-
-start_timeout_exceeded=false
-count=0
-step=10
-while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
- echo "waiting for kafka to be ready"
- sleep $step;
- count=$(expr $count + $step)
- if [ $count -gt $START_TIMEOUT ]; then
- start_timeout_exceeded=true
- break
- fi
-done
-
-if $start_timeout_exceeded; then
- echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
- exit 1
-fi
-
-if [[ -n $KAFKA_CREATE_TOPICS ]]; then
- IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
- echo "creating topics: $topicToCreate"
- IFS=':' read -a topicConfig <<< "$topicToCreate"
- if [ ${topicConfig[3]} ]; then
- JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --config cleanup.policy="${topicConfig[3]}" --if-not-exists
- else
- JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --if-not-exists
- fi
- done
-fi
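
The deleted script parsed KAFKA_CREATE_TOPICS as comma-separated name:partitions:replication[:cleanup.policy] tuples. An illustrative value in the format it accepted (topic names are examples only):

    # Two topics: one with default cleanup, one compacted.
    export KAFKA_CREATE_TOPICS="org.onap.dmaap.mr.sample:3:2,org.onap.dmaap.mr.state:1:1:compact"
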
diff --git a/src/main/docker/download-kafka.sh b/src/main/docker/download-kafka.sh
deleted file mode 100644
index fcc3be1..0000000
--- a/src/main/docker/download-kafka.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-wget https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
\ No newline at end of file
diff --git a/src/main/docker/include/etc/confluent/docker/ensure b/src/main/docker/include/etc/confluent/docker/ensure
index 4bc99f3..09160f0 100644
--- a/src/main/docker/include/etc/confluent/docker/ensure
+++ b/src/main/docker/include/etc/confluent/docker/ensure
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright 2016 Confluent Inc.
+# Copyright 2020 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,5 +20,10 @@ export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
dub path "$KAFKA_DATA_DIRS" writable
-echo "===> Check if Zookeeper is healthy ..."
-cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
+if [[ -n "${KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE-}" ]] && [[ $KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE == "true" ]]
+then
+ echo "===> Skipping Zookeeper health check for SSL connections..."
+else
+ echo "===> Check if Zookeeper is healthy ..."
+ cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
+fi
\ No newline at end of file
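
The new guard skips the plaintext cub zk-ready probe when the broker talks to ZooKeeper over TLS. A sketch of opting in:

    # With SSL client mode enabled, ensure no longer blocks on the ZK probe.
    export KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE=true
    /etc/confluent/docker/ensure
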
diff --git a/src/main/docker/include/etc/confluent/docker/kafka.properties.template b/src/main/docker/include/etc/confluent/docker/kafka.properties.template
index 242e393..5eeaea3 100644
--- a/src/main/docker/include/etc/confluent/docker/kafka.properties.template
+++ b/src/main/docker/include/etc/confluent/docker/kafka.properties.template
@@ -7,14 +7,27 @@
'KAFKA_GC_LOG_OPTS',
'KAFKA_LOG4J_ROOT_LOGLEVEL',
'KAFKA_LOG4J_LOGGERS',
- 'KAFKA_TOOLS_LOG4J_LOGLEVEL']
+ 'KAFKA_TOOLS_LOG4J_LOGLEVEL',
+ 'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET']
-%}
+
+{# properties that don't fit the standard format #}
+{% set other_props = {
+ 'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET' : 'zookeeper.clientCnxnSocket'
+ } -%}
+
{% set kafka_props = env_to_props('KAFKA_', '', exclude=excluded_props) -%}
-{% for name, value in kafka_props.iteritems() -%}
+{% for name, value in kafka_props.items() -%}
{{name}}={{value}}
{% endfor -%}
+{% for k, property in other_props.items() -%}
+{% if env.get(k) != None -%}
+{{property}}={{env[k]}}
+{% endif -%}
+{% endfor -%}
+
{% set confluent_support_props = env_to_props('CONFLUENT_SUPPORT_', 'confluent.support.') -%}
-{% for name, value in confluent_support_props.iteritems() -%}
+{% for name, value in confluent_support_props.items() -%}
{{name}}={{value}}
{% endfor -%}
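
Besides the Python 3 items() migration, the template grows an other_props map for keys that do not survive the generic ENV-to-property rule (lowercase, underscores to dots), since zookeeper.clientCnxnSocket is camel-cased. A sketch of both paths:

    # Generic rule: KAFKA_LOG_RETENTION_HOURS=168  ->  log.retention.hours=168
    export KAFKA_LOG_RETENTION_HOURS=168
    # Special case via other_props (camel case is unreachable by the generic rule):
    export KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET=org.apache.zookeeper.ClientCnxnSocketNetty
    # renders: zookeeper.clientCnxnSocket=org.apache.zookeeper.ClientCnxnSocketNetty
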
diff --git a/src/main/docker/include/etc/confluent/docker/log4j.properties.template b/src/main/docker/include/etc/confluent/docker/log4j.properties.template
index bdd6e5b..445a05c 100644
--- a/src/main/docker/include/etc/confluent/docker/log4j.properties.template
+++ b/src/main/docker/include/etc/confluent/docker/log4j.properties.template
@@ -1,4 +1,4 @@
-:x
+
log4j.rootLogger={{ env["KAFKA_LOG4J_ROOT_LOGLEVEL"] | default('INFO') }}, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
@@ -13,8 +13,7 @@ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
'kafka.controller': 'TRACE',
'kafka.log.LogCleaner': 'INFO',
'state.change.logger': 'TRACE',
- 'kafka.authorizer.logger': 'WARN',
- 'org.onap': 'INFO'
+ 'kafka.authorizer.logger': 'WARN'
} -%}
@@ -22,6 +21,6 @@ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
{% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
{% endif %}
-{% for logger,loglevel in loggers.iteritems() %}
+{% for logger,loglevel in loggers.items() %}
log4j.logger.{{logger}}={{loglevel}}
{% endfor %}
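
The org.onap logger is dropped from the template defaults, but the template still honours KAFKA_LOG4J_LOGGERS, so deployments can restore it at runtime; a sketch:

    export KAFKA_LOG4J_LOGGERS="org.onap=INFO"
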
diff --git a/src/main/docker/include/etc/confluent/docker/run b/src/main/docker/include/etc/confluent/docker/run
index 4501e22..91ac16b 100644
--- a/src/main/docker/include/etc/confluent/docker/run
+++ b/src/main/docker/include/etc/confluent/docker/run
@@ -26,7 +26,7 @@ if [ $# -ne 0 ]; then
fi
echo "===> ENV Variables ..."
-show_env
+env
echo "===> User"
id
diff --git a/src/main/docker/kafka-run-class.sh b/src/main/docker/kafka-run-class.sh
deleted file mode 100644
index 481ebe1..0000000
--- a/src/main/docker/kafka-run-class.sh
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/bin/bash
-
-if [ $# -lt 1 ];
-then
- echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]"
- exit 1
-fi
-
-# CYGINW == 1 if Cygwin is detected, else 0.
-if [[ $(uname -a) =~ "CYGWIN" ]]; then
- CYGWIN=1
-else
- CYGWIN=0
-fi
-
-if [ -z "$INCLUDE_TEST_JARS" ]; then
- INCLUDE_TEST_JARS=false
-fi
-
-# Exclude jars not necessary for running commands.
-regex="(-(test|src|scaladoc|javadoc)\.jar|jar.asc)$"
-should_include_file() {
- if [ "$INCLUDE_TEST_JARS" = true ]; then
- return 0
- fi
- file=$1
- if [ -z "$(echo "$file" | egrep "$regex")" ] ; then
- return 0
- else
- return 1
- fi
-}
-
-base_dir=$(dirname $0)/..
-
-if [ -z "$SCALA_VERSION" ]; then
- SCALA_VERSION=2.11.11
-fi
-
-if [ -z "$SCALA_BINARY_VERSION" ]; then
- SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.')
-fi
-
-# run ./gradlew copyDependantLibs to get all dependant jars in a local dir
-shopt -s nullglob
-for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*;
-do
- if [ -z "$CLASSPATH" ] ; then
- CLASSPATH="$dir/*"
- else
- CLASSPATH="$CLASSPATH:$dir/*"
- fi
-done
-
-for file in "$base_dir"/examples/build/libs/kafka-examples*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for file in "$base_dir"/clients/build/libs/kafka-clients*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for file in "$base_dir"/streams/build/libs/kafka-streams*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for file in "$base_dir"/streams/build/dependant-libs-${SCALA_VERSION}/rocksdb*.jar;
-do
- CLASSPATH="$CLASSPATH":"$file"
-done
-
-for file in "$base_dir"/tools/build/libs/kafka-tools*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*;
-do
- CLASSPATH="$CLASSPATH:$dir/*"
-done
-
-for cc_pkg in "api" "transforms" "runtime" "file" "json" "tools"
-do
- for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar;
- do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
- done
- if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then
- CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*"
- fi
-done
-
-# classpath addition for release
-for file in "$base_dir"/libs/*;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-
-for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar;
-do
- if should_include_file "$file"; then
- CLASSPATH="$CLASSPATH":"$file"
- fi
-done
-shopt -u nullglob
-
-# JMX settings
-if [ -z "$KAFKA_JMX_OPTS" ]; then
- KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
-fi
-
-# JMX port to use
-if [ $JMX_PORT ]; then
- KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT "
-fi
-
-# Log directory to use
-if [ "x$LOG_DIR" = "x" ]; then
- LOG_DIR="$base_dir/logs"
-fi
-
-# Log4j settings
-if [ -z "$KAFKA_LOG4J_OPTS" ]; then
- # Log to console. This is a tool.
- LOG4J_DIR="$base_dir/config/tools-log4j.properties"
- # If Cygwin is detected, LOG4J_DIR is converted to Windows format.
- (( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}")
- KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}"
-else
- # create logs directory
- if [ ! -d "$LOG_DIR" ]; then
- mkdir -p "$LOG_DIR"
- fi
-fi
-
-# If Cygwin is detected, LOG_DIR is converted to Windows format.
-(( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}")
-KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR $KAFKA_LOG4J_OPTS"
-
-# Generic jvm settings you want to add
-if [ -z "$KAFKA_OPTS" ]; then
- KAFKA_OPTS=""
-fi
-
-# Set Debug options if enabled
-if [ "x$KAFKA_DEBUG" != "x" ]; then
-
- # Use default ports
- DEFAULT_JAVA_DEBUG_PORT="5005"
-
- if [ -z "$JAVA_DEBUG_PORT" ]; then
- JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT"
- fi
-
- # Use the defaults if JAVA_DEBUG_OPTS was not set
- DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT"
- if [ -z "$JAVA_DEBUG_OPTS" ]; then
- JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS"
- fi
-
- echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
- KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
-fi
-
-# Which java to use
-if [ -z "$JAVA_HOME" ]; then
- JAVA="java"
-else
- JAVA="$JAVA_HOME/bin/java"
-fi
-
-# Memory options
-if [ -z "$KAFKA_HEAP_OPTS" ]; then
- KAFKA_HEAP_OPTS="-Xmx256M"
-fi
-
-# JVM performance options
-if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
- KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+DisableExplicitGC -Djava.awt.headless=true"
-fi
-
-
-while [ $# -gt 0 ]; do
- COMMAND=$1
- case $COMMAND in
- -name)
- DAEMON_NAME=$2
- CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
- shift 2
- ;;
- -loggc)
- if [ -z "$KAFKA_GC_LOG_OPTS" ]; then
- GC_LOG_ENABLED="true"
- fi
- shift
- ;;
- -daemon)
- DAEMON_MODE="true"
- shift
- ;;
- *)
- break
- ;;
- esac
-done
-
-# GC options
-GC_FILE_SUFFIX='-gc.log'
-GC_LOG_FILE_NAME=''
-if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
- GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX
- KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
-fi
-
-# If Cygwin is detected, classpath is converted to Windows format.
-(( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")
-
-# Launch mode
-if [ "x$DAEMON_MODE" = "xtrue" ]; then
- nohup $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
-else
- exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS $1 -cp $CLASSPATH $KAFKA_OPTS "$@"
-fi
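
For reference, the deleted launcher wired up remote JDWP debugging whenever KAFKA_DEBUG was set; a sketch of invoking it that way (the class and config path are illustrative):

    KAFKA_DEBUG=true JAVA_DEBUG_PORT=5005 \
      ./kafka-run-class.sh kafka.Kafka config/server.properties
    # prints: Enabling Java debug options: -agentlib:jdwp=...address=5005
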
diff --git a/src/main/docker/kafka_server_jaas.conf b/src/main/docker/kafka_server_jaas.conf
deleted file mode 100644
index 3e69fc6..0000000
--- a/src/main/docker/kafka_server_jaas.conf
+++ /dev/null
@@ -1,12 +0,0 @@
-KafkaServer {
- org.onap.dmaap.kafkaAuthorize.PlainLoginModule1 required
- username="admin"
- password="admin_secret"
- user_admin="admin_secret";
-};
-Client {
- org.apache.zookeeper.server.auth.DigestLoginModule required
- username="kafka"
- password="kafka_secret";
- };
-
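
The deleted start scripts handed this file to the broker JVM through the standard JAAS system property:

    export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"
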
diff --git a/src/main/docker/mmagent.config b/src/main/docker/mmagent.config
deleted file mode 100644
index 66984ca..0000000
--- a/src/main/docker/mmagent.config
+++ /dev/null
@@ -1,5 +0,0 @@
-kafkahome=/opt/kafka
-topicURL=http://message-router:3904
-topicname=org.onap.dmaap.mr.mirrormakeragent
-mechid=demo@people.osaaf.org
-password=YKCAVhSQ+nedsh1Nry57l19jJQSnk8gs
\ No newline at end of file
diff --git a/src/main/docker/producer.properties b/src/main/docker/producer.properties
deleted file mode 100644
index 78ff7c7..0000000
--- a/src/main/docker/producer.properties
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# see kafka.producer.ProducerConfig for more details
-
-############################# Producer Basics #############################
-
-# list of brokers used for bootstrapping knowledge about the rest of the cluster
-# format: host1:port1,host2:port2 ...
-#metadata.broker.list=172.16.96.14:9092
-
-# name of the partitioner class for partitioning events; default partition spreads data randomly
-#partitioner.class=
-
-# specifies whether the messages are sent asynchronously (async) or synchronously (sync)
-producer.type=sync
-
-# specify the compression codec for all data generated: none, gzip, snappy, lz4.
-# the old config values work as well: 0, 1, 2, 3 for none, gzip, snappy, lz4, respectively
-#compression.codec=none
-
-# message encoder
-#serializer.class=kafka.serializer.DefaultEncoder
-
-# allow topic level compression
-#compressed.topics=
-
-#New MirrorMaker properties for Kafka 0.11 version
-#list of brokers used for bootstrapping knowledge about the rest of the cluster
-# format: host1:port1,host2:port2 ...
-bootstrap.servers=172.16.96.14:9092
-
-#Following properties are required as MR 1.2 will use Kafka 0.11 with AAF Auth wrapper.
-security.protocol=SASL_PLAINTEXT
-sasl.mechanism=PLAIN
-#java.security.auth.login.config=/opt/app/dmaap/mmagent/etc/kafka_client_jaas.conf
-sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin_secret";
-
-#Producer
-compression.type=none
-#serializer.class=kafka.serializer.DefaultEncoder
-batch.size=100
-client.id=mirror_maker_producer
-
-############################# Async Producer #############################
-# maximum time, in milliseconds, for buffering data on the producer queue
-#queue.buffering.max.ms=
-
-# the maximum size of the blocking queue for buffering on the producer
-#queue.buffering.max.messages=
-
-# Timeout for event enqueue:
-# 0: events will be enqueued immediately or dropped if the queue is full
-# -ve: enqueue will block indefinitely if the queue is full
-# +ve: enqueue will block up to this many milliseconds if the queue is full
-#queue.enqueue.timeout.ms=
-
-# the number of messages batched at the producer
-#batch.num.messages=
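
As with the consumer config, the removed producer settings can be exercised with the stock console producer; the topic name is illustrative:

    $KAFKA_HOME/bin/kafka-console-producer.sh \
      --broker-list 172.16.96.14:9092 \
      --producer.config producer.properties \
      --topic test-topic
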
diff --git a/src/main/docker/start-kafka.sh b/src/main/docker/start-kafka.sh
deleted file mode 100644
index 6c58b74..0000000
--- a/src/main/docker/start-kafka.sh
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/bin/bash
-
-if [[ -z "$KAFKA_PORT" ]]; then
- export KAFKA_PORT=9092
-fi
-
-create-topics.sh &
-
-if [[ -z "$KAFKA_ADVERTISED_PORT" && \
- -z "$KAFKA_LISTENERS" && \
- -z "$KAFKA_ADVERTISED_LISTENERS" && \
- -S /var/run/docker.sock ]]; then
- export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
-fi
-if [[ -z "$KAFKA_BROKER_ID" ]]; then
- if [[ -n "$BROKER_ID_COMMAND" ]]; then
- export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND)
- else
- # By default auto allocate broker ID
- export KAFKA_BROKER_ID=-1
- fi
-fi
-if [[ -z "$KAFKA_LOG_DIRS" ]]; then
- export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
-fi
-if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
- export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
-fi
-
-if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
- sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
- unset KAFKA_HEAP_OPTS
-fi
-
-if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
- export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
-fi
-
-#if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then
-# if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}"
-# export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT"
-# fi
-
- if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then
- export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}"
- fi
-
- if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then
- export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}"
- fi
-
- if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then
- export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME
- fi
-#else
- #DEFAULT LISTENERS
-# export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}"
-# export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then
-# export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then
-# export KAFKA_LISTENERS="${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}"
-#fi
-
-if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! "$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then
- if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then
- export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 ))
- fi
- #export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}"
- #export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}"
- #export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}"
- unset KAFKA_INTER_BROKER_PORT
- unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL
- unset INTER_BROKER_LISTENER
-fi
-
-if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
- export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND)
-fi
-
-#Issue newline to config file in case there is not one already
-echo -e "\n" >> $KAFKA_HOME/config/server.properties
-
-unset KAFKA_CREATE_TOPICS
-unset KAFKA_ADVERTISED_PROTOCOL_NAME
-unset KAFKA_PROTOCOL_NAME
-
-if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
- unset KAFKA_ADVERTISED_PORT
- unset KAFKA_ADVERTISED_HOST_NAME
-fi
-
-if [[ -n "$KAFKA_LISTENERS" ]]; then
- unset KAFKA_PORT
- unset KAFKA_HOST_NAME
-fi
-
-for VAR in `env`
-do
- if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
- kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
- env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
- if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
- sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
- else
- echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
- fi
- fi
-
- if [[ $VAR =~ ^LOG4J_ ]]; then
- log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
- log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
- if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then
- sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char
- else
- echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties
- fi
- fi
-done
-
-if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
- eval $CUSTOM_INIT_SCRIPT
-fi
-cp /tmp/kafka11aaf-jar-with-dependencies.jar $KAFKA_HOME/libs
-cp /tmp/org.onap.dmaap.mr.keyfile $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.trust.jks $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.p12 $KAFKA_HOME/config
-cp /tmp/kafka_server_jaas.conf $KAFKA_HOME/config
-cp /tmp/cadi.properties $KAFKA_HOME/config
-export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"
-
-
-exec $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties
-
-
-
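
The core of the deleted entrypoint translated every KAFKA_* variable (except KAFKA_HOME) into a server.properties key by lowercasing it and mapping underscores to dots. A sketch of the mapping it performed:

    export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://kafka:9092"
    # loop appends/updates: advertised.listeners=PLAINTEXT://kafka:9092
    export KAFKA_NUM_PARTITIONS=3
    # loop appends/updates: num.partitions=3
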
diff --git a/src/main/docker/start-kafkaOrMirrorMaker.sh b/src/main/docker/start-kafkaOrMirrorMaker.sh
deleted file mode 100644
index 9bb2b8a..0000000
--- a/src/main/docker/start-kafkaOrMirrorMaker.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-if [[ -n "$START_MIRROR_MAKER" && "$START_MIRROR_MAKER" = "YES" ]]; then
- exec start-mirrormaker.sh
- else
- exec start-kafka.sh
- fi
\ No newline at end of file
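
The removed dispatcher selected the process to exec from a single flag; a sketch with an illustrative image name:

    # START_MIRROR_MAKER=YES launched MirrorMaker; anything else launched Kafka.
    docker run -e START_MIRROR_MAKER=YES dmaap-kafka:legacy
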
diff --git a/src/main/docker/start-mirrormaker.sh b/src/main/docker/start-mirrormaker.sh
deleted file mode 100644
index 355bac0..0000000
--- a/src/main/docker/start-mirrormaker.sh
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/bin/bash
-
-if [[ -z "$KAFKA_PORT" ]]; then
- export KAFKA_PORT=9092
-fi
-
-
-if [[ -z "$KAFKA_ADVERTISED_PORT" && \
- -z "$KAFKA_LISTENERS" && \
- -z "$KAFKA_ADVERTISED_LISTENERS" && \
- -S /var/run/docker.sock ]]; then
- export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
-fi
-if [[ -z "$KAFKA_BROKER_ID" ]]; then
- if [[ -n "$BROKER_ID_COMMAND" ]]; then
- export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND)
- else
- # By default auto allocate broker ID
- export KAFKA_BROKER_ID=-1
- fi
-fi
-if [[ -z "$KAFKA_LOG_DIRS" ]]; then
- export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
-fi
-if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
- export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
-fi
-
-if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
- sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
- unset KAFKA_HEAP_OPTS
-fi
-
-if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
- export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
-fi
-
-#if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then
-# if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}"
-# export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT"
-# fi
-
- if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then
- export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}"
- fi
-
- if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then
- export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}"
- fi
-
- if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then
- export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME
- fi
-#else
- #DEFAULT LISTENERS
-# export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}"
-# export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then
-# export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then
-# export KAFKA_LISTENERS="${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}"
-#fi
-
-#if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then
-# export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}"
-#fi
-
-if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! "$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then
- if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then
- export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 ))
- fi
- #export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}"
- #export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}"
- #export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}"
- unset KAFKA_INTER_BROKER_PORT
- unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL
- unset INTER_BROKER_LISTENER
-fi
-
-if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
- export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND)
-fi
-
-#Issue newline to config file in case there is not one already
-echo -e "\n" >> $KAFKA_HOME/config/server.properties
-
-unset KAFKA_CREATE_TOPICS
-unset KAFKA_ADVERTISED_PROTOCOL_NAME
-unset KAFKA_PROTOCOL_NAME
-
-if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
- unset KAFKA_ADVERTISED_PORT
- unset KAFKA_ADVERTISED_HOST_NAME
-fi
-
-if [[ -n "$KAFKA_LISTENERS" ]]; then
- unset KAFKA_PORT
- unset KAFKA_HOST_NAME
-fi
-
-for VAR in `env`
-do
- if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
- kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
- env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
- if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
- sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
- else
- echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
- fi
- fi
-
- if [[ $VAR =~ ^LOG4J_ ]]; then
- log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
- log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
- if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then
- sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char
- else
- echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties
- fi
- fi
-done
-
-if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
- eval $CUSTOM_INIT_SCRIPT
-fi
-cp /tmp/kafka11aaf-jar-with-dependencies.jar $KAFKA_HOME/libs
-cp /tmp/org.onap.dmaap.mr.keyfile $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.trust.jks $KAFKA_HOME/config
-cp /tmp/org.onap.dmaap.mr.p12 $KAFKA_HOME/config
-cp /tmp/kafka_server_jaas.conf $KAFKA_HOME/config
-cp /tmp/cadi.properties $KAFKA_HOME/config
-export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"
-
-
-
-cp /tmp/kafka-run-class.sh /opt/kafka/bin
-java -jar /tmp/dmaapMMAgent.jar
-
-
-