author     su622b <sunil.unnava@att.com>  2018-04-23 12:02:24 -0400
committer  su622b <sunil.unnava@att.com>  2018-04-23 12:02:53 -0400
commit     f5b6899dafee9f9473ee1d7405ede512ce1dc577 (patch)
tree       93bf0506727554c9bd0b717c4cd2f5e67dbbc1cc /demo
parent     0643517a9fd0d4d23ebef740e2b3cc18f41c9318 (diff)

Move HEAT files to MR project

Issue-ID: DMAAP-433
Change-Id: Ib7ac91d0fb3e99f345ba640ed4647b09d62ed5ff
Signed-off-by: su622b <sunil.unnava@att.com>
Diffstat (limited to 'demo')
-rw-r--r--  demo/deploy.sh  41
-rw-r--r--  demo/docker_files/Dockerfile  22
-rw-r--r--  demo/docker_files/Dockerfile-local  22
-rw-r--r--  demo/docker_files/__MsgRtrApi.properties  140
-rw-r--r--  demo/docker_files/__docker-compose.yml  57
-rw-r--r--  demo/docker_files/apikey-APPC1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTAL1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTALAPP1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTALDBC1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTALPOL1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTALSDC1.key  8
-rw-r--r--  demo/docker_files/apikey-PORTALVID1.key  8
-rw-r--r--  demo/docker_files/apikey-SDC1.key  8
-rw-r--r--  demo/docker_files/broker-list.sh  5
-rw-r--r--  demo/docker_files/cadi.properties  21
-rw-r--r--  demo/docker_files/create-topics.sh  32
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/.lock  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log  bin 0 -> 86 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log  bin 0 -> 86 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log  bin 0 -> 5150 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index  bin 0 -> 48 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log  bin 0 -> 34764 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint  27
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint  27
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index  0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log  bin 0 -> 43 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.1  bin 0 -> 67108880 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.103  bin 0 -> 67108880 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.125  bin 0 -> 67108880 bytes
-rw-r--r--  demo/docker_files/download-kafka.sh  5
-rw-r--r--  demo/docker_files/dump_mr_state.sh  10
-rw-r--r--  demo/docker_files/mykey  27
-rw-r--r--  demo/docker_files/preconfigure-ecomp-keystopics.sh  191
-rw-r--r--  demo/docker_files/start-kafka.sh  69
-rw-r--r--  demo/docker_files/state-20170301.tar.gz  bin 0 -> 212717 bytes
-rw-r--r--  demo/docker_files/subscriber.sh  4
-rw-r--r--  demo/docker_files/tests/data.2.2.json  1
-rw-r--r--  demo/docker_files/tests/data.2.5.json  1
-rw-r--r--  demo/docker_files/tests/data.3.1.txt  1
-rw-r--r--  demo/docker_files/tests/data.3.3.json  1
-rw-r--r--  demo/docker_files/tests/key.req  1
-rw-r--r--  demo/docker_files/tests/out/1.1.out  5
-rw-r--r--  demo/docker_files/tests/out/2.1.out  8
-rw-r--r--  demo/docker_files/tests/out/2.2.out  13
-rw-r--r--  demo/docker_files/tests/out/2.3.out  6
-rw-r--r--  demo/docker_files/tests/out/2.4.out  13
-rw-r--r--  demo/docker_files/tests/out/2.5.out  4
-rw-r--r--  demo/docker_files/tests/out/2.6.out  1
-rw-r--r--  demo/docker_files/tests/out/3.1.out  4
-rw-r--r--  demo/docker_files/tests/out/3.2.out  1
-rw-r--r--  demo/docker_files/tests/out/3.3.out  4
-rw-r--r--  demo/docker_files/tests/out/3.4.out  1
-rw-r--r--  demo/docker_files/tests/regress.sh  113
-rw-r--r--  demo/docker_files/tests/test.sh  50
-rw-r--r--  demo/docker_files/uebapikey-sdc  8
99 files changed, 1000 insertions, 0 deletions
diff --git a/demo/deploy.sh b/demo/deploy.sh
new file mode 100644
index 0000000..b11a1e0
--- /dev/null
+++ b/demo/deploy.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+set -e
+
+# do not change this, it is already matched with the git repo file structure
+DOCKER_FILE_DIR='./docker_files'
+
+KAFKA_VERSION='0.8.1.1'
+SCALA_VERSION='2.9.2'
+wget -q "http://www.namesdir.com/mirrors/apache/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" \
+ -O "./docker_files/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+
+# commands to run docker and docker-compose
+DOCKER_COMPOSE_EXE='/opt/docker/docker-compose'
+
+cd "${DOCKER_FILE_DIR}"
+
+while ! ifconfig |grep "docker0" > /dev/null;
+ do sleep 1
+ echo 'waiting for docker operational'
+done
+
+echo "prep any files with local configurations"
+if ls __* 1> /dev/null 2>&1; then
+ IP_DOCKER0=$(ifconfig docker0 |grep "inet addr" | cut -d: -f2 |cut -d" " -f1)
+ TEMPLATES=$(ls -1 __*)
+ for TEMPLATE in $TEMPLATES
+ do
+ FILENAME=${TEMPLATE//_}
+ if [ ! -z "${IP_DOCKER0}" ]; then
+ sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" "$TEMPLATE" > "$FILENAME"
+ fi
+ done
+fi
+
+if [ -z "$MTU" ]; then
+ export MTU=$(ifconfig docker0 |grep MTU |sed -e 's/.*MTU://' -e 's/\s.*$//')
+fi
+
+echo "starting docker operations"
+${DOCKER_COMPOSE_EXE} up -d --build
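Note: deploy.sh downloads the Kafka tarball into docker_files/, waits for the docker0 bridge, renders every file whose name starts with "__" by filling in the bridge address (the underscores are stripped to produce the runtime file name), derives MTU, and then builds and starts the stack. A minimal stand-alone sketch of the templating step, assuming an illustrative docker0 address of 172.17.0.1:

  # render the "__" templates the same way deploy.sh does (address is an assumed example)
  IP_DOCKER0=172.17.0.1
  for TEMPLATE in __MsgRtrApi.properties __docker-compose.yml; do
    # strip the leading underscores to get the output name, then fill the placeholder
    sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" "$TEMPLATE" > "${TEMPLATE//_}"
  done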
diff --git a/demo/docker_files/Dockerfile b/demo/docker_files/Dockerfile
new file mode 100644
index 0000000..87e96ee
--- /dev/null
+++ b/demo/docker_files/Dockerfile
@@ -0,0 +1,22 @@
+FROM anapsix/alpine-java
+
+MAINTAINER Wurstmeister
+
+RUN apk add --update unzip wget curl docker jq coreutils
+
+ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
+ADD download-kafka.sh /tmp/download-kafka.sh
+RUN chmod a+x /tmp/download-kafka.sh && sync && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
+
+VOLUME ["/kafka"]
+
+ENV KAFKA_HOME /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION}
+ADD start-kafka.sh /usr/bin/start-kafka.sh
+ADD broker-list.sh /usr/bin/broker-list.sh
+ADD create-topics.sh /usr/bin/create-topics.sh
+# The scripts need to have executable permission
+RUN chmod a+x /usr/bin/start-kafka.sh && \
+ chmod a+x /usr/bin/broker-list.sh && \
+ chmod a+x /usr/bin/create-topics.sh
+# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
+CMD ["start-kafka.sh"]
diff --git a/demo/docker_files/Dockerfile-local b/demo/docker_files/Dockerfile-local
new file mode 100644
index 0000000..4909af3
--- /dev/null
+++ b/demo/docker_files/Dockerfile-local
@@ -0,0 +1,22 @@
+FROM anapsix/alpine-java
+
+MAINTAINER Wurstmeister
+
+RUN apk add --update tar wget curl docker coreutils
+
+ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
+COPY kafka_2.9.2-0.8.1.1.tgz /tmp/kafka_2.9.2-0.8.1.1.tgz
+RUN tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt
+
+VOLUME ["/kafka"]
+
+ENV KAFKA_HOME /opt/kafka_2.9.2-0.8.1.1
+ADD start-kafka.sh /usr/bin/start-kafka.sh
+ADD broker-list.sh /usr/bin/broker-list.sh
+ADD create-topics.sh /usr/bin/create-topics.sh
+# The scripts need to have executable permission
+RUN chmod a+x /usr/bin/start-kafka.sh && \
+ chmod a+x /usr/bin/broker-list.sh && \
+ chmod a+x /usr/bin/create-topics.sh
+# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
+CMD ["start-kafka.sh"]
diff --git a/demo/docker_files/__MsgRtrApi.properties b/demo/docker_files/__MsgRtrApi.properties
new file mode 100644
index 0000000..47293a7
--- /dev/null
+++ b/demo/docker_files/__MsgRtrApi.properties
@@ -0,0 +1,140 @@
+###############################################################################
+##
+## Cambria API Server config
+##
+## - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+## - 3904 is standard as of 7/29/14.
+#
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+##
+config.zk.servers=zookeeper:2181
+#config.zk.servers={{ ip.docker0 }}:2181
+#10.0.11.1:2181
+#10.208.128.229:2181
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+## if you want to change request.required.acks it can take this one value
+#kafka.metadata.broker.list=localhost:9092,localhost:9093
+kafka.metadata.broker.list=kafka:9092
+#kafka.metadata.broker.list={{ ip.docker0 }}:9092
+#10.0.11.1:9092
+#10.208.128.229:9092
+##kafka.request.required.acks=-1
+#kafka.client.zookeeper=${config.zk.servers}
+consumer.timeout.ms=100
+zookeeper.connection.timeout.ms=6000
+zookeeper.session.timeout.ms=6000
+zookeeper.sync.time.ms=2000
+auto.commit.interval.ms=1000
+fetch.message.max.bytes =1000000
+auto.commit.enable=false
+
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. To protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64-encoded AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+#cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.sendEverySeconds=60
+
+cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
+
+##############################################################################
+#100mb
+maxcontentlength=10000
+
+
+##############################################################################
+#AAF Properties
+msgRtr.namespace.aaf=org.openecomp.dcae.dmaap.mtnje2.mr.topic
+msgRtr.topicfactory.aaf=org.openecomp.dcae.dmaap.topicFactory|:org.openecomp.dcae.dmaap.mtnje2.mr.topic:
+enforced.topic.name.AAF=org.openecomp
+forceAAF=false
+transidUEBtopicreqd=false
+defaultNSforUEB=org.openecomp.dmaap.mr.ueb
+##############################################################################
+#Mirror Maker Agent
+msgRtr.mirrormakeradmin.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.openecomp.dmaap.mr.dev.topicFactory|:org.openecomp.dmaap.mr.dev.topic:
+msgRtr.mirrormaker.timeout=15000
+msgRtr.mirrormaker.topic=org.openecomp.dmaap.mr.prod.mm.agent
+msgRtr.mirrormaker.consumergroup=mmagentserver
+msgRtr.mirrormaker.consumerid=1
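Note: the Secured Config comments above describe cambria.secureConfig.key as a base64-encoded AES key and the IV as a 16-byte value, both specific to an installation. A hedged sketch of generating a fresh pair with openssl, assuming that format is all the server requires:

  # assumed generation of per-installation secured-config values
  KEY=$(openssl rand -base64 16)   # 128-bit AES key, base64 encoded
  IV=$(openssl rand -base64 16)    # 16-byte initialization vector, base64 encoded
  printf 'cambria.secureConfig.key=%s\ncambria.secureConfig.iv=%s\n' "$KEY" "$IV"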
diff --git a/demo/docker_files/__docker-compose.yml b/demo/docker_files/__docker-compose.yml
new file mode 100644
index 0000000..bf73b1d
--- /dev/null
+++ b/demo/docker_files/__docker-compose.yml
@@ -0,0 +1,57 @@
+version: '2'
+networks:
+ default:
+ driver: bridge
+ driver_opts:
+ com.docker.network.driver.mtu: ${MTU}
+services:
+ zookeeper:
+ image: wurstmeister/zookeeper
+ ports:
+ - "2181:2181"
+ volumes:
+ - ./data-zookeeper:/opt/zookeeper-3.4.9/data
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
+
+ kafka:
+ build:
+ context: .
+ dockerfile: Dockerfile-local
+ ports:
+ - "9092:9092"
+ environment:
+ #KAFKA_ADVERTISED_HOST_NAME: {{ ip.docker0 }}
+ KAFKA_ADVERTISED_HOST_NAME: kafka
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_BROKER_ID: 1
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+ - ./data-kafka:/kafka
+ - ./start-kafka.sh:/start-kafka.sh
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
+
+ dmaap:
+ image: onap/dmaap/dmaap-mr:1.1.3
+ ports:
+ - "3904:3904"
+ - "3905:3905"
+ volumes:
+ - ./MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+ - ./cadi.properties:/appl/dmaapMR1/etc/cadi.properties
+ - ./mykey:/appl/dmaapMR1/etc/keyfile
+ depends_on:
+ - zookeeper
+ - kafka
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
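Note: the compose file reads ${MTU} for the bridge network and is itself one of the "__" templates (the commented-out KAFKA_ADVERTISED_HOST_NAME line marks the docker0 substitution point). A hedged sketch of bringing the rendered stack up without deploy.sh (the MTU value is an assumed example):

  # assumed manual equivalent of the tail end of deploy.sh
  export MTU=1500
  /opt/docker/docker-compose up -d --build
  docker ps   # expect zookeeper (2181), kafka (9092) and dmaap (3904/3905) running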
diff --git a/demo/docker_files/apikey-APPC1.key b/demo/docker_files/apikey-APPC1.key
new file mode 100644
index 0000000..2f77745
--- /dev/null
+++ b/demo/docker_files/apikey-APPC1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-APPC1",
+ "email": "no email"
+ },
+ "secret": "64AG2hF4pYeG2pq7CT6XwUOT",
+ "key": "VIlbtVl6YLhNUrtU"
+} \ No newline at end of file
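Note: each *.key file pairs a Message Router API key with its secret; requests are then authenticated by HMAC-SHA1 signing a timestamp, exactly as preconfigure-ecomp-keystopics.sh does further down in this change. A minimal hedged sketch of building the signed headers from this file (jq and the 127.0.0.1:3904 endpoint are the same assumptions that script makes):

  # assumed signing flow, mirrored from preconfigure-ecomp-keystopics.sh
  KEY=$(jq -r .key apikey-APPC1.key)
  SECRET=$(jq -r .secret apikey-APPC1.key)
  TS=$(date --iso-8601=seconds)
  SIG=$(echo -n "$TS" | openssl sha1 -hmac "$SECRET" -binary | openssl base64)
  curl -i -H "X-CambriaAuth: $KEY:$SIG" -H "X-CambriaDate: $TS" http://127.0.0.1:3904/topics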
diff --git a/demo/docker_files/apikey-PORTAL1.key b/demo/docker_files/apikey-PORTAL1.key
new file mode 100644
index 0000000..068bed7
--- /dev/null
+++ b/demo/docker_files/apikey-PORTAL1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTAL1",
+ "email": "no email"
+ },
+ "secret": "uCYgKjWKK5IxPGNNZzYSSWo9",
+ "key": "7GkVcrO6sIDb3ngW"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALAPP1.key b/demo/docker_files/apikey-PORTALAPP1.key
new file mode 100644
index 0000000..a27422f
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALAPP1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTALAPP1",
+ "email": "no email"
+ },
+ "secret": "P0HpqEBhKJvxjRYdw2sCTUll",
+ "key": "jQd4a9zVNi4ePyBp"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALDBC1.key b/demo/docker_files/apikey-PORTALDBC1.key
new file mode 100644
index 0000000..f29d959
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALDBC1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTALDBC1",
+ "email": "no email"
+ },
+ "secret": "WB7AJICClKg9oZLsxhQnykXA",
+ "key": "MtRwsF16RdpHZ7eM"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALPOL1.key b/demo/docker_files/apikey-PORTALPOL1.key
new file mode 100644
index 0000000..97b39a4
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALPOL1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTALPOL1",
+ "email": "no email"
+ },
+ "secret": "P7ejzF4nS3LAsMmKKTvYYFpA",
+ "key": "Gsd3C3hLYaUcor6l"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALSDC1.key b/demo/docker_files/apikey-PORTALSDC1.key
new file mode 100644
index 0000000..28bfb89
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALSDC1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTALSDC1",
+ "email": "no email"
+ },
+ "secret": "XftIATw9Jr3VzAcPqt3NnJOu",
+ "key": "x9UfO7JsDn8BESVX"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALVID1.key b/demo/docker_files/apikey-PORTALVID1.key
new file mode 100644
index 0000000..3373566
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALVID1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-PORTALVID1",
+ "email": "no email"
+ },
+ "secret": "S31PrbOzGgL4hg4owgtx47Da",
+ "key": "2Re7Pvdkgw5aeAUD"
+} \ No newline at end of file
diff --git a/demo/docker_files/apikey-SDC1.key b/demo/docker_files/apikey-SDC1.key
new file mode 100644
index 0000000..207431d
--- /dev/null
+++ b/demo/docker_files/apikey-SDC1.key
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for apikey-SDC1",
+ "email": "no email"
+ },
+ "secret": "Ehq3WyT4bkif4zwgEbvshGal",
+ "key": "iPIxkpAMI8qTcQj8"
+} \ No newline at end of file
diff --git a/demo/docker_files/broker-list.sh b/demo/docker_files/broker-list.sh
new file mode 100644
index 0000000..7f04639
--- /dev/null
+++ b/demo/docker_files/broker-list.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
+BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
+echo $BROKERS | sed -e 's/ /,/g'
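Note: broker-list.sh maps every container exposing port 9092 back to host-side endpoints and joins them with commas. A hedged usage sketch (the host address is an assumed example):

  # assumed invocation; prints something like 192.168.1.10:32768,192.168.1.10:32769
  HOST_IP=192.168.1.10 ./broker-list.sh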
diff --git a/demo/docker_files/cadi.properties b/demo/docker_files/cadi.properties
new file mode 100644
index 0000000..1cb00a5
--- /dev/null
+++ b/demo/docker_files/cadi.properties
@@ -0,0 +1,21 @@
+basic_realm=openecomp.org
+basic_warn=TRUE
+
+cadi_loglevel=DEBUG
+#cadi_keyfile=target/swm/package/nix/dist_files/appl/${artifactId}/etc/keyfile2
+cadi_keyfile=/appl/dmaapMR1/etc/keyfile
+# Configure AAF
+aaf_url=https://DME2RESOLVE/service=org.openecomp.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
+
+aaf_id=dgl@openecomp.org
+aaf_password=enc:f2u5br1mh29M02-
+aaf_timeout=5000
+aaf_clean_interval=1200000
+aaf_user_expires=60000
+aaf_high_count=1000000
+
+
+# The following properties are being set by the AJSC Container and should NOT need to be set here.
+AFT_LATITUDE=33.823589
+AFT_LONGITUDE=-84.366982
+AFT_ENVIRONMENT=AFTUAT
diff --git a/demo/docker_files/create-topics.sh b/demo/docker_files/create-topics.sh
new file mode 100644
index 0000000..e07bf06
--- /dev/null
+++ b/demo/docker_files/create-topics.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+
+if [[ -z "$START_TIMEOUT" ]]; then
+ START_TIMEOUT=600
+fi
+
+start_timeout_exceeded=false
+count=0
+step=10
+while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
+ echo "waiting for kafka to be ready"
+ sleep $step;
+ count=$(expr $count + $step)
+ if [ $count -gt $START_TIMEOUT ]; then
+ start_timeout_exceeded=true
+ break
+ fi
+done
+
+if $start_timeout_exceeded; then
+ echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
+ exit 1
+fi
+
+if [[ -n $KAFKA_CREATE_TOPICS ]]; then
+ IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
+ echo "creating topics: $topicToCreate"
+ IFS=':' read -a topicConfig <<< "$topicToCreate"
+ JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}"
+ done
+fi
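Note: create-topics.sh waits for the broker port to open and then parses KAFKA_CREATE_TOPICS as comma-separated name:partitions:replicas triples. A hedged sketch of the environment it expects (the topic list is illustrative):

  # assumed environment for auto-creating topics at container start
  export KAFKA_PORT=9092
  export KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
  export KAFKA_CREATE_TOPICS="APPC-TEST1:1:1,ECOMP-PORTAL-INBOX:1:1"   # name:partitions:replicas
  create-topics.sh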
diff --git a/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown b/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
diff --git a/demo/docker_files/data-kafka/kafka-logs/.lock b/demo/docker_files/data-kafka/kafka-logs/.lock
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/.lock
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
new file mode 100644
index 0000000..85ee8bf
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log
new file mode 100644
index 0000000..66dcea9
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
new file mode 100644
index 0000000..bb73f23
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
new file mode 100644
index 0000000..53364c5
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
new file mode 100644
index 0000000..b466eda
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
new file mode 100644
index 0000000..bc5db56
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
new file mode 100644
index 0000000..978eeb6
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
new file mode 100644
index 0000000..7c1c0f6
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
new file mode 100644
index 0000000..a0afe1d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
new file mode 100644
index 0000000..e3e471a
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
new file mode 100644
index 0000000..a003b5d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
@@ -0,0 +1,27 @@
+0
+25
+ECOMP-PORTAL-OUTBOX-VID1 0 0
+PDPD-CONFIGURATION 0 2
+msgrtr.apinode.metrics.dmaap 1 26
+unauthenticated.SEC_MEASUREMENT_OUTPUT 1 1
+APPC-TEST2 0 0
+unauthenticated.TCA_EVENT_OUTPUT 1 1
+APPC-TEST1 0 0
+APPC-CL 0 2
+ECOMP-PORTAL-INBOX 0 0
+APPC-CL 1 0
+APPC-TEST2 1 1
+unauthenticated.TCA_EVENT_OUTPUT 0 1
+unauthenticated.SEC_MEASUREMENT_OUTPUT 0 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 1
+PDPD-CONFIGURATION 1 0
+DCAE-CL-EVENT 1 1
+msgrtr.apinode.metrics.dmaap 0 4
+ECOMP-PORTAL-OUTBOX-APP1 0 0
+ECOMP-PORTAL-OUTBOX-SDC1 0 0
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+DCAE-CL-EVENT 0 1
+ECOMP-PORTAL-OUTBOX-DBC1 0 0
+ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
new file mode 100644
index 0000000..a003b5d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
@@ -0,0 +1,27 @@
+0
+25
+ECOMP-PORTAL-OUTBOX-VID1 0 0
+PDPD-CONFIGURATION 0 2
+msgrtr.apinode.metrics.dmaap 1 26
+unauthenticated.SEC_MEASUREMENT_OUTPUT 1 1
+APPC-TEST2 0 0
+unauthenticated.TCA_EVENT_OUTPUT 1 1
+APPC-TEST1 0 0
+APPC-CL 0 2
+ECOMP-PORTAL-INBOX 0 0
+APPC-CL 1 0
+APPC-TEST2 1 1
+unauthenticated.TCA_EVENT_OUTPUT 0 1
+unauthenticated.SEC_MEASUREMENT_OUTPUT 0 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 1
+PDPD-CONFIGURATION 1 0
+DCAE-CL-EVENT 1 1
+msgrtr.apinode.metrics.dmaap 0 4
+ECOMP-PORTAL-OUTBOX-APP1 0 0
+ECOMP-PORTAL-OUTBOX-SDC1 0 0
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+DCAE-CL-EVENT 0 1
+ECOMP-PORTAL-OUTBOX-DBC1 0 0
+ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log
new file mode 100644
index 0000000..33bee2d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log
new file mode 100644
index 0000000..69b1e68
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log
new file mode 100644
index 0000000..68a76bc
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log
new file mode 100644
index 0000000..89ec482
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.1 b/demo/docker_files/data-zookeeper/version-2/log.1
new file mode 100644
index 0000000..f3cb136
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.1
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.103 b/demo/docker_files/data-zookeeper/version-2/log.103
new file mode 100644
index 0000000..9b648e2
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.103
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.125 b/demo/docker_files/data-zookeeper/version-2/log.125
new file mode 100644
index 0000000..0613642
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.125
Binary files differ
diff --git a/demo/docker_files/download-kafka.sh b/demo/docker_files/download-kafka.sh
new file mode 100644
index 0000000..2ddc911
--- /dev/null
+++ b/demo/docker_files/download-kafka.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
+url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/demo/docker_files/dump_mr_state.sh b/demo/docker_files/dump_mr_state.sh
new file mode 100644
index 0000000..75c16d6
--- /dev/null
+++ b/demo/docker_files/dump_mr_state.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+
+TIMESTAMP=`date +%Y%m%d%H%M`
+CONTAINERID=`docker ps |grep kafka |cut -b1-12`
+docker cp $CONTAINERID:/kafka ./data-kafka-$TIMESTAMP
+tar zcvf ./data-kafka-$TIMESTAMP.tgz ./data-kafka-$TIMESTAMP
+CONTAINERID=`docker ps |grep zookeeper |cut -b1-12`
+docker cp $CONTAINERID:/opt/zookeeper-3.4.9/data ./data-zookeeper-$TIMESTAMP
+tar zcvf ./data-zookeeper-$TIMESTAMP.tgz ./data-zookeeper-$TIMESTAMP
diff --git a/demo/docker_files/mykey b/demo/docker_files/mykey
new file mode 100644
index 0000000..c2b8b87
--- /dev/null
+++ b/demo/docker_files/mykey
@@ -0,0 +1,27 @@
+_sNOLphPzrU7L0L3oWv0pYwgV_ddGF1XoBsQEIAp34jfP-fGJFPfFYaMpDEZ3gwH59rNw6qyMZHk
+k-4irklvVcWk36lC3twNvc0DueRCVrws1bkuhOLCXdxHJx-YG-1xM8EJfRmzh79WPlPkbAdyPmFF
+Ah44V0GjAnInPOFZA6MHP9rNx9B9qECHRfmvzU13vJCcgTsrmOr-CEiWfRsnzPjsICxpq9OaVT_D
+zn6rNaroGm1OiZNCrCgvRkCUHPOOCw3j9G1GeaImoZNYtozbz9u4sj13PU-MxIIAa64b1bMMMjpz
+Upc8lVPI4FnJKg6axMmEGn5zJ6JUq9mtOVyPj__2GEuDgpx5H4AwodXXVjFsVgR8UJwI_BvS2JVp
+JoQk0J1RqXmAXVamlsMAfzmmbARXgmrBfnuhveZnh9ymFVU-YZeujdANniXAwBGI7c6hG_BXkH7i
+Eyf4Fn41_SV78PskP6qgqJahr9r3bqdjNbKBztIKCOEVrE_w3IM5r02l-iStk_NBRkj6cq_7VCpG
+afxZ2CtZMwuZMiypO_wOgbdpCSKNzsL-NH2b4b08OlKiWb263gz634KJmV5WEfCl-6eH-JUFbWOS
+JwQfActLNT2ZQPl2MyZQNBzJEWoJRgS6k7tPRO-zqeUtYYHGHVMCxMuMHGQcoilNNHEFeBCG_fBh
+yAKb9g9F86Cbx9voMLiyTX2T3rwVHiSJFOzfNxGmfN5JWOthIun_c5hEY1tLQ15BomzkDwk7BAj7
+VbRCrVD45B6xrmSTMBSWYmLyr6mnQxQqeh9cMbD-0ZAncE3roxRnRvPKjFFa208ykYUp2V83r_PJ
+fV5I9ZPKSjk9DwFyrjkcQQEYDhdK6IFqcd6nEthjYVkmunu2fsX0bIOm9GGdIbKGqBnpdgBO5hyT
+rBr9HSlZrHcGdti1R823ckDF0Ekcl6kioDr5NLIpLtg9zUEDRm3QrbX2mv5Zs8W0pYnOqglxy3lz
+bJZTN7oR7VasHUtjmp0RT9nLZkUs5TZ6MHhlIq3ZsQ6w_Q9Rv1-ofxfwfCC4EBrWKbWAGCf6By4K
+Ew8321-2YnodhmsK5BrT4zQ1DZlmUvK8BmYjZe7wTljKjgYcsLTBfX4eMhJ7MIW1kpnl8AbiBfXh
+QzN56Mki51Q8PSQWHm0W9tnQ0z6wKdck6zBJ8JyNzewZahFKueDTn-9DOqIDfr3YHvQLLzeXyJ8e
+h4AgjW-hvlLzRGtkCknjLIgXVa3rMTycseAwbW-mgdCqqkw3SdEG8feAcyntmvE8j2jbtSDStQMB
+9JdvyNLuQdNG4pxpusgvVso0-8NQF0YVa9VFwg9U6IPSx5p8FcW68OAHt_fEgT4ZtiH7o9aur4o9
+oYqUh2lALCY-__9QLq1KkNjMKs33Jz9E8LbRerG9PLclkTrxCjYAeUWBjCwSI7OB7xkuaYDSjkjj
+a46NLpdBN1GNcsFFcZ79GFAK0_DsyxGLX8Tq6q0Bvhs8whD8wlSxpTGxYkyqNX-vcb7SDN_0WkCE
+XSdZWkqTHXcYbOvoCOb_e6SFAztuMenuHWY0utX0gBfx_X5lPDFyoYXErxFQHiA7t27keshXNa6R
+ukQRRS8kMjre1U74sc-fRNXkXpl57rG4rgxaEX0eBeowa53KAsVvUAoSac2aC_nfzXrDvoyf9Xi3
+JpEZNhUDLpFCEycV4I7jGQ9wo9qNaosvlsr6kbLDNdb_1xrGVgjT3xEvRNJNPqslSAu-yD-UFhC3
+AmCdYUnugw_eEFqXCHTARcRkdPPvl2XsmEKY2IqEeO5tz4DyXQFaL-5hEVh6lYEU1EOWHk3UGIXe
+Vc5_Ttp82qNLmlJPbZvgmNTJzYTHDQ_27KBcp7IVVZgPDjVKdWqQvZ18KhxvfF3Idgy82LBZniFV
+IbtxllXiPRxoPQriSXMnXjh3XkvSDI2pFxXfEvLRn1tvcFOwPNCz3QfPIzYg8uYXN5bRt3ZOrR_g
+ZhIlrc7HO0VbNbeqEVPKMZ-cjkqGj4VAuDKoQc0eQ6X_wCoAGO78nPpLeIvZPx1X3z5YoqNA \ No newline at end of file
diff --git a/demo/docker_files/preconfigure-ecomp-keystopics.sh b/demo/docker_files/preconfigure-ecomp-keystopics.sh
new file mode 100644
index 0000000..03cf45c
--- /dev/null
+++ b/demo/docker_files/preconfigure-ecomp-keystopics.sh
@@ -0,0 +1,191 @@
+#!/bin/bash
+
+HOSTPORT="127.0.0.1:3904"
+KEYDIR="."
+
+
+# dictionary of API keys and the topics owned by each API key
+declare -A topics
+topics=( \
+["anonymous"]="APPC-CL APPC-TEST2 PDPD-CONFIGURATION POLICY-CL-MGT DCAE-CL-EVENT unauthenticated.SEC_MEASUREMENT_OUTPUT unauthenticated.TCA_EVENT_OUTPUT " \
+["apikey-SDC1"]="SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1" \
+["apikey-APPC1"]="APPC-TEST1" \
+["apikey-PORTAL1"]="ECOMP-PORTAL-INBOX" \
+["apikey-PORTALAPP1"]="ECOMP-PORTAL-OUTBOX-APP1" \
+["apikey-PORTALDBC1"]="ECOMP-PORTAL-OUTBOX-DBC1" \
+["apikey-PORTALSDC1"]="ECOMP-PORTAL-OUTBOX-SDC1" \
+["apikey-PORTALVID1"]="ECOMP-PORTAL-OUTBOX-VID1" \
+["apikey-PORTALPOL1"]="ECOMP-PORTAL-OUTBOX-POL1" \
+)
+
+# dictionary of producers for each topic
+declare -A acl_producers
+acl_producers=(\
+["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["ECOMP-PORTAL-INBOX"]="apikey-PORTALAPP1 apikey-PORTALDBC1 apikey-PORTALSDC1 apikey-PORTALVID1 apikey-PORTALPOL1" \
+["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTAL1" \
+["APPC-TEST1"]="apikey-APPC1" \
+)
+
+# dictionary of consumers for each topic
+declare -A acl_consumers
+acl_consumers=(\
+["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["ECOMP-PORTAL-INBOX"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTALAPP1" \
+["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTALDBC1" \
+["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTALSDC1" \
+["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTALVID1" \
+["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTALPOL1" \
+["APPC-TEST1"]="apikey-APPC1" \
+)
+
+myrun () {
+ CMD="$1"
+ echo "CMD:[$CMD]"
+ eval $CMD
+}
+
+getowner () {
+ local -n outowner=$2
+ target_topic="$1"
+ echo "look for owner for $target_topic"
+ for o in "${!topics[@]}"; do
+ keytopics=${topics[$o]}
+ for topic in ${keytopics}; do
+ if [ "$topic" == "-" ]; then
+ continue
+ fi
+ if [ "$topic" == "$target_topic" ]; then
+ echo "found owner $o"
+ outowner=$o
+ return
+ fi
+ done
+ done
+}
+
+add_acl () {
+ acl_group="$1"
+ topic="$2"
+ client="$3"
+ echo " adding $client to group $acl_group for topic $2"
+
+ getowner "$topic" owner
+ echo "==owner for $topic is $owner"
+
+
+ if [ -z "$owner" ]; then
+ echo "No owner API key found for topic $topic"
+ #exit
+ fi
+ OWNER_API_KEYFILE="${KEYDIR}/${owner}.key"
+ if [ ! -e $API_KEYFILE ]; then
+ echo "No API key file $OWNER_API_KEYFILE for owner $owner of topic $topic, exit "
+ #exit
+ fi
+
+ CLIENT_API_KEYFILE="${KEYDIR}/${client}.key"
+ if [ ! -e $CLIENT_API_KEYFILE ]; then
+ echo "No API key file $CLIENT_API_KEYFILE for client $client, exit "
+ #exit
+ else
+ CLIENTKEY=`cat ${CLIENT_API_KEYFILE} |jq -r ".key"`
+ UEBAPIKEYSECRET=`cat ${OWNER_API_KEYFILE} |jq -r ".secret"`
+ UEBAPIKEYKEY=`cat ${OWNER_API_KEYFILE} |jq -r ".key"`
+ time=`date --iso-8601=seconds`
+ signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+ xAuth=$UEBAPIKEYKEY:$signature
+ xDate="$time"
+ CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth:$xAuth\" -H \"X-CambriaDate:$xDate\" -X PUT http://${HOSTPORT}/topics/${topic}/${acl_group}/${CLIENTKEY}"
+ myrun "$CMD"
+ fi
+}
+
+
+for key in "${!topics[@]}"; do
+ # try to create key if no such key exists
+ API_KEYFILE="${KEYDIR}/${key}.key"
+ if [ "$key" != "anonymous" ]; then
+ if [ -e ${API_KEYFILE} ]; then
+ echo "API key for $key already exists, no need to create new"
+ else
+ echo "generating API key $key"
+ echo '{"email":"no email","description":"API key for '$key'"}' > /tmp/input.txt
+
+ CMD="curl -s -o ${API_KEYFILE} -H \"Content-Type: application/json\" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create"
+ myrun "$CMD"
+ echo "API key for $key has been created: "; cat ${API_KEYFILE}
+ echo "generating API key $key done"; echo
+ fi
+ fi
+
+ # create the topics for this key
+ keytopics=${topics[$key]}
+ for topic in ${keytopics}; do
+ if [ "$topic" == "-" ]; then
+ continue
+ fi
+ if [ "$key" == "anonymous" ]; then
+ echo "creating anonymous topic $topic"
+ CMD="curl -H \"Content-Type:text/plain\" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${topic}"
+ myrun "$CMD"
+ echo "done creating anonymous topic $topic"; echo
+ else
+ echo "creating API key secured topic $topic for API key $key"
+ UEBAPIKEYSECRET=`cat ${API_KEYFILE} |jq -r ".secret"`
+ UEBAPIKEYKEY=`cat ${API_KEYFILE} |jq -r ".key"`
+ echo '{"topicName":"'${topic}'","topicDescription":"'$key' API Key secure topic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+ time=`date --iso-8601=seconds`
+ signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+ xAuth=$UEBAPIKEYKEY:$signature
+ xDate="$time"
+ CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth: $xAuth\" -H \"X-CambriaDate: $xDate\" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create"
+ myrun "$CMD"
+ echo "done creating api key topic $topic"
+ echo
+ fi
+ done
+done
+
+
+echo
+echo "============ post loading state of topics ================="
+CMD="curl http://${HOSTPORT}/topics"
+myrun "$CMD"
+for key in "${!topics[@]}"; do
+ keytopics=${topics[$key]}
+ echo "---------- key: ${key} "
+ for topic in ${keytopics}; do
+ if [ "$topic" == "-" ]; then
+ continue
+ fi
+ CMD="curl http://${HOSTPORT}/topics/${topic}"
+ myrun "$CMD"
+ echo
+ done
+ echo "end of key: ${key} secured topics"
+done
+
+
+# adding publisher and subscriber ACL
+for topic in "${!acl_consumers[@]}"; do
+ consumers=${acl_consumers[$topic]}
+ for consumer in ${consumers}; do
+ add_acl "consumers" "$topic" "$consumer"
+ done
+done
+
+for topic in "${!acl_producers[@]}"; do
+ producers=${acl_producers[$topic]}
+ for producer in ${producers}; do
+ add_acl "producers" "$topic" "$producer"
+ done
+done
+
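Note: the loops above create anonymous topics implicitly by POSTing an event, create API-key-secured topics through /topics/create with signed headers, and then grant producer/consumer ACLs with PUT /topics/{topic}/{group}/{key}. A condensed hedged sketch of securing one topic by hand along the same lines (the topic name is illustrative):

  # assumed manual equivalent of one secured-topic iteration above
  HOSTPORT=127.0.0.1:3904
  KEY=$(jq -r .key apikey-APPC1.key); SECRET=$(jq -r .secret apikey-APPC1.key)
  TS=$(date --iso-8601=seconds)
  SIG=$(echo -n "$TS" | openssl sha1 -hmac "$SECRET" -binary | openssl base64)
  echo '{"topicName":"MY-SECURED-TOPIC","topicDescription":"example","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
  curl -i -H "Content-Type: application/json" -H "X-CambriaAuth: $KEY:$SIG" -H "X-CambriaDate: $TS" \
       -X POST -d @/tmp/topicname.txt "http://${HOSTPORT}/topics/create"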
diff --git a/demo/docker_files/start-kafka.sh b/demo/docker_files/start-kafka.sh
new file mode 100644
index 0000000..4d955da
--- /dev/null
+++ b/demo/docker_files/start-kafka.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+if [[ -z "$KAFKA_PORT" ]]; then
+ export KAFKA_PORT=9092
+fi
+if [[ -z "$KAFKA_ADVERTISED_PORT" ]]; then
+ export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
+fi
+if [[ -z "$KAFKA_BROKER_ID" ]]; then
+ # By default auto allocate broker ID
+ #export KAFKA_BROKER_ID=-1
+ export KAFKA_BROKER_ID=1
+fi
+#if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+ #export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+ export KAFKA_LOG_DIRS="/kafka/kafka-logs"
+#fi
+if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
+ export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
+fi
+
+if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
+ sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
+ unset KAFKA_HEAP_OPTS
+fi
+
+if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
+ export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
+fi
+
+for VAR in `env`
+do
+ if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
+ kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
+ env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
+ if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
+ sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
+ else
+ echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
+ fi
+ fi
+done
+
+if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
+ eval $CUSTOM_INIT_SCRIPT
+fi
+
+
+KAFKA_PID=0
+
+# see https://medium.com/@gchudnov/trapping-signals-in-docker-containers-7a57fdda7d86#.bh35ir4u5
+term_handler() {
+ echo 'Stopping Kafka....'
+ if [ $KAFKA_PID -ne 0 ]; then
+ kill -s TERM "$KAFKA_PID"
+ wait "$KAFKA_PID"
+ fi
+ echo 'Kafka stopped.'
+ exit
+}
+
+
+# Capture kill requests to stop properly
+trap "term_handler" SIGHUP SIGINT SIGTERM
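+# Bootstrap the demo topics in the background while the broker starts, then run Kafka
+# in the background too and wait on its PID so the trap above can shut it down cleanly.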
+create-topics.sh &
+$KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties &
+KAFKA_PID=$!
+
+wait "$KAFKA_PID"
diff --git a/demo/docker_files/state-20170301.tar.gz b/demo/docker_files/state-20170301.tar.gz
new file mode 100644
index 0000000..b36b05a
--- /dev/null
+++ b/demo/docker_files/state-20170301.tar.gz
Binary files differ
diff --git a/demo/docker_files/subscriber.sh b/demo/docker_files/subscriber.sh
new file mode 100644
index 0000000..3e193f0
--- /dev/null
+++ b/demo/docker_files/subscriber.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+
+# GET /events/{topic}/{consumerGroup}/{clientId}
+# minimal runnable sketch; HOSTPORT, topic, consumer group and client id below are
+# placeholder defaults (override via environment or positional arguments)
+HOSTPORT="${HOSTPORT:-127.0.0.1:3904}"
+curl -X GET "http://${HOSTPORT}/events/${1:-some-topic}/${2:-group1}/${3:-C1}?timeout=5000"
diff --git a/demo/docker_files/tests/data.2.2.json b/demo/docker_files/tests/data.2.2.json
new file mode 100644
index 0000000..c1bcdfd
--- /dev/null
+++ b/demo/docker_files/tests/data.2.2.json
@@ -0,0 +1 @@
+{ "topicName": "Topic-28592-2", "topicDescription": "topic for test 2.2", "partitionCount": "1", "replicationCount": "1", "transactionEnabled": "true" }
diff --git a/demo/docker_files/tests/data.2.5.json b/demo/docker_files/tests/data.2.5.json
new file mode 100644
index 0000000..75bade1
--- /dev/null
+++ b/demo/docker_files/tests/data.2.5.json
@@ -0,0 +1 @@
+{ "datestamp": "Thu Dec 15 19:50:28 UTC 2016", "appkey": "x100", "appval": "some value" }
diff --git a/demo/docker_files/tests/data.3.1.txt b/demo/docker_files/tests/data.3.1.txt
new file mode 100644
index 0000000..c6a738a
--- /dev/null
+++ b/demo/docker_files/tests/data.3.1.txt
@@ -0,0 +1 @@
+datestamp: Thu Dec 15 19:50:38 UTC 2016, key: 3.1, value: this is a test
diff --git a/demo/docker_files/tests/data.3.3.json b/demo/docker_files/tests/data.3.3.json
new file mode 100644
index 0000000..9866789
--- /dev/null
+++ b/demo/docker_files/tests/data.3.3.json
@@ -0,0 +1 @@
+{ "datestamp": "Thu Dec 15 19:50:40 UTC 2016", "key": "3.3", "value": "this is a test" }
diff --git a/demo/docker_files/tests/key.req b/demo/docker_files/tests/key.req
new file mode 100644
index 0000000..a7e4092
--- /dev/null
+++ b/demo/docker_files/tests/key.req
@@ -0,0 +1 @@
+{ "email": "no.email", "description": "request for direct response KEY" }
diff --git a/demo/docker_files/tests/out/1.1.out b/demo/docker_files/tests/out/1.1.out
new file mode 100644
index 0000000..a9488d8
--- /dev/null
+++ b/demo/docker_files/tests/out/1.1.out
@@ -0,0 +1,5 @@
+{"topics": [
+ "msgrtr.apinode.metrics.dmaap",
+ "28537.3",
+ "Topic-28537-2"
+]} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.1.out b/demo/docker_files/tests/out/2.1.out
new file mode 100644
index 0000000..ef4eada
--- /dev/null
+++ b/demo/docker_files/tests/out/2.1.out
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "request for direct response KEY",
+ "email": "no.email"
+ },
+ "secret": "5V6YSDm8R6v6TArrLLtJUx4L",
+ "key": "HnJm7b9Zr16hgpU5"
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.2.out b/demo/docker_files/tests/out/2.2.out
new file mode 100644
index 0000000..d682023
--- /dev/null
+++ b/demo/docker_files/tests/out/2.2.out
@@ -0,0 +1,13 @@
+{
+ "owner": "HnJm7b9Zr16hgpU5",
+ "readerAcl": {
+ "enabled": true,
+ "users": []
+ },
+ "name": "Topic-28592-2",
+ "description": "topic for test 2.2",
+ "writerAcl": {
+ "enabled": true,
+ "users": []
+ }
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.3.out b/demo/docker_files/tests/out/2.3.out
new file mode 100644
index 0000000..d62034e
--- /dev/null
+++ b/demo/docker_files/tests/out/2.3.out
@@ -0,0 +1,6 @@
+{"topics": [
+ "Topic-28592-2",
+ "msgrtr.apinode.metrics.dmaap",
+ "28537.3",
+ "Topic-28537-2"
+]} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.4.out b/demo/docker_files/tests/out/2.4.out
new file mode 100644
index 0000000..d682023
--- /dev/null
+++ b/demo/docker_files/tests/out/2.4.out
@@ -0,0 +1,13 @@
+{
+ "owner": "HnJm7b9Zr16hgpU5",
+ "readerAcl": {
+ "enabled": true,
+ "users": []
+ },
+ "name": "Topic-28592-2",
+ "description": "topic for test 2.2",
+ "writerAcl": {
+ "enabled": true,
+ "users": []
+ }
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.5.out b/demo/docker_files/tests/out/2.5.out
new file mode 100644
index 0000000..670bf46
--- /dev/null
+++ b/demo/docker_files/tests/out/2.5.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 9,
+ "count": 1
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.6.out b/demo/docker_files/tests/out/2.6.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/demo/docker_files/tests/out/2.6.out
@@ -0,0 +1 @@
+[] \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.1.out b/demo/docker_files/tests/out/3.1.out
new file mode 100644
index 0000000..d2a9b4e
--- /dev/null
+++ b/demo/docker_files/tests/out/3.1.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 175,
+ "count": 1
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.2.out b/demo/docker_files/tests/out/3.2.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/demo/docker_files/tests/out/3.2.out
@@ -0,0 +1 @@
+[] \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.3.out b/demo/docker_files/tests/out/3.3.out
new file mode 100644
index 0000000..b823f1c
--- /dev/null
+++ b/demo/docker_files/tests/out/3.3.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 2,
+ "count": 1
+} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.4.out b/demo/docker_files/tests/out/3.4.out
new file mode 100644
index 0000000..9930076
--- /dev/null
+++ b/demo/docker_files/tests/out/3.4.out
@@ -0,0 +1 @@
+["{\"datestamp\":\"Thu Dec 15 19:50:40 UTC 2016\",\"value\":\"this is a test\",\"key\":\"3.3\"}"] \ No newline at end of file
diff --git a/demo/docker_files/tests/regress.sh b/demo/docker_files/tests/regress.sh
new file mode 100644
index 0000000..758dd7c
--- /dev/null
+++ b/demo/docker_files/tests/regress.sh
@@ -0,0 +1,113 @@
+#!/bin/ksh
+#
+# depends on jq - https://stedolan.github.io/jq/
+
+PROTOCOL=http
+FQDN=127.0.0.1
+#vm1-message-router
+#FQDN=10.208.128.229
+PORT=3904
+URL=$PROTOCOL://$FQDN:$PORT
+
+rm -f out/*
+mkdir -p out
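+# each test case writes its raw response body to out/$SUITE.$TN.out; the files checked
+# in under tests/out/ look like outputs captured from an earlier run of this suite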
+
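+# results <actual> <expected> <test-id> <label>: print SUCCESS or FAIL for one check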
+results() {
+# echo "[debug] compare $1 to $2"
+ if [ $1 == $2 ]
+ then
+ echo -n "SUCCESS "
+ else
+ echo -n "FAIL ($1) "
+ fi
+ echo " :TEST $3 ($4)"
+}
+SUITE=0
+SUITE=$((SUITE + 1))
+echo "SUITE $SUITE: List topics"
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
+results $rc $expect $TC "list"
+StartTopicCount=`cat out/$TC.out | wc -l`
+
+
+SUITE=$((SUITE + 1))
+echo
+echo "SUITE $SUITE: APIKEY authenticated topic"
+TOPIC=Topic-$$-$SUITE
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+OUT=out/$TC.out
+echo '{ "email": "no.email", "description": "request for direct response KEY" }' > key.req
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @key.req $URL/apiKeys/create`
+results $rc $expect $SUITE.$TN "gen apikey "
+TN=$((TN + 1))
+TC=$SUITE.$TN
+SECRET=$(jq ".secret" $OUT | cut -f 2 -d \")
+KEY=$(jq ".key" $OUT | cut -f 2 -d \")
+TIME=`date --iso-8601=seconds`
+SIG=$(echo -n "$TIME" | openssl sha1 -hmac $SECRET -binary | openssl base64)
+xAUTH=$KEY:$SIG
+#echo "[debug] $SECRET $KEY $TIME $SIG $xAUTH"
+DATA=data.$TC.json
+echo "{ \"topicName\": \"$TOPIC\", \"topicDescription\": \"topic for test $TC\", \"partitionCount\": \"1\", \"replicationCount\": \"1\", \"transactionEnabled\": \"true\" }" > $DATA
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/topics/create`
+results $rc $expect $SUITE.$TN "create topic"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
+results $rc $expect $TC "list "
+TopicCnt=`cat out/$TC.out | wc -l`
+results $TopicCnt $((StartTopicCount + 1)) $TC "topic count"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics/$TOPIC`
+results $rc $expect $TC "list $TOPIC"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.json
+echo "{ \"datestamp\": \"`date`\", \"appkey\": \"x100\", \"appval\": \"some value\" }" > $DATA
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub APIKEY topic"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X GET -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" $URL/events/$TOPIC/g0/u1`
+results $rc $expect $SUITE.$TN "sub APIKEY topic"
+
+
+SUITE=$((SUITE + 1))
+echo
+echo "SUITE $SUITE: anonymous topic"
+TOPIC=$$.$SUITE
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.txt
+echo "datestamp: `date`, key: $TC, value: this is a test " > $DATA
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: text/plain" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub text/plain"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
+results $rc $expect $SUITE.$TN "sub text/plain"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.json
+echo "{ \"datestamp\": \"`date`\", \"key\": \"$TC\", \"value\": \"this is a test\" }" > $DATA
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub json"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
+results $rc $expect $SUITE.$TN "sub json"
+
diff --git a/demo/docker_files/tests/test.sh b/demo/docker_files/tests/test.sh
new file mode 100644
index 0000000..0e06d5a
--- /dev/null
+++ b/demo/docker_files/tests/test.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+# lji: this is basically what Dom has in his regression test, redone here in bash instead of ksh
+
+HOSTPORT="127.0.0.1:3904"
+ANONTOPIC="anon-topic-$RANDOM"
+APITOPIC="api-topic-$RANDOM"
+APIKEYFILE="/tmp/key"
+
+echo "blah" > /tmp/sample.txt
+
+if [ ! -e /usr/bin/jq ]; then
+ apt-get update && apt-get -y install jq
+fi
+
+
+# list topics
+curl http://${HOSTPORT}/topics
+
+# publish to an anonymous topic (the first publish creates the topic)
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+# subscribe to an anonymous topic
+curl -H "Content-Type:text/plain" -X GET http://${HOSTPORT}/events/$ANONTOPIC/group1/C1?timeout=5000 &
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+
+
+
+# create api key
+echo '{"email":"no email","description":"API key and secret both in reponse"}' > /tmp/input.txt
+curl -s -o ${APIKEYFILE} -H "Content-Type:application/json" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create
+UEBAPIKEYSECRET=`cat ${APIKEYFILE} |jq -r ".secret"`
+UEBAPIKEYKEY=`cat ${APIKEYFILE} |jq -r ".key"`
+
+# create an API key secured topic
+# pay attention to the replication count
+echo '{"topicName":"'${APITOPIC}'","topicDescription":"This is an API key securedTopic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create
+
+# first subscribe and run it in the background, then publish.
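+# the signature is recomputed over a fresh timestamp so X-CambriaAuth and X-CambriaDate stay consistent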
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X GET http://${HOSTPORT}/events/${APITOPIC}/g0/u1 &
+curl -H "Content-Type:text/plain" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${APITOPIC}
diff --git a/demo/docker_files/uebapikey-sdc b/demo/docker_files/uebapikey-sdc
new file mode 100644
index 0000000..0b3aa80
--- /dev/null
+++ b/demo/docker_files/uebapikey-sdc
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for SDC",
+ "email": "no email"
+ },
+ "secret": "KpMJB28vNduEJ0zHDWOQXBmQ",
+ "key": "779NflzwmkuKpqef"
+} \ No newline at end of file