aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitreview4
-rw-r--r--LICENSE.txt22
-rw-r--r--README.md10
-rwxr-xr-xdeploy.sh33
-rw-r--r--docker_files/Dockerfile22
-rwxr-xr-xdocker_files/__MsgRtrApi.properties138
-rw-r--r--docker_files/__docker-compose.yml50
-rw-r--r--docker_files/broker-list.sh5
-rw-r--r--docker_files/cadi.properties35
-rw-r--r--docker_files/create-topics.sh32
-rw-r--r--docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown0
-rw-r--r--docker_files/data-kafka/kafka-logs/.lock0
-rw-r--r--docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log0
-rw-r--r--docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.logbin0 -> 43 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.logbin0 -> 43 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log0
-rw-r--r--docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log0
-rw-r--r--docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.logbin0 -> 43 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.logbin0 -> 43 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log0
-rw-r--r--docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log0
-rw-r--r--docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index0
-rw-r--r--docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.logbin0 -> 258 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.indexbin0 -> 56 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.logbin0 -> 33116 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.indexbin0 -> 40 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.logbin0 -> 29934 bytes
-rw-r--r--docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint14
-rw-r--r--docker_files/data-kafka/kafka-logs/replication-offset-checkpoint14
-rw-r--r--docker_files/data-zookeeper/version-2/log.1bin0 -> 67108880 bytes
-rw-r--r--docker_files/data-zookeeper/version-2/log.98bin0 -> 67108880 bytes
-rw-r--r--docker_files/data-zookeeper/version-2/log.b0bin0 -> 67108880 bytes
-rw-r--r--docker_files/dgltest/data.2.2.json1
-rw-r--r--docker_files/dgltest/data.2.5.json1
-rw-r--r--docker_files/dgltest/data.3.1.txt1
-rw-r--r--docker_files/dgltest/data.3.3.json1
-rw-r--r--docker_files/dgltest/key.req1
-rw-r--r--docker_files/dgltest/out/1.1.out5
-rw-r--r--docker_files/dgltest/out/2.1.out8
-rw-r--r--docker_files/dgltest/out/2.2.out13
-rw-r--r--docker_files/dgltest/out/2.3.out6
-rw-r--r--docker_files/dgltest/out/2.4.out13
-rw-r--r--docker_files/dgltest/out/2.5.out4
-rw-r--r--docker_files/dgltest/out/2.6.out1
-rw-r--r--docker_files/dgltest/out/3.1.out4
-rw-r--r--docker_files/dgltest/out/3.2.out1
-rw-r--r--docker_files/dgltest/out/3.3.out4
-rw-r--r--docker_files/dgltest/out/3.4.out1
-rwxr-xr-xdocker_files/dgltest/regress.sh113
-rwxr-xr-xdocker_files/dme2reg/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/.gitignore2
-rw-r--r--docker_files/download-kafka.sh5
-rwxr-xr-xdocker_files/dump_mr_state.sh10
-rw-r--r--docker_files/env.sh6
-rwxr-xr-xdocker_files/init-ecomp-topics.sh51
-rw-r--r--docker_files/ljitest/test.sh50
-rw-r--r--docker_files/mykey27
-rw-r--r--docker_files/start-kafka.sh69
-rw-r--r--docker_files/uebapikey-sdc8
67 files changed, 785 insertions, 0 deletions
diff --git a/.gitreview b/.gitreview
new file mode 100644
index 0000000..f1bca61
--- /dev/null
+++ b/.gitreview
@@ -0,0 +1,4 @@
+[gerrit]
+host=gerrit.openecomp.org
+port=29418
+project=dcae/demo/startup/message-router.git
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..30471b5
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,22 @@
+
+/*-
+ * ============LICENSE_START==========================================
+ * OPENECOMP - DCAE
+ * ===================================================================
+ * Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END============================================
+ */
+
+ECOMP and OpenECOMP are trademarks and service marks of AT&T Intellectual Property.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e5f22e2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,10 @@
+This project hosts the configurations and start-up scripts for instantiating the Open eCOMP Message Router.
+
+To deploy an Open eCOMP Message Router to a host:
+
+0. prepare the docker host:
+ a. install the following software: git, docker, docker-compose
+1. login to the docker host
+2. git clone this project
+3. edit the deploy.sh file with docker registry info and local configurations such as docker-compose
+4. run the deploy.sh as root
diff --git a/deploy.sh b/deploy.sh
new file mode 100755
index 0000000..d92a270
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+set -e
+
+# docker registry configurations
+# all docker container dependencies are now from docker hub, no need for our own any more
+
+# do not change this, it is already matched with the git repo file structure
+DOCKER_FILE_DIR="./docker_files"
+
+# commands to run docker and docker-compose
+DOCKER_COMPOSE_EXE="/opt/docker/docker-compose"
+DOCKER_EXE="docker"
+
+
+cd ${DOCKER_FILE_DIR}
+
+echo "prep any files with local configurations"
+if ls __* 1> /dev/null 2>&1; then
+ IP_DOCKER0=`ifconfig docker0 |grep "inet addr" | cut -d: -f2 |cut -d" " -f1`
+ TEMPLATES=`ls -1 __*`
+ for TEMPLATE in $TEMPLATES
+ do
+ FILENAME=${TEMPLATE//_}
+ if [ ! -z ${IP_DOCKER0} ]; then
+ sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" $TEMPLATE > $FILENAME
+ fi
+ done
+fi
+
+echo "starting docker operations"
+#${DOCKER_EXE} login --username=${DOCKER_REGISTRY_USERNAME} --password=${DOCKER_REGISTRY_PASSWORD} ${DOCKER_REGISTRY}
+${DOCKER_COMPOSE_EXE} up -d
diff --git a/docker_files/Dockerfile b/docker_files/Dockerfile
new file mode 100644
index 0000000..87e96ee
--- /dev/null
+++ b/docker_files/Dockerfile
@@ -0,0 +1,22 @@
+FROM anapsix/alpine-java
+
+MAINTAINER Wurstmeister
+
+RUN apk add --update unzip wget curl docker jq coreutils
+
+ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
+ADD download-kafka.sh /tmp/download-kafka.sh
+RUN chmod a+x /tmp/download-kafka.sh && sync && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
+
+VOLUME ["/kafka"]
+
+ENV KAFKA_HOME /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION}
+ADD start-kafka.sh /usr/bin/start-kafka.sh
+ADD broker-list.sh /usr/bin/broker-list.sh
+ADD create-topics.sh /usr/bin/create-topics.sh
+# The scripts need to have executable permission
+RUN chmod a+x /usr/bin/start-kafka.sh && \
+ chmod a+x /usr/bin/broker-list.sh && \
+ chmod a+x /usr/bin/create-topics.sh
+# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
+CMD ["start-kafka.sh"]
diff --git a/docker_files/__MsgRtrApi.properties b/docker_files/__MsgRtrApi.properties
new file mode 100755
index 0000000..e972675
--- /dev/null
+++ b/docker_files/__MsgRtrApi.properties
@@ -0,0 +1,138 @@
+###############################################################################
+##
+## Cambria API Server config
+##
+## - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+## - 3904 is standard as of 7/29/14.
+#
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+##
+config.zk.servers={{ ip.docker0 }}:2181
+#10.0.11.1:2181
+#10.208.128.229:2181
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+## if you want to change request.required.acks it can take this one value
+#kafka.metadata.broker.list=localhost:9092,localhost:9093
+kafka.metadata.broker.list={{ ip.docker0 }}:9092
+#10.0.11.1:9092
+#10.208.128.229:9092
+##kafka.request.required.acks=-1
+#kafka.client.zookeeper=${config.zk.servers}
+consumer.timeout.ms=100
+zookeeper.connection.timeout.ms=6000
+zookeeper.session.timeout.ms=6000
+zookeeper.sync.time.ms=2000
+auto.commit.interval.ms=1000
+fetch.message.max.bytes =1000000
+auto.commit.enable=false
+
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. to protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+#cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.sendEverySeconds=60
+
+cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
+
+##############################################################################
+#100mb
+maxcontentlength=10000
+
+
+##############################################################################
+#AAF Properties
+msgRtr.namespace.aaf=org.openecomp.dcae.dmaap.mtnje2.mr.topic
+msgRtr.topicfactory.aaf=org.openecomp.dcae.dmaap.topicFactory|:org.openecomp.dcae.dmaap.mtnje2.mr.topic:
+enforced.topic.name.AAF=org.openecomp
+forceAAF=false
+transidUEBtopicreqd=false
+defaultNSforUEB=org.openecomp.dmaap.mr.ueb
+##############################################################################
+#Mirror Maker Agent
+msgRtr.mirrormakeradmin.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.openecomp.dmaap.mr.dev.topicFactory|:org.openecomp.dmaap.mr.dev.topic:
+msgRtr.mirrormaker.timeout=15000
+msgRtr.mirrormaker.topic=org.openecomp.dmaap.mr.prod.mm.agent
+msgRtr.mirrormaker.consumergroup=mmagentserver
+msgRtr.mirrormaker.consumerid=1
diff --git a/docker_files/__docker-compose.yml b/docker_files/__docker-compose.yml
new file mode 100644
index 0000000..d9bbcd2
--- /dev/null
+++ b/docker_files/__docker-compose.yml
@@ -0,0 +1,50 @@
+version: '2'
+services:
+ zookeeper:
+ image: wurstmeister/zookeeper
+ ports:
+ - "2181:2181"
+ volumes:
+ - ./data-zookeeper:/opt/zookeeper-3.4.9/data
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
+
+ kafka:
+ build: .
+ ports:
+ - "9092:9092"
+ environment:
+ KAFKA_ADVERTISED_HOST_NAME: {{ ip.docker0 }}
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_BROKER_ID: 1
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+ - ./data-kafka:/kafka
+ - ./start-kafka.sh:/start-kafka.sh
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
+
+ dmaap:
+ image: attos/dmaap
+ #image: ecomp-nexus:51212/dmaapnew1
+ ports:
+ - "3904:3904"
+ - "3905:3905"
+ volumes:
+ - ./MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+ - ./cadi.properties:/appl/dmaapMR1/etc/cadi.properties
+ - ./mykey:/appl/dmaapMR1/etc/keyfile
+ depends_on:
+ - zookeeper
+ - kafka
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
diff --git a/docker_files/broker-list.sh b/docker_files/broker-list.sh
new file mode 100644
index 0000000..7f04639
--- /dev/null
+++ b/docker_files/broker-list.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
+BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
+echo $BROKERS | sed -e 's/ /,/g'
diff --git a/docker_files/cadi.properties b/docker_files/cadi.properties
new file mode 100644
index 0000000..17aaabe
--- /dev/null
+++ b/docker_files/cadi.properties
@@ -0,0 +1,35 @@
+#This properties file is used for defining AAF properties related to the CADI framework. This file is used for running AAF framework
+#using the ajsc-cadi plugin. For more information on the ajsc-cadi plugin, please goto wiki link:
+#http://wiki.web.att.com/display/ajsc/CADI-AJSC-Plugin
+#For more information on utilizing the AAF framework, please goto wiki link:
+#AAF wiki link: http://wiki.web.att.com/display/aaf/AAF+Documentation
+
+#Setting csp_domain to PROD will allow for testing using your attuid and password through GLO.
+#csp_domain=PROD
+#csp_devl_localhost=true
+
+basic_realm=opencomp.org
+basic_warn=TRUE
+
+cadi_loglevel=DEBUG
+#cadi_keyfile=target/swm/package/nix/dist_files/appl/${artifactId}/etc/keyfile2
+cadi_keyfile=/appl/dmaapMR1/etc/keyfile
+# Configure AAF
+aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
+
+#AJSC - MECHID
+aaf_id=dgl@openecomp.org
+aaf_password=enc:f2u5br1mh29M02-
+
+#aaf_id=m93659@ajsc.att.com
+#aaf_password=enc:NP_WI3mH4YPdWSrY4iLcbhRc4mQY
+aaf_timeout=5000
+aaf_clean_interval=1200000
+aaf_user_expires=60000
+aaf_high_count=1000000
+
+
+# The following properties are being set by the AJSC Container and should NOT need to be set here.
+AFT_LATITUDE=33.823589
+AFT_LONGITUDE=-84.366982
+AFT_ENVIRONMENT=AFTUAT
diff --git a/docker_files/create-topics.sh b/docker_files/create-topics.sh
new file mode 100644
index 0000000..e07bf06
--- /dev/null
+++ b/docker_files/create-topics.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+
+if [[ -z "$START_TIMEOUT" ]]; then
+ START_TIMEOUT=600
+fi
+
+start_timeout_exceeded=false
+count=0
+step=10
+while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
+ echo "waiting for kafka to be ready"
+ sleep $step;
+ count=$(expr $count + $step)
+ if [ $count -gt $START_TIMEOUT ]; then
+ start_timeout_exceeded=true
+ break
+ fi
+done
+
+if $start_timeout_exceeded; then
+ echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
+ exit 1
+fi
+
+if [[ -n $KAFKA_CREATE_TOPICS ]]; then
+ IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
+ echo "creating topics: $topicToCreate"
+ IFS=':' read -a topicConfig <<< "$topicToCreate"
+ JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}"
+ done
+fi
diff --git a/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown b/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
diff --git a/docker_files/data-kafka/kafka-logs/.lock b/docker_files/data-kafka/kafka-logs/.lock
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/.lock
diff --git a/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
diff --git a/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
new file mode 100644
index 0000000..607c576
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
new file mode 100644
index 0000000..51b4500
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
diff --git a/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
diff --git a/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
new file mode 100644
index 0000000..14d61f3
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
new file mode 100644
index 0000000..cd462c1
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
diff --git a/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
diff --git a/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..15727d7
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
new file mode 100644
index 0000000..21ee01e
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
new file mode 100644
index 0000000..cb02c46
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
new file mode 100644
index 0000000..d960b7c
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
new file mode 100644
index 0000000..edd435e
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
Binary files differ
diff --git a/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint b/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
new file mode 100644
index 0000000..0f3e18d
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
@@ -0,0 +1,14 @@
+0
+12
+PDPD-CONFIGURATION 0 0
+msgrtr.apinode.metrics.dmaap 1 22
+APPC-CL 0 0
+APPC-CL 1 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 0
+PDPD-CONFIGURATION 1 1
+DCAE-CL-EVENT 1 0
+msgrtr.apinode.metrics.dmaap 0 24
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 6
+DCAE-CL-EVENT 0 1
diff --git a/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint b/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
new file mode 100644
index 0000000..0f3e18d
--- /dev/null
+++ b/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
@@ -0,0 +1,14 @@
+0
+12
+PDPD-CONFIGURATION 0 0
+msgrtr.apinode.metrics.dmaap 1 22
+APPC-CL 0 0
+APPC-CL 1 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 0
+PDPD-CONFIGURATION 1 1
+DCAE-CL-EVENT 1 0
+msgrtr.apinode.metrics.dmaap 0 24
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 6
+DCAE-CL-EVENT 0 1
diff --git a/docker_files/data-zookeeper/version-2/log.1 b/docker_files/data-zookeeper/version-2/log.1
new file mode 100644
index 0000000..d925fca
--- /dev/null
+++ b/docker_files/data-zookeeper/version-2/log.1
Binary files differ
diff --git a/docker_files/data-zookeeper/version-2/log.98 b/docker_files/data-zookeeper/version-2/log.98
new file mode 100644
index 0000000..a101dca
--- /dev/null
+++ b/docker_files/data-zookeeper/version-2/log.98
Binary files differ
diff --git a/docker_files/data-zookeeper/version-2/log.b0 b/docker_files/data-zookeeper/version-2/log.b0
new file mode 100644
index 0000000..d19996d
--- /dev/null
+++ b/docker_files/data-zookeeper/version-2/log.b0
Binary files differ
diff --git a/docker_files/dgltest/data.2.2.json b/docker_files/dgltest/data.2.2.json
new file mode 100644
index 0000000..277690d
--- /dev/null
+++ b/docker_files/dgltest/data.2.2.json
@@ -0,0 +1 @@
+{ "topicName": "Topic-11538-2", "topicDescription": "topic for test 2.2", "partitionCount": "1", "replicationCount": "1", "transactionEnabled": "true" }
diff --git a/docker_files/dgltest/data.2.5.json b/docker_files/dgltest/data.2.5.json
new file mode 100644
index 0000000..949b5cb
--- /dev/null
+++ b/docker_files/dgltest/data.2.5.json
@@ -0,0 +1 @@
+{ "datestamp": "Wed Dec 14 06:14:23 UTC 2016", "appkey": "x100", "appval": "some value" }
diff --git a/docker_files/dgltest/data.3.1.txt b/docker_files/dgltest/data.3.1.txt
new file mode 100644
index 0000000..a03d04d
--- /dev/null
+++ b/docker_files/dgltest/data.3.1.txt
@@ -0,0 +1 @@
+datestamp: Wed Dec 14 06:14:33 UTC 2016, key: 3.1, value: this is a test
diff --git a/docker_files/dgltest/data.3.3.json b/docker_files/dgltest/data.3.3.json
new file mode 100644
index 0000000..5ec7d8e
--- /dev/null
+++ b/docker_files/dgltest/data.3.3.json
@@ -0,0 +1 @@
+{ "datestamp": "Wed Dec 14 06:14:35 UTC 2016", "key": "3.3", "value": "this is a test" }
diff --git a/docker_files/dgltest/key.req b/docker_files/dgltest/key.req
new file mode 100644
index 0000000..a7e4092
--- /dev/null
+++ b/docker_files/dgltest/key.req
@@ -0,0 +1 @@
+{ "email": "no.email", "description": "request for direct response KEY" }
diff --git a/docker_files/dgltest/out/1.1.out b/docker_files/dgltest/out/1.1.out
new file mode 100644
index 0000000..6fc17f8
--- /dev/null
+++ b/docker_files/dgltest/out/1.1.out
@@ -0,0 +1,5 @@
+{"topics": [
+ "msgrtr.apinode.metrics.dmaap",
+ "Topic-11479-2",
+ "11479.3"
+]} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.1.out b/docker_files/dgltest/out/2.1.out
new file mode 100644
index 0000000..514bdca
--- /dev/null
+++ b/docker_files/dgltest/out/2.1.out
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "request for direct response KEY",
+ "email": "no.email"
+ },
+ "secret": "ckvKmWw5JAtxVYroWNM0bNZh",
+ "key": "Hje2R7zy89VTLufl"
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.2.out b/docker_files/dgltest/out/2.2.out
new file mode 100644
index 0000000..f636b9d
--- /dev/null
+++ b/docker_files/dgltest/out/2.2.out
@@ -0,0 +1,13 @@
+{
+ "owner": "Hje2R7zy89VTLufl",
+ "readerAcl": {
+ "enabled": true,
+ "users": []
+ },
+ "name": "Topic-11538-2",
+ "description": "topic for test 2.2",
+ "writerAcl": {
+ "enabled": true,
+ "users": []
+ }
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.3.out b/docker_files/dgltest/out/2.3.out
new file mode 100644
index 0000000..98ffddc
--- /dev/null
+++ b/docker_files/dgltest/out/2.3.out
@@ -0,0 +1,6 @@
+{"topics": [
+ "msgrtr.apinode.metrics.dmaap",
+ "Topic-11479-2",
+ "11479.3",
+ "Topic-11538-2"
+]} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.4.out b/docker_files/dgltest/out/2.4.out
new file mode 100644
index 0000000..f636b9d
--- /dev/null
+++ b/docker_files/dgltest/out/2.4.out
@@ -0,0 +1,13 @@
+{
+ "owner": "Hje2R7zy89VTLufl",
+ "readerAcl": {
+ "enabled": true,
+ "users": []
+ },
+ "name": "Topic-11538-2",
+ "description": "topic for test 2.2",
+ "writerAcl": {
+ "enabled": true,
+ "users": []
+ }
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.5.out b/docker_files/dgltest/out/2.5.out
new file mode 100644
index 0000000..78e55ec
--- /dev/null
+++ b/docker_files/dgltest/out/2.5.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 8,
+ "count": 1
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/2.6.out b/docker_files/dgltest/out/2.6.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/docker_files/dgltest/out/2.6.out
@@ -0,0 +1 @@
+[] \ No newline at end of file
diff --git a/docker_files/dgltest/out/3.1.out b/docker_files/dgltest/out/3.1.out
new file mode 100644
index 0000000..d2a9b4e
--- /dev/null
+++ b/docker_files/dgltest/out/3.1.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 175,
+ "count": 1
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/3.2.out b/docker_files/dgltest/out/3.2.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/docker_files/dgltest/out/3.2.out
@@ -0,0 +1 @@
+[] \ No newline at end of file
diff --git a/docker_files/dgltest/out/3.3.out b/docker_files/dgltest/out/3.3.out
new file mode 100644
index 0000000..ed2a213
--- /dev/null
+++ b/docker_files/dgltest/out/3.3.out
@@ -0,0 +1,4 @@
+{
+ "serverTimeMs": 3,
+ "count": 1
+} \ No newline at end of file
diff --git a/docker_files/dgltest/out/3.4.out b/docker_files/dgltest/out/3.4.out
new file mode 100644
index 0000000..f58f5f7
--- /dev/null
+++ b/docker_files/dgltest/out/3.4.out
@@ -0,0 +1 @@
+["{\"datestamp\":\"Wed Dec 14 06:14:35 UTC 2016\",\"value\":\"this is a test\",\"key\":\"3.3\"}"] \ No newline at end of file
diff --git a/docker_files/dgltest/regress.sh b/docker_files/dgltest/regress.sh
new file mode 100755
index 0000000..758dd7c
--- /dev/null
+++ b/docker_files/dgltest/regress.sh
@@ -0,0 +1,113 @@
+#!/bin/ksh
+#
+# depends on jq - https://stedolan.github.io/jq/
+
+PROTOCOL=http
+FQDN=127.0.0.1
+#vm1-message-router
+#FQDN=10.208.128.229
+PORT=3904
+URL=$PROTOCOL://$FQDN:$PORT
+
+rm -f out/*
+mkdir -p out
+
+results() {
+# echo "[debug] compare $1 to $2"
+ if [ $1 == $2 ]
+ then
+ echo -n "SUCCESS "
+ else
+ echo -n "FAIL ($1) "
+ fi
+ echo " :TEST $3 ($4)"
+}
+SUITE=0
+SUITE=$((SUITE + 1))
+echo "SUITE $SUITE: List topics"
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
+results $rc $expect $TC "list"
+StartTopicCount=`cat out/$TC.out | wc -l`
+
+
+SUITE=$((SUITE + 1))
+echo
+echo "SUITE $SUITE: APIKEY authenticated topic"
+TOPIC=Topic-$$-$SUITE
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+OUT=out/$TC.out
+echo '{ "email": "no.email", "description": "request for direct response KEY" }' > key.req
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @key.req $URL/apiKeys/create`
+results $rc $expect $SUITE.$TN "gen apikey "
+TN=$((TN + 1))
+TC=$SUITE.$TN
+SECRET=$(jq ".secret" $OUT | cut -f 2 -d \")
+KEY=$(jq ".key" $OUT | cut -f 2 -d \")
+TIME=`date --iso-8601=seconds`
+SIG=$(echo -n "$TIME" | openssl sha1 -hmac $SECRET -binary | openssl base64)
+xAUTH=$KEY:$SIG
+#echo "[debug] $SECRET $KEY $TIME $SIG $xAUTH"
+DATA=data.$TC.json
+echo "{ \"topicName\": \"$TOPIC\", \"topicDescription\": \"topic for test $TC\", \"partitionCount\": \"1\", \"replicationCount\": \"1\", \"transactionEnabled\": \"true\" }" > $DATA
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/topics/create`
+results $rc $expect $SUITE.$TN "create topic"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
+results $rc $expect $TC "list "
+TopicCnt=`cat out/$TC.out | wc -l`
+results $TopicCnt $((StartTopicCount + 1)) $TC "topic count"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics/$TOPIC`
+results $rc $expect $TC "list $TOPIC"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.json
+echo "{ \"datestamp\": \"`date`\", \"appkey\": \"x100\", \"appval\": \"some value\" }" > $DATA
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub APIKEY topic"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X GET -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" $URL/events/$TOPIC/g0/u1`
+results $rc $expect $SUITE.$TN "sub APIKEY topic"
+
+
+SUITE=$((SUITE + 1))
+echo
+echo "SUITE $SUITE: anonymous topic"
+TOPIC=$$.$SUITE
+TN=0
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.txt
+echo "datestamp: `date`, key: $TC, value: this is a test " > $DATA
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: text/plain" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub text/plain"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
+results $rc $expect $SUITE.$TN "sub text/plain"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.json
+echo "{ \"datestamp\": \"`date`\", \"key\": \"$TC\", \"value\": \"this is a test\" }" > $DATA
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub json"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
+results $rc $expect $SUITE.$TN "sub json"
+
diff --git a/docker_files/dme2reg/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/.gitignore b/docker_files/dme2reg/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/.gitignore
new file mode 100755
index 0000000..25b6eed
--- /dev/null
+++ b/docker_files/dme2reg/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/.gitignore
@@ -0,0 +1,2 @@
+/routeOffer=BAU_SE.lock
+/routeOffer=BAU_SE.txt
diff --git a/docker_files/download-kafka.sh b/docker_files/download-kafka.sh
new file mode 100644
index 0000000..2ddc911
--- /dev/null
+++ b/docker_files/download-kafka.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
+url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/docker_files/dump_mr_state.sh b/docker_files/dump_mr_state.sh
new file mode 100755
index 0000000..75c16d6
--- /dev/null
+++ b/docker_files/dump_mr_state.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+
+TIMESTAMP=`date +%Y%m%d%H%M`
+CONTAINERID=`docker ps |grep kafka |cut -b1-12`
+docker cp $CONTAINERID:/kafka ./data-kafka-$TIMESTAMP
+tar zcvf ./data-kafka-$TIMESTAMP.tgz ./data-kafka-$TIMESTAMP
+CONTAINERID=`docker ps |grep zookeeper |cut -b1-12`
+docker cp $CONTAINERID:/opt/zookeeper-3.4.9/data ./data-zookeeper-$TIMESTAMP
+tar zcvf ./data-zookeeper-$TIMESTAMP.tgz ./data-zookeeper-$TIMESTAMP
diff --git a/docker_files/env.sh b/docker_files/env.sh
new file mode 100644
index 0000000..c8039e4
--- /dev/null
+++ b/docker_files/env.sh
@@ -0,0 +1,6 @@
+alias dcup='/opt/docker/docker-compose up'
+alias dcdown='/opt/docker/docker-compose down'
+alias din-kafka='docker exec -it "$(docker ps |grep kafka |cut -b 1-12)" /bin/bash'
+alias din-zk='docker exec -it "$(docker ps |grep zookeeper |cut -b 1-12)" /bin/bash'
+alias dlogs-kafka='docker logs "$(docker ps |grep kafka |cut -b 1-12)"'
+alias dlogs-zk='docker logs "$(docker ps |grep zookeeper |cut -b 1-12)"'
diff --git a/docker_files/init-ecomp-topics.sh b/docker_files/init-ecomp-topics.sh
new file mode 100755
index 0000000..78fbeae
--- /dev/null
+++ b/docker_files/init-ecomp-topics.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# lji: this is the script to run to initialize an MR from its day-0 state to eCOMP topics loaded
+
+HOSTPORT="127.0.0.1:3904"
+ANON_TOPICS="APPC-CL PDPD-CONFIGURATION POLICY-CL-MGT DCAE-CL-EVENT"
+API_TOPICS_SDC="SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"
+API_KEYFILE="./uebapikey-sdc"
+
+echo "blah" > /tmp/sample.txt
+
+# list topics
+curl http://${HOSTPORT}/topics
+
+declare -A TOPICS
+
+echo "adding anonymous topics"
+for ANON_TOPIC in $ANON_TOPICS ; do
+ echo "curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${ANON_TOPIC}"
+ curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${ANON_TOPIC}
+ echo "done creating anonymous topic $ANON_TOPIC"
+ echo
+done
+
+echo "generating API key"
+echo '{"email":"no email","description":"API key for SDC"}' > /tmp/input.txt
+curl -s -o ${API_KEYFILE} -H "Content-Type:application/json" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create
+cat ${API_KEYFILE}
+echo
+
+echo "adding API key topics"
+UEBAPIKEYSECRET=`cat ${API_KEYFILE} |jq -r ".secret"`
+UEBAPIKEYKEY=`cat ${API_KEYFILE} |jq -r ".key"`
+for API_TOPIC in $API_TOPICS_SDC; do
+ echo '{"topicName":"'${API_TOPIC}'","topicDescription":"SDC API Key secure topic for ","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+ time=`date --iso-8601=seconds`
+ signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+ xAuth=$UEBAPIKEYKEY:$signature
+ xDate="$time"
+ echo "curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create"
+ curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create
+ echo "done creating api key topic $API_TOPIC"
+ echo
+done
+
+
+echo
+echo "============ post loading state of topics ================="
+for TOPIC in "$API_TOPICS_SDC $ANON_TOPIC"; do
+ curl http://${HOSTPORT}/topics/${TOPIC}
+done
diff --git a/docker_files/ljitest/test.sh b/docker_files/ljitest/test.sh
new file mode 100644
index 0000000..0e06d5a
--- /dev/null
+++ b/docker_files/ljitest/test.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+# lji: this is basically what Dom has in his regtest. re-do it in bash instead of ksh
+
+HOSTPORT="127.0.0.1:3904"
+ANONTOPIC="anon-topic-$RANDOM"
+APITOPIC="api-topic-$RANDOM"
+APIKEYFILE="/tmp/key"
+
+echo "blah" > /tmp/sample.txt
+
+if [ ! -e /usr/bin/jq ]; then
+ apt-get update && apt-get -y install jq
+fi
+
+
+# list topics
+curl http://${HOSTPORT}/topics
+
+# publish to an anonymous topic (first publish creates the topic)
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+# subscribe to an anonymous topic
+curl -H "Content-Type:text/plain" -X GET http://${HOSTPORT}/events/$ANONTOPIC/group1/C1?timeout=5000 &
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+
+
+
+# create api key
+echo '{"email":"no email","description":"API key and secret both in reponse"}' > /tmp/input.txt
+curl -s -o ${APIKEYFILE} -H "Content-Type:application/json" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create
+UEBAPIKEYSECRET=`cat ${APIKEYFILE} |jq -r ".secret"`
+UEBAPIKEYKEY=`cat ${APIKEYFILE} |jq -r ".key"`
+
+# create an api key secured topic
+# pay attention to replication count
+echo '{"topicName":"'${APITOPIC}'","topicDescription":"This is an API key securedTopic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create
+
+# first subscribe and run it in bg. then publish.
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X GET http://${HOSTPORT}/events/${APITOPIC}/g0/u1 &
+curl -H "Content-Type:text/plain" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${APITOPIC}
diff --git a/docker_files/mykey b/docker_files/mykey
new file mode 100644
index 0000000..c2b8b87
--- /dev/null
+++ b/docker_files/mykey
@@ -0,0 +1,27 @@
+_sNOLphPzrU7L0L3oWv0pYwgV_ddGF1XoBsQEIAp34jfP-fGJFPfFYaMpDEZ3gwH59rNw6qyMZHk
+k-4irklvVcWk36lC3twNvc0DueRCVrws1bkuhOLCXdxHJx-YG-1xM8EJfRmzh79WPlPkbAdyPmFF
+Ah44V0GjAnInPOFZA6MHP9rNx9B9qECHRfmvzU13vJCcgTsrmOr-CEiWfRsnzPjsICxpq9OaVT_D
+zn6rNaroGm1OiZNCrCgvRkCUHPOOCw3j9G1GeaImoZNYtozbz9u4sj13PU-MxIIAa64b1bMMMjpz
+Upc8lVPI4FnJKg6axMmEGn5zJ6JUq9mtOVyPj__2GEuDgpx5H4AwodXXVjFsVgR8UJwI_BvS2JVp
+JoQk0J1RqXmAXVamlsMAfzmmbARXgmrBfnuhveZnh9ymFVU-YZeujdANniXAwBGI7c6hG_BXkH7i
+Eyf4Fn41_SV78PskP6qgqJahr9r3bqdjNbKBztIKCOEVrE_w3IM5r02l-iStk_NBRkj6cq_7VCpG
+afxZ2CtZMwuZMiypO_wOgbdpCSKNzsL-NH2b4b08OlKiWb263gz634KJmV5WEfCl-6eH-JUFbWOS
+JwQfActLNT2ZQPl2MyZQNBzJEWoJRgS6k7tPRO-zqeUtYYHGHVMCxMuMHGQcoilNNHEFeBCG_fBh
+yAKb9g9F86Cbx9voMLiyTX2T3rwVHiSJFOzfNxGmfN5JWOthIun_c5hEY1tLQ15BomzkDwk7BAj7
+VbRCrVD45B6xrmSTMBSWYmLyr6mnQxQqeh9cMbD-0ZAncE3roxRnRvPKjFFa208ykYUp2V83r_PJ
+fV5I9ZPKSjk9DwFyrjkcQQEYDhdK6IFqcd6nEthjYVkmunu2fsX0bIOm9GGdIbKGqBnpdgBO5hyT
+rBr9HSlZrHcGdti1R823ckDF0Ekcl6kioDr5NLIpLtg9zUEDRm3QrbX2mv5Zs8W0pYnOqglxy3lz
+bJZTN7oR7VasHUtjmp0RT9nLZkUs5TZ6MHhlIq3ZsQ6w_Q9Rv1-ofxfwfCC4EBrWKbWAGCf6By4K
+Ew8321-2YnodhmsK5BrT4zQ1DZlmUvK8BmYjZe7wTljKjgYcsLTBfX4eMhJ7MIW1kpnl8AbiBfXh
+QzN56Mki51Q8PSQWHm0W9tnQ0z6wKdck6zBJ8JyNzewZahFKueDTn-9DOqIDfr3YHvQLLzeXyJ8e
+h4AgjW-hvlLzRGtkCknjLIgXVa3rMTycseAwbW-mgdCqqkw3SdEG8feAcyntmvE8j2jbtSDStQMB
+9JdvyNLuQdNG4pxpusgvVso0-8NQF0YVa9VFwg9U6IPSx5p8FcW68OAHt_fEgT4ZtiH7o9aur4o9
+oYqUh2lALCY-__9QLq1KkNjMKs33Jz9E8LbRerG9PLclkTrxCjYAeUWBjCwSI7OB7xkuaYDSjkjj
+a46NLpdBN1GNcsFFcZ79GFAK0_DsyxGLX8Tq6q0Bvhs8whD8wlSxpTGxYkyqNX-vcb7SDN_0WkCE
+XSdZWkqTHXcYbOvoCOb_e6SFAztuMenuHWY0utX0gBfx_X5lPDFyoYXErxFQHiA7t27keshXNa6R
+ukQRRS8kMjre1U74sc-fRNXkXpl57rG4rgxaEX0eBeowa53KAsVvUAoSac2aC_nfzXrDvoyf9Xi3
+JpEZNhUDLpFCEycV4I7jGQ9wo9qNaosvlsr6kbLDNdb_1xrGVgjT3xEvRNJNPqslSAu-yD-UFhC3
+AmCdYUnugw_eEFqXCHTARcRkdPPvl2XsmEKY2IqEeO5tz4DyXQFaL-5hEVh6lYEU1EOWHk3UGIXe
+Vc5_Ttp82qNLmlJPbZvgmNTJzYTHDQ_27KBcp7IVVZgPDjVKdWqQvZ18KhxvfF3Idgy82LBZniFV
+IbtxllXiPRxoPQriSXMnXjh3XkvSDI2pFxXfEvLRn1tvcFOwPNCz3QfPIzYg8uYXN5bRt3ZOrR_g
+ZhIlrc7HO0VbNbeqEVPKMZ-cjkqGj4VAuDKoQc0eQ6X_wCoAGO78nPpLeIvZPx1X3z5YoqNA \ No newline at end of file
diff --git a/docker_files/start-kafka.sh b/docker_files/start-kafka.sh
new file mode 100644
index 0000000..4d955da
--- /dev/null
+++ b/docker_files/start-kafka.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+if [[ -z "$KAFKA_PORT" ]]; then
+ export KAFKA_PORT=9092
+fi
+if [[ -z "$KAFKA_ADVERTISED_PORT" ]]; then
+ export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
+fi
+if [[ -z "$KAFKA_BROKER_ID" ]]; then
+ # By default auto allocate broker ID
+ #export KAFKA_BROKER_ID=-1
+ export KAFKA_BROKER_ID=1
+fi
+#if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+ #export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+ export KAFKA_LOG_DIRS="/kafka/kafka-logs"
+#fi
+if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
+ export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
+fi
+
+if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
+ sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
+ unset KAFKA_HEAP_OPTS
+fi
+
+if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
+ export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
+fi
+
+for VAR in `env`
+do
+ if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
+ kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
+ env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
+ if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
+ sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
+ else
+ echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
+ fi
+ fi
+done
+
+if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
+ eval $CUSTOM_INIT_SCRIPT
+fi
+
+
+KAFKA_PID=0
+
+# see https://medium.com/@gchudnov/trapping-signals-in-docker-containers-7a57fdda7d86#.bh35ir4u5
+term_handler() {
+ echo 'Stopping Kafka....'
+ if [ $KAFKA_PID -ne 0 ]; then
+ kill -s TERM "$KAFKA_PID"
+ wait "$KAFKA_PID"
+ fi
+ echo 'Kafka stopped.'
+ exit
+}
+
+
+# Capture kill requests to stop properly
+trap "term_handler" SIGHUP SIGINT SIGTERM
+create-topics.sh &
+$KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties &
+KAFKA_PID=$!
+
+wait "$KAFKA_PID"
diff --git a/docker_files/uebapikey-sdc b/docker_files/uebapikey-sdc
new file mode 100644
index 0000000..0b3aa80
--- /dev/null
+++ b/docker_files/uebapikey-sdc
@@ -0,0 +1,8 @@
+{
+ "aux": {
+ "description": "API key for SDC",
+ "email": "no email"
+ },
+ "secret": "KpMJB28vNduEJ0zHDWOQXBmQ",
+ "key": "779NflzwmkuKpqef"
+} \ No newline at end of file