127 files changed, 1778 insertions, 1120 deletions
diff --git a/ajsc-shared-config/etc/keystore.jks b/ajsc-shared-config/etc/keystore.jks
index 3bfe928..a9faf1b 100644
--- a/ajsc-shared-config/etc/keystore.jks
+++ b/ajsc-shared-config/etc/keystore.jks
Binary files differ
diff --git a/bundleconfig-local/etc/appprops/MsgRtrApi.properties b/bundleconfig-local/etc/appprops/MsgRtrApi.properties
index 583178a..1ccd9f0 100644
--- a/bundleconfig-local/etc/appprops/MsgRtrApi.properties
+++ b/bundleconfig-local/etc/appprops/MsgRtrApi.properties
@@ -139,12 +139,12 @@ maxcontentlength=10000
 ##############################################################################
 #AAF Properties
-msgRtr.namespace.aaf=com.onap.dmaap.mr.topic
-msgRtr.topicfactory.aaf=org.openecomp.dmaapBC.topicFactory|:org.openecomp.dmaapBC.topic:
-enforced.topic.name.AAF=com.onap
+msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
+msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
+enforced.topic.name.AAF=org.onap
 forceAAF=false
 transidUEBtopicreqd=false
-defaultNSforUEB=com.onap.dmaap.mr.ueb
+defaultNSforUEB=org.onap.dmaap.mr.ueb
 ##############################################################################
 #Mirror Maker Agent
 msgRtr.mirrormakeradmin.aaf=com.onap.dmaap.mr.dev.mirrormaker|*|admin
diff --git a/demo/deploy.sh b/demo/deploy.sh
new file mode 100644
index 0000000..b11a1e0
--- /dev/null
+++ b/demo/deploy.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+set -e
+
+# do not change this, it is already matched with the git repo file structure
+DOCKER_FILE_DIR='./docker_files'
+
+KAFKA_VERSION='0.8.1.1'
+SCALA_VERSION='2.9.2'
+wget -q "http://www.namesdir.com/mirrors/apache/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" \
+  -O "./docker_files/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+
+# commands to run docker and docker-compose
+DOCKER_COMPOSE_EXE='/opt/docker/docker-compose'
+
+cd "${DOCKER_FILE_DIR}"
+
+while ! ifconfig |grep "docker0" > /dev/null;
+  do sleep 1
+  echo 'waiting for docker operational'
+done
+
+echo "prep any files with local configurations"
+if ls __* 1> /dev/null 2>&1; then
+  IP_DOCKER0=$(ifconfig docker0 |grep "inet addr" | cut -d: -f2 |cut -d" " -f1)
+  TEMPLATES=$(ls -1 __*)
+  for TEMPLATE in $TEMPLATES
+  do
+    FILENAME=${TEMPLATE//_}
+    if [ ! -z "${IP_DOCKER0}" ]; then
+      sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" "$TEMPLATE" > "$FILENAME"
+    fi
+  done
+fi
+
+if [ -z "$MTU" ]; then
+  export MTU=$(ifconfig docker0 |grep MTU |sed -e 's/.*MTU://' -e 's/\s.*$//')
+fi
+
+echo "starting docker operations"
+${DOCKER_COMPOSE_EXE} up -d --build
diff --git a/src/main/resources/docker-compose/Dockerfile b/demo/docker_files/Dockerfile
index 71943d6..87e96ee 100644
--- a/src/main/resources/docker-compose/Dockerfile
+++ b/demo/docker_files/Dockerfile
@@ -6,8 +6,7 @@ RUN apk add --update unzip wget curl docker jq coreutils
 ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
 ADD download-kafka.sh /tmp/download-kafka.sh
-RUN chmod a+x /tmp/download-kafka.sh; sync;
-RUN /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
+RUN chmod a+x /tmp/download-kafka.sh && sync && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
 
 VOLUME ["/kafka"]
diff --git a/demo/docker_files/Dockerfile-local b/demo/docker_files/Dockerfile-local
new file mode 100644
index 0000000..4909af3
--- /dev/null
+++ b/demo/docker_files/Dockerfile-local
@@ -0,0 +1,22 @@
+FROM anapsix/alpine-java
+
+MAINTAINER Wurstmeister
+
+RUN apk add --update tar wget curl docker coreutils
+
+ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
+COPY kafka_2.9.2-0.8.1.1.tgz /tmp/kafka_2.9.2-0.8.1.1.tgz
+RUN tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt
+
+VOLUME ["/kafka"]
+
+ENV KAFKA_HOME /opt/kafka_2.9.2-0.8.1.1
+ADD start-kafka.sh /usr/bin/start-kafka.sh
+ADD broker-list.sh /usr/bin/broker-list.sh
+ADD create-topics.sh /usr/bin/create-topics.sh
+# The scripts need to have executable permission
+RUN chmod a+x /usr/bin/start-kafka.sh && \
+    chmod a+x /usr/bin/broker-list.sh && \
+    chmod a+x /usr/bin/create-topics.sh
+# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
+CMD ["start-kafka.sh"]
diff --git a/demo/docker_files/__MsgRtrApi.properties b/demo/docker_files/__MsgRtrApi.properties
new file mode 100644
index 0000000..47293a7
--- /dev/null
+++ b/demo/docker_files/__MsgRtrApi.properties
@@ -0,0 +1,140 @@
+###############################################################################
+##
+## Cambria API Server config
+##
+## - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+## - 3904 is standard as of 7/29/14.
+#
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+##
+config.zk.servers=zookeeper:2181
+#config.zk.servers={{ ip.docker0 }}:2181
+#10.0.11.1:2181
+#10.208.128.229:2181
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+## if you want to change request.required.acks it can take this one value
+#kafka.metadata.broker.list=localhost:9092,localhost:9093
+kafka.metadata.broker.list=kafka:9092
+#kafka.metadata.broker.list={{ ip.docker0 }}:9092
+#10.0.11.1:9092
+#10.208.128.229:9092
+##kafka.request.required.acks=-1
+#kafka.client.zookeeper=${config.zk.servers}
+consumer.timeout.ms=100
+zookeeper.connection.timeout.ms=6000
+zookeeper.session.timeout.ms=6000
+zookeeper.sync.time.ms=2000
+auto.commit.interval.ms=1000
+fetch.message.max.bytes =1000000
+auto.commit.enable=false
+
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. to protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+#cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.sendEverySeconds=60
+
+cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
+
+##############################################################################
+#100mb
+maxcontentlength=10000
+
+
+##############################################################################
+#AAF Properties
+msgRtr.namespace.aaf=org.openecomp.dcae.dmaap.mtnje2.mr.topic
+msgRtr.topicfactory.aaf=org.openecomp.dcae.dmaap.topicFactory|:org.openecomp.dcae.dmaap.mtnje2.mr.topic:
+enforced.topic.name.AAF=org.openecomp
+forceAAF=false
+transidUEBtopicreqd=false
+defaultNSforUEB=org.openecomp.dmaap.mr.ueb
+##############################################################################
+#Mirror Maker Agent
+msgRtr.mirrormakeradmin.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.openecomp.dmaap.mr.dev.topicFactory|:org.openecomp.dmaap.mr.dev.topic:
+msgRtr.mirrormaker.timeout=15000
+msgRtr.mirrormaker.topic=org.openecomp.dmaap.mr.prod.mm.agent
+msgRtr.mirrormaker.consumergroup=mmagentserver
+msgRtr.mirrormaker.consumerid=1
diff --git a/demo/docker_files/__docker-compose.yml b/demo/docker_files/__docker-compose.yml
new file mode 100644
index 0000000..cca2b64
--- /dev/null
+++ b/demo/docker_files/__docker-compose.yml
@@ -0,0 +1,57 @@
+version: '2'
+networks:
+  default:
+    driver: bridge
+    driver_opts:
+      com.docker.network.driver.mtu: ${MTU}
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+    volumes:
+      - ./data-zookeeper:/opt/zookeeper-3.4.9/data
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "30m"
+        max-file: "5"
+
+  kafka:
+    build:
+      context: .
+      dockerfile: Dockerfile-local
+    ports:
+      - "9092:9092"
+    environment:
+      #KAFKA_ADVERTISED_HOST_NAME: {{ ip.docker0 }}
+      KAFKA_ADVERTISED_HOST_NAME: kafka
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_BROKER_ID: 1
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+      - ./data-kafka:/kafka
+      - ./start-kafka.sh:/start-kafka.sh
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "30m"
+        max-file: "5"
+
+  dmaap:
+    image: nexus3.onap.org:10001/onap/dmaap/dmaap-mr:1.1.4
+    ports:
+      - "3904:3904"
+      - "3905:3905"
+    volumes:
+      - ./MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+      - ./cadi.properties:/appl/dmaapMR1/etc/cadi.properties
+      - ./keystore.jks:/appl/dmaapMR1/bundleconfig/etc/keystore.jks
+    depends_on:
+      - zookeeper
+      - kafka
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "30m"
+        max-file: "5"
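For reference, a minimal bring-up and smoke test of this compose file; the MTU value below is an assumption (deploy.sh normally derives it from ifconfig docker0), and the /topics listing endpoint is the same one the test scripts further down use:

    export MTU=1500
    /opt/docker/docker-compose up -d --build
    curl http://127.0.0.1:3904/topics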
diff --git a/demo/docker_files/apikey-APPC1.key b/demo/docker_files/apikey-APPC1.key
new file mode 100644
index 0000000..2f77745
--- /dev/null
+++ b/demo/docker_files/apikey-APPC1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-APPC1",
+    "email": "no email"
+  },
+  "secret": "64AG2hF4pYeG2pq7CT6XwUOT",
+  "key": "VIlbtVl6YLhNUrtU"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTAL1.key b/demo/docker_files/apikey-PORTAL1.key
new file mode 100644
index 0000000..068bed7
--- /dev/null
+++ b/demo/docker_files/apikey-PORTAL1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTAL1",
+    "email": "no email"
+  },
+  "secret": "uCYgKjWKK5IxPGNNZzYSSWo9",
+  "key": "7GkVcrO6sIDb3ngW"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALAPP1.key b/demo/docker_files/apikey-PORTALAPP1.key
new file mode 100644
index 0000000..a27422f
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALAPP1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTALAPP1",
+    "email": "no email"
+  },
+  "secret": "P0HpqEBhKJvxjRYdw2sCTUll",
+  "key": "jQd4a9zVNi4ePyBp"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALDBC1.key b/demo/docker_files/apikey-PORTALDBC1.key
new file mode 100644
index 0000000..f29d959
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALDBC1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTALDBC1",
+    "email": "no email"
+  },
+  "secret": "WB7AJICClKg9oZLsxhQnykXA",
+  "key": "MtRwsF16RdpHZ7eM"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALPOL1.key b/demo/docker_files/apikey-PORTALPOL1.key
new file mode 100644
index 0000000..97b39a4
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALPOL1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTALPOL1",
+    "email": "no email"
+  },
+  "secret": "P7ejzF4nS3LAsMmKKTvYYFpA",
+  "key": "Gsd3C3hLYaUcor6l"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALSDC1.key b/demo/docker_files/apikey-PORTALSDC1.key
new file mode 100644
index 0000000..28bfb89
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALSDC1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTALSDC1",
+    "email": "no email"
+  },
+  "secret": "XftIATw9Jr3VzAcPqt3NnJOu",
+  "key": "x9UfO7JsDn8BESVX"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALVID1.key b/demo/docker_files/apikey-PORTALVID1.key
new file mode 100644
index 0000000..3373566
--- /dev/null
+++ b/demo/docker_files/apikey-PORTALVID1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-PORTALVID1",
+    "email": "no email"
+  },
+  "secret": "S31PrbOzGgL4hg4owgtx47Da",
+  "key": "2Re7Pvdkgw5aeAUD"
+}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-SDC1.key b/demo/docker_files/apikey-SDC1.key
new file mode 100644
index 0000000..207431d
--- /dev/null
+++ b/demo/docker_files/apikey-SDC1.key
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for apikey-SDC1",
+    "email": "no email"
+  },
+  "secret": "Ehq3WyT4bkif4zwgEbvshGal",
+  "key": "iPIxkpAMI8qTcQj8"
+}
\ No newline at end of file
diff --git a/demo/docker_files/broker-list.sh b/demo/docker_files/broker-list.sh
new file mode 100644
index 0000000..7f04639
--- /dev/null
+++ b/demo/docker_files/broker-list.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
+BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
+echo $BROKERS | sed -e 's/ /,/g'
diff --git a/demo/docker_files/cadi.properties b/demo/docker_files/cadi.properties
new file mode 100644
index 0000000..1cb00a5
--- /dev/null
+++ b/demo/docker_files/cadi.properties
@@ -0,0 +1,21 @@
+basic_realm=openecomp.org
+basic_warn=TRUE
+
+cadi_loglevel=DEBUG
+#cadi_keyfile=target/swm/package/nix/dist_files/appl/${artifactId}/etc/keyfile2
+cadi_keyfile=/appl/dmaapMR1/etc/keyfile
+# Configure AAF
+aaf_url=https://DME2RESOLVE/service=org.openecomp.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
+
+aaf_id=dgl@openecomp.org
+aaf_password=enc:f2u5br1mh29M02-
+aaf_timeout=5000
+aaf_clean_interval=1200000
+aaf_user_expires=60000
+aaf_high_count=1000000
+
+
+# The following properties are being set by the AJSC Container and should NOT need to be set here.
+AFT_LATITUDE=33.823589
+AFT_LONGITUDE=-84.366982
+AFT_ENVIRONMENT=AFTUAT
diff --git a/demo/docker_files/create-topics.sh b/demo/docker_files/create-topics.sh
new file mode 100644
index 0000000..e07bf06
--- /dev/null
+++ b/demo/docker_files/create-topics.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+
+if [[ -z "$START_TIMEOUT" ]]; then
+  START_TIMEOUT=600
+fi
+
+start_timeout_exceeded=false
+count=0
+step=10
+while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
+  echo "waiting for kafka to be ready"
+  sleep $step;
+  count=$(expr $count + $step)
+  if [ $count -gt $START_TIMEOUT ]; then
+    start_timeout_exceeded=true
+    break
+  fi
+done
+
+if $start_timeout_exceeded; then
+  echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
+  exit 1
+fi
+
+if [[ -n $KAFKA_CREATE_TOPICS ]]; then
+  IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
+    echo "creating topics: $topicToCreate"
+    IFS=':' read -a topicConfig <<< "$topicToCreate"
+    JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}"
+  done
+fi
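For reference, create-topics.sh parses KAFKA_CREATE_TOPICS as a comma-separated list of name:partitions:replicas entries; a hypothetical invocation, assuming KAFKA_PORT, KAFKA_HOME and KAFKA_ZOOKEEPER_CONNECT are already exported as start-kafka.sh does:

    KAFKA_CREATE_TOPICS='APPC-CL:2:1,PDPD-CONFIGURATION:2:1' create-topics.sh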
diff --git a/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown b/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
diff --git a/demo/docker_files/data-kafka/kafka-logs/.lock b/demo/docker_files/data-kafka/kafka-logs/.lock
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/.lock
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
new file mode 100644
index 0000000..85ee8bf
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-CL-1/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log
new file mode 100644
index 0000000..66dcea9
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST2-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
new file mode 100644
index 0000000..bb73f23
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
new file mode 100644
index 0000000..53364c5
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/DCAE-CL-EVENT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
new file mode 100644
index 0000000..b466eda
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/PDPD-CONFIGURATION-1/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
new file mode 100644
index 0000000..bc5db56
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
new file mode 100644
index 0000000..978eeb6
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/POLICY-CL-MGT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1-0/00000000000000000000.log
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
new file mode 100644
index 0000000..7c1c0f6
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
new file mode 100644
index 0000000..a0afe1d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.index
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
new file mode 100644
index 0000000..e3e471a
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/msgrtr.apinode.metrics.dmaap-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
new file mode 100644
index 0000000..a003b5d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
@@ -0,0 +1,27 @@
+0
+25
+ECOMP-PORTAL-OUTBOX-VID1 0 0
+PDPD-CONFIGURATION 0 2
+msgrtr.apinode.metrics.dmaap 1 26
+unauthenticated.SEC_MEASUREMENT_OUTPUT 1 1
+APPC-TEST2 0 0
+unauthenticated.TCA_EVENT_OUTPUT 1 1
+APPC-TEST1 0 0
+APPC-CL 0 2
+ECOMP-PORTAL-INBOX 0 0
+APPC-CL 1 0
+APPC-TEST2 1 1
+unauthenticated.TCA_EVENT_OUTPUT 0 1
+unauthenticated.SEC_MEASUREMENT_OUTPUT 0 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 1
+PDPD-CONFIGURATION 1 0
+DCAE-CL-EVENT 1 1
+msgrtr.apinode.metrics.dmaap 0 4
+ECOMP-PORTAL-OUTBOX-APP1 0 0
+ECOMP-PORTAL-OUTBOX-SDC1 0 0
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+DCAE-CL-EVENT 0 1
+ECOMP-PORTAL-OUTBOX-DBC1 0 0
+ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
new file mode 100644
index 0000000..a003b5d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
@@ -0,0 +1,27 @@
+0
+25
+ECOMP-PORTAL-OUTBOX-VID1 0 0
+PDPD-CONFIGURATION 0 2
+msgrtr.apinode.metrics.dmaap 1 26
+unauthenticated.SEC_MEASUREMENT_OUTPUT 1 1
+APPC-TEST2 0 0
+unauthenticated.TCA_EVENT_OUTPUT 1 1
+APPC-TEST1 0 0
+APPC-CL 0 2
+ECOMP-PORTAL-INBOX 0 0
+APPC-CL 1 0
+APPC-TEST2 1 1
+unauthenticated.TCA_EVENT_OUTPUT 0 1
+unauthenticated.SEC_MEASUREMENT_OUTPUT 0 1
+SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+POLICY-CL-MGT 1 1
+PDPD-CONFIGURATION 1 0
+DCAE-CL-EVENT 1 1
+msgrtr.apinode.metrics.dmaap 0 4
+ECOMP-PORTAL-OUTBOX-APP1 0 0
+ECOMP-PORTAL-OUTBOX-SDC1 0 0
+POLICY-CL-MGT 0 1
+SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1 0 0
+DCAE-CL-EVENT 0 1
+ECOMP-PORTAL-OUTBOX-DBC1 0 0
+ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log
new file mode 100644
index 0000000..33bee2d
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log
new file mode 100644
index 0000000..69b1e68
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.SEC_MEASUREMENT_OUTPUT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log
new file mode 100644
index 0000000..68a76bc
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-0/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.index
diff --git a/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log
new file mode 100644
index 0000000..89ec482
--- /dev/null
+++ b/demo/docker_files/data-kafka/kafka-logs/unauthenticated.TCA_EVENT_OUTPUT-1/00000000000000000000.log
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.1 b/demo/docker_files/data-zookeeper/version-2/log.1
new file mode 100644
index 0000000..f3cb136
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.1
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.103 b/demo/docker_files/data-zookeeper/version-2/log.103
new file mode 100644
index 0000000..9b648e2
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.103
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.125 b/demo/docker_files/data-zookeeper/version-2/log.125
new file mode 100644
index 0000000..0613642
--- /dev/null
+++ b/demo/docker_files/data-zookeeper/version-2/log.125
Binary files differ
diff --git a/demo/docker_files/download-kafka.sh b/demo/docker_files/download-kafka.sh
new file mode 100644
index 0000000..2ddc911
--- /dev/null
+++ b/demo/docker_files/download-kafka.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
+url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/demo/docker_files/dump_mr_state.sh b/demo/docker_files/dump_mr_state.sh
new file mode 100644
index 0000000..75c16d6
--- /dev/null
+++ b/demo/docker_files/dump_mr_state.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+
+TIMESTAMP=`date +%Y%m%d%H%M`
+CONTAINERID=`docker ps |grep kafka |cut -b1-12`
+docker cp $CONTAINERID:/kafka ./data-kafka-$TIMESTAMP
+tar zcvf ./data-kafka-$TIMESTAMP.tgz ./data-kafka-$TIMESTAMP
+CONTAINERID=`docker ps |grep zookeeper |cut -b1-12`
+docker cp $CONTAINERID:/opt/zookeeper-3.4.9/data ./data-zookeeper-$TIMESTAMP
+tar zcvf ./data-zookeeper-$TIMESTAMP.tgz ./data-zookeeper-$TIMESTAMP
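For reference, a rough inverse of dump_mr_state.sh for restoring a snapshot; a sketch under the assumption that the stack is stopped and that the compose file's ./data-kafka and ./data-zookeeper bind mounts are in use (the timestamp is a hypothetical snapshot id):

    TIMESTAMP=201703011200
    tar zxvf ./data-kafka-$TIMESTAMP.tgz && mv ./data-kafka-$TIMESTAMP ./data-kafka
    tar zxvf ./data-zookeeper-$TIMESTAMP.tgz && mv ./data-zookeeper-$TIMESTAMP ./data-zookeeper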
diff --git a/demo/docker_files/keystore.jks b/demo/docker_files/keystore.jks
new file mode 100644
index 0000000..a9faf1b
--- /dev/null
+++ b/demo/docker_files/keystore.jks
Binary files differ
diff --git a/demo/docker_files/mykey b/demo/docker_files/mykey
new file mode 100644
index 0000000..c2b8b87
--- /dev/null
+++ b/demo/docker_files/mykey
@@ -0,0 +1,27 @@
+_sNOLphPzrU7L0L3oWv0pYwgV_ddGF1XoBsQEIAp34jfP-fGJFPfFYaMpDEZ3gwH59rNw6qyMZHk
+k-4irklvVcWk36lC3twNvc0DueRCVrws1bkuhOLCXdxHJx-YG-1xM8EJfRmzh79WPlPkbAdyPmFF
+Ah44V0GjAnInPOFZA6MHP9rNx9B9qECHRfmvzU13vJCcgTsrmOr-CEiWfRsnzPjsICxpq9OaVT_D
+zn6rNaroGm1OiZNCrCgvRkCUHPOOCw3j9G1GeaImoZNYtozbz9u4sj13PU-MxIIAa64b1bMMMjpz
+Upc8lVPI4FnJKg6axMmEGn5zJ6JUq9mtOVyPj__2GEuDgpx5H4AwodXXVjFsVgR8UJwI_BvS2JVp
+JoQk0J1RqXmAXVamlsMAfzmmbARXgmrBfnuhveZnh9ymFVU-YZeujdANniXAwBGI7c6hG_BXkH7i
+Eyf4Fn41_SV78PskP6qgqJahr9r3bqdjNbKBztIKCOEVrE_w3IM5r02l-iStk_NBRkj6cq_7VCpG
+afxZ2CtZMwuZMiypO_wOgbdpCSKNzsL-NH2b4b08OlKiWb263gz634KJmV5WEfCl-6eH-JUFbWOS
+JwQfActLNT2ZQPl2MyZQNBzJEWoJRgS6k7tPRO-zqeUtYYHGHVMCxMuMHGQcoilNNHEFeBCG_fBh
+yAKb9g9F86Cbx9voMLiyTX2T3rwVHiSJFOzfNxGmfN5JWOthIun_c5hEY1tLQ15BomzkDwk7BAj7
+VbRCrVD45B6xrmSTMBSWYmLyr6mnQxQqeh9cMbD-0ZAncE3roxRnRvPKjFFa208ykYUp2V83r_PJ
+fV5I9ZPKSjk9DwFyrjkcQQEYDhdK6IFqcd6nEthjYVkmunu2fsX0bIOm9GGdIbKGqBnpdgBO5hyT
+rBr9HSlZrHcGdti1R823ckDF0Ekcl6kioDr5NLIpLtg9zUEDRm3QrbX2mv5Zs8W0pYnOqglxy3lz
+bJZTN7oR7VasHUtjmp0RT9nLZkUs5TZ6MHhlIq3ZsQ6w_Q9Rv1-ofxfwfCC4EBrWKbWAGCf6By4K
+Ew8321-2YnodhmsK5BrT4zQ1DZlmUvK8BmYjZe7wTljKjgYcsLTBfX4eMhJ7MIW1kpnl8AbiBfXh
+QzN56Mki51Q8PSQWHm0W9tnQ0z6wKdck6zBJ8JyNzewZahFKueDTn-9DOqIDfr3YHvQLLzeXyJ8e
+h4AgjW-hvlLzRGtkCknjLIgXVa3rMTycseAwbW-mgdCqqkw3SdEG8feAcyntmvE8j2jbtSDStQMB
+9JdvyNLuQdNG4pxpusgvVso0-8NQF0YVa9VFwg9U6IPSx5p8FcW68OAHt_fEgT4ZtiH7o9aur4o9
+oYqUh2lALCY-__9QLq1KkNjMKs33Jz9E8LbRerG9PLclkTrxCjYAeUWBjCwSI7OB7xkuaYDSjkjj
+a46NLpdBN1GNcsFFcZ79GFAK0_DsyxGLX8Tq6q0Bvhs8whD8wlSxpTGxYkyqNX-vcb7SDN_0WkCE
+XSdZWkqTHXcYbOvoCOb_e6SFAztuMenuHWY0utX0gBfx_X5lPDFyoYXErxFQHiA7t27keshXNa6R
+ukQRRS8kMjre1U74sc-fRNXkXpl57rG4rgxaEX0eBeowa53KAsVvUAoSac2aC_nfzXrDvoyf9Xi3
+JpEZNhUDLpFCEycV4I7jGQ9wo9qNaosvlsr6kbLDNdb_1xrGVgjT3xEvRNJNPqslSAu-yD-UFhC3
+AmCdYUnugw_eEFqXCHTARcRkdPPvl2XsmEKY2IqEeO5tz4DyXQFaL-5hEVh6lYEU1EOWHk3UGIXe
+Vc5_Ttp82qNLmlJPbZvgmNTJzYTHDQ_27KBcp7IVVZgPDjVKdWqQvZ18KhxvfF3Idgy82LBZniFV
+IbtxllXiPRxoPQriSXMnXjh3XkvSDI2pFxXfEvLRn1tvcFOwPNCz3QfPIzYg8uYXN5bRt3ZOrR_g
+ZhIlrc7HO0VbNbeqEVPKMZ-cjkqGj4VAuDKoQc0eQ6X_wCoAGO78nPpLeIvZPx1X3z5YoqNA
\ No newline at end of file
diff --git a/demo/docker_files/preconfigure-ecomp-keystopics.sh b/demo/docker_files/preconfigure-ecomp-keystopics.sh
new file mode 100644
index 0000000..03cf45c
--- /dev/null
+++ b/demo/docker_files/preconfigure-ecomp-keystopics.sh
@@ -0,0 +1,191 @@
+#!/bin/bash
+
+HOSTPORT="127.0.0.1:3904"
+KEYDIR="."
+
+
+# dictionary of API Keys and the topics owned by each API key
+declare -A topics
+topics=( \
+["anonymous"]="APPC-CL APPC-TEST2 PDPD-CONFIGURATION POLICY-CL-MGT DCAE-CL-EVENT unauthenticated.SEC_MEASUREMENT_OUTPUT unauthenticated.TCA_EVENT_OUTPUT " \
+["apikey-SDC1"]="SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1" \
+["apikey-APPC1"]="APPC-TEST1" \
+["apikey-PORTAL1"]="ECOMP-PORTAL-INBOX" \
+["apikey-PORTALAPP1"]="ECOMP-PORTAL-OUTBOX-APP1" \
+["apikey-PORTALDBC1"]="ECOMP-PORTAL-OUTBOX-DBC1" \
+["apikey-PORTALSDC1"]="ECOMP-PORTAL-OUTBOX-SDC1" \
+["apikey-PORTALVID1"]="ECOMP-PORTAL-OUTBOX-VID1" \
+["apikey-PORTALPOL1"]="ECOMP-PORTAL-OUTBOX-POL1" \
+)
+
+# dictionary of producers for each topic
+declare -A acl_producers
+acl_producers=(\
+["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["ECOMP-PORTAL-INBOX"]="apikey-PORTALAPP1 apikey-PORTALDBC1 apikey-PORTALSDC1 apikey-PORTALVID1 apikey-PORTALPOL1" \
+["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTAL1" \
+["APPC-TEST1"]="apikey-APPC1" \
+)
+
+# dictionary of consumers for each topic
+declare -A acl_consumers
+acl_consumers=(\
+["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
+["ECOMP-PORTAL-INBOX"]="apikey-PORTAL1" \
+["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTALAPP1" \
+["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTALDBC1" \
+["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTALSDC1" \
+["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTALVID1" \
+["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTALPOL1" \
+["APPC-TEST1"]="apikey-APPC1" \
+)
+
+myrun () {
+  CMD="$1"
+  echo "CMD:[$CMD]"
+  eval $CMD
+}
+
+getowner () {
+  local -n outowner=$2
+  target_topic="$1"
+  echo "look for owner for $target_topic"
+  for o in "${!topics[@]}"; do
+    keytopics=${topics[$o]}
+    for topic in ${keytopics}; do
+      if [ "$topic" == "-" ]; then
+        continue
+      fi
+      if [ "$topic" == "$target_topic" ]; then
+        echo "found owner $o"
+        outowner=$o
+        return
+      fi
+    done
+  done
+}
+
+add_acl () {
+  acl_group="$1"
+  topic="$2"
+  client="$3"
+  echo " adding $client to group $acl_group for topic $2"
+
+  getowner "$topic" owner
+  echo "==owner for $topic is $owner"
+
+
+  if [ -z "$owner" ]; then
+    echo "No owner API key found for topic $topic"
+    #exit
+  fi
+  OWNER_API_KEYFILE="${KEYDIR}/${owner}.key"
+  if [ ! -e $API_KEYFILE ]; then
+    echo "No API key file $OWNER_API_KEYFILE for owner $owner of topic $topic, exit "
+    #exit
+  fi
+
+  CLIENT_API_KEYFILE="${KEYDIR}/${client}.key"
+  if [ ! -e $CLIENT_API_KEYFILE ]; then
+    echo "No API key file $CLIENT_API_KEYFILE for client $client, exit "
+    #exit
+  else
+    CLIENTKEY=`cat ${CLIENT_API_KEYFILE} |jq -r ".key"`
+    UEBAPIKEYSECRET=`cat ${OWNER_API_KEYFILE} |jq -r ".secret"`
+    UEBAPIKEYKEY=`cat ${OWNER_API_KEYFILE} |jq -r ".key"`
+    time=`date --iso-8601=seconds`
+    signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+    xAuth=$UEBAPIKEYKEY:$signature
+    xDate="$time"
+    CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth:$xAuth\" -H \"X-CambriaDate:$xDate\" -X PUT http://${HOSTPORT}/topics/${topic}/${acl_group}/${CLIENTKEY}"
+    myrun "$CMD"
+  fi
+}
+
+
+for key in "${!topics[@]}"; do
+  # try to create key if no such key exists
+  API_KEYFILE="${KEYDIR}/${key}.key"
+  if [ "$key" != "anonymous" ]; then
+    if [ -e ${API_KEYFILE} ]; then
+      echo "API key for $key already exists, no need to create new"
+    else
+      echo "generating API key $key"
+      echo '{"email":"no email","description":"API key for '$key'"}' > /tmp/input.txt
+
+      CMD="curl -s -o ${API_KEYFILE} -H \"Content-Type: application/json\" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create"
+      myrun "$CMD"
+      echo "API key for $key has been created: "; cat ${API_KEYFILE}
+      echo "generating API key $key done"; echo
+    fi
+  fi
+
+  # create the topics for this key
+  keytopics=${topics[$key]}
+  for topic in ${keytopics}; do
+    if [ "$topic" == "-" ]; then
+      continue
+    fi
+    if [ "$key" == "anonymous" ]; then
+      echo "creating anonymous topic $topic"
+      CMD="curl -H \"Content-Type:text/plain\" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${topic}"
+      myrun "$CMD"
+      echo "done creating anonymous topic $topic"; echo
+    else
+      echo "creating API key secured topic $topic for API key $key"
+      UEBAPIKEYSECRET=`cat ${API_KEYFILE} |jq -r ".secret"`
+      UEBAPIKEYKEY=`cat ${API_KEYFILE} |jq -r ".key"`
+      echo '{"topicName":"'${topic}'","topicDescription":"'$key' API Key secure topic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+      time=`date --iso-8601=seconds`
+      signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+      xAuth=$UEBAPIKEYKEY:$signature
+      xDate="$time"
+      CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth: $xAuth\" -H \"X-CambriaDate: $xDate\" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create"
+      myrun "$CMD"
+      echo "done creating api key topic $topic"
+      echo
+    fi
+  done
+done
+
+
+echo
+echo "============ post loading state of topics ================="
+CMD="curl http://${HOSTPORT}/topics"
+myrun "$CMD"
+for key in "${!topics[@]}"; do
+  keytopics=${topics[$key]}
+  echo "---------- key: ${key} "
+  for topic in ${keytopics}; do
+    if [ "$topic" == "-" ]; then
+      continue
+    fi
+    CMD="curl http://${HOSTPORT}/topics/${topic}"
+    myrun "$CMD"
+    echo
+  done
+  echo "end of key: ${key} secured topics"
+done
+
+
+# adding publisher and subscriber ACL
+for topic in "${!acl_consumers[@]}"; do
+  consumers=${acl_consumers[$topic]}
+  for consumer in ${consumers}; do
+    add_acl "consumers" "$topic" "$consumer"
+  done
+done
+
+for topic in "${!acl_producers[@]}"; do
+  producers=${acl_producers[$topic]}
+  for producer in ${producers}; do
+    add_acl "producers" "$topic" "$producer"
+  done
+done
+
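For reference, the ACL grant issued by add_acl above boils down to a signed PUT against /topics/{topic}/{producers|consumers}/{clientKey}; a minimal sketch, assuming $TIME and $SIG are computed with the topic owner's secret exactly as in the script, and that the key/topic names are the ones provisioned above:

    curl -H "X-CambriaAuth: $OWNER_KEY:$SIG" -H "X-CambriaDate: $TIME" \
         -X PUT http://127.0.0.1:3904/topics/APPC-TEST1/producers/$CLIENT_KEY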
diff --git a/src/main/resources/docker-compose/start-kafka.sh b/demo/docker_files/start-kafka.sh
index 87047ad..4d955da 100644
--- a/src/main/resources/docker-compose/start-kafka.sh
+++ b/demo/docker_files/start-kafka.sh
@@ -1,25 +1,4 @@
 #!/bin/bash
-#*******************************************************************************
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-#*******************************************************************************
 
 if [[ -z "$KAFKA_PORT" ]]; then
   export KAFKA_PORT=9092
@@ -29,11 +8,13 @@ if [[ -z "$KAFKA_ADVERTISED_PORT" ]]; then
 fi
 if [[ -z "$KAFKA_BROKER_ID" ]]; then
   # By default auto allocate broker ID
+  #export KAFKA_BROKER_ID=-1
   export KAFKA_BROKER_ID=1
 fi
-if [[ -z "$KAFKA_LOG_DIRS" ]]; then
-  export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
-fi
+#if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+  #export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+  export KAFKA_LOG_DIRS="/kafka/kafka-logs"
+#fi
 if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
   export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
 fi
diff --git a/demo/docker_files/state-20170301.tar.gz b/demo/docker_files/state-20170301.tar.gz
new file mode 100644
index 0000000..b36b05a
--- /dev/null
+++ b/demo/docker_files/state-20170301.tar.gz
Binary files differ
diff --git a/demo/docker_files/subscriber.sh b/demo/docker_files/subscriber.sh
new file mode 100644
index 0000000..3e193f0
--- /dev/null
+++ b/demo/docker_files/subscriber.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+
+GET/events/{topic}/{consumerGroup}/{clientId}
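subscriber.sh above only records the REST pattern rather than a runnable command; a working equivalent with curl (consumer group and client id are arbitrary identifiers, timeout is in milliseconds, mirroring the usage in regress.sh and test.sh below):

    curl http://127.0.0.1:3904/events/APPC-CL/group1/c1?timeout=5000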
"this is a test" } diff --git a/demo/docker_files/tests/key.req b/demo/docker_files/tests/key.req new file mode 100644 index 0000000..a7e4092 --- /dev/null +++ b/demo/docker_files/tests/key.req @@ -0,0 +1 @@ +{ "email": "no.email", "description": "request for direct response KEY" } diff --git a/demo/docker_files/tests/out/1.1.out b/demo/docker_files/tests/out/1.1.out new file mode 100644 index 0000000..a9488d8 --- /dev/null +++ b/demo/docker_files/tests/out/1.1.out @@ -0,0 +1,5 @@ +{"topics": [ + "msgrtr.apinode.metrics.dmaap", + "28537.3", + "Topic-28537-2" +]}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.1.out b/demo/docker_files/tests/out/2.1.out
new file mode 100644
index 0000000..ef4eada
--- /dev/null
+++ b/demo/docker_files/tests/out/2.1.out
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "request for direct response KEY",
+    "email": "no.email"
+  },
+  "secret": "5V6YSDm8R6v6TArrLLtJUx4L",
+  "key": "HnJm7b9Zr16hgpU5"
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.2.out b/demo/docker_files/tests/out/2.2.out
new file mode 100644
index 0000000..d682023
--- /dev/null
+++ b/demo/docker_files/tests/out/2.2.out
@@ -0,0 +1,13 @@
+{
+  "owner": "HnJm7b9Zr16hgpU5",
+  "readerAcl": {
+    "enabled": true,
+    "users": []
+  },
+  "name": "Topic-28592-2",
+  "description": "topic for test 2.2",
+  "writerAcl": {
+    "enabled": true,
+    "users": []
+  }
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.3.out b/demo/docker_files/tests/out/2.3.out
new file mode 100644
index 0000000..d62034e
--- /dev/null
+++ b/demo/docker_files/tests/out/2.3.out
@@ -0,0 +1,6 @@
+{"topics": [
+    "Topic-28592-2",
+    "msgrtr.apinode.metrics.dmaap",
+    "28537.3",
+    "Topic-28537-2"
+]}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.4.out b/demo/docker_files/tests/out/2.4.out
new file mode 100644
index 0000000..d682023
--- /dev/null
+++ b/demo/docker_files/tests/out/2.4.out
@@ -0,0 +1,13 @@
+{
+  "owner": "HnJm7b9Zr16hgpU5",
+  "readerAcl": {
+    "enabled": true,
+    "users": []
+  },
+  "name": "Topic-28592-2",
+  "description": "topic for test 2.2",
+  "writerAcl": {
+    "enabled": true,
+    "users": []
+  }
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.5.out b/demo/docker_files/tests/out/2.5.out
new file mode 100644
index 0000000..670bf46
--- /dev/null
+++ b/demo/docker_files/tests/out/2.5.out
@@ -0,0 +1,4 @@
+{
+  "serverTimeMs": 9,
+  "count": 1
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.6.out b/demo/docker_files/tests/out/2.6.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/demo/docker_files/tests/out/2.6.out
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.1.out b/demo/docker_files/tests/out/3.1.out
new file mode 100644
index 0000000..d2a9b4e
--- /dev/null
+++ b/demo/docker_files/tests/out/3.1.out
@@ -0,0 +1,4 @@
+{
+  "serverTimeMs": 175,
+  "count": 1
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.2.out b/demo/docker_files/tests/out/3.2.out
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/demo/docker_files/tests/out/3.2.out
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.3.out b/demo/docker_files/tests/out/3.3.out
new file mode 100644
index 0000000..b823f1c
--- /dev/null
+++ b/demo/docker_files/tests/out/3.3.out
@@ -0,0 +1,4 @@
+{
+  "serverTimeMs": 2,
+  "count": 1
+}
\ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.4.out b/demo/docker_files/tests/out/3.4.out
new file mode 100644
index 0000000..9930076
--- /dev/null
+++ b/demo/docker_files/tests/out/3.4.out
@@ -0,0 +1 @@
+["{\"datestamp\":\"Thu Dec 15 19:50:40 UTC 2016\",\"value\":\"this is a test\",\"key\":\"3.3\"}"]
\ No newline at end of file diff --git a/demo/docker_files/tests/regress.sh b/demo/docker_files/tests/regress.sh new file mode 100644 index 0000000..758dd7c --- /dev/null +++ b/demo/docker_files/tests/regress.sh @@ -0,0 +1,113 @@ +#!/bin/ksh +# +# depends on jq - https://stedolan.github.io/jq/ + +PROTOCOL=http +FQDN=127.0.0.1 +#vm1-message-router +#FQDN=10.208.128.229 +PORT=3904 +URL=$PROTOCOL://$FQDN:$PORT + +rm -f out/* +mkdir -p out + +results() { +# echo "[debug] compare $1 to $2" + if [ $1 == $2 ] + then + echo -n "SUCCESS " + else + echo -n "FAIL ($1) " + fi + echo " :TEST $3 ($4)" +} +SUITE=0 +SUITE=$((SUITE + 1)) +echo "SUITE $SUITE: List topics" +TN=0 +TN=$((TN + 1)) +TC=$SUITE.$TN +expect=200 +rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics` +results $rc $expect $TC "list" +StartTopicCount=`cat out/$TC.out | wc -l` + + +SUITE=$((SUITE + 1)) +echo +echo "SUITE $SUITE: APIKEY authenticated topic" +TOPIC=Topic-$$-$SUITE +TN=0 +TN=$((TN + 1)) +TC=$SUITE.$TN +OUT=out/$TC.out +echo '{ "email": "no.email", "description": "request for direct response KEY" }' > key.req +rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @key.req $URL/apiKeys/create` +results $rc $expect $SUITE.$TN "gen apikey " +TN=$((TN + 1)) +TC=$SUITE.$TN +SECRET=$(jq ".secret" $OUT | cut -f 2 -d \") +KEY=$(jq ".key" $OUT | cut -f 2 -d \") +TIME=`date --iso-8601=seconds` +SIG=$(echo -n "$TIME" | openssl sha1 -hmac $SECRET -binary | openssl base64) +xAUTH=$KEY:$SIG +#echo "[debug] $SECRET $KEY $TIME $SIG $xAUTH" +DATA=data.$TC.json +echo "{ \"topicName\": \"$TOPIC\", \"topicDescription\": \"topic for test $TC\", \"partitionCount\": \"1\", \"replicationCount\": \"1\", \"transactionEnabled\": \"true\" }" > $DATA +rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/topics/create` +results $rc $expect $SUITE.$TN "create topic" +TN=$((TN + 1)) +TC=$SUITE.$TN +expect=200 +rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics` +results $rc $expect $TC "list " +TopicCnt=`cat out/$TC.out | wc -l` +results $TopicCnt $((StartTopicCount + 1)) $TC "topic count" +TN=$((TN + 1)) +TC=$SUITE.$TN +expect=200 +rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics/$TOPIC` +results $rc $expect $TC "list $TOPIC" +TN=$((TN + 1)) +TC=$SUITE.$TN +DATA=data.$TC.json +echo "{ \"datestamp\": \"`date`\", \"appkey\": \"x100\", \"appval\": \"some value\" }" > $DATA +rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/events/$TOPIC` +results $rc $expect $SUITE.$TN "pub APIKEY topic" +TN=$((TN + 1)) +TC=$SUITE.$TN +rc=`curl -s -w "%{http_code}" -o out/$TC.out -X GET -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" $URL/events/$TOPIC/g0/u1` +results $rc $expect $SUITE.$TN "sub APIKEY topic" + + +SUITE=$((SUITE + 1)) +echo +echo "SUITE $SUITE: anonymous topic" +TOPIC=$$.$SUITE +TN=0 +TN=$((TN + 1)) +TC=$SUITE.$TN +DATA=data.$TC.txt +echo "datestamp: `date`, key: $TC, value: this is a test " > $DATA +expect=200 +rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: text/plain" -d @$DATA $URL/events/$TOPIC` +results $rc $expect $SUITE.$TN "pub text/plain" +TN=$((TN + 1)) +TC=$SUITE.$TN +expect=200 +rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000` +results $rc $expect $SUITE.$TN "sub text/plain" 
+TN=$((TN + 1))
+TC=$SUITE.$TN
+DATA=data.$TC.json
+echo "{ \"datestamp\": \"`date`\", \"key\": \"$TC\", \"value\": \"this is a test\" }" > $DATA
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @$DATA $URL/events/$TOPIC`
+results $rc $expect $SUITE.$TN "pub json"
+TN=$((TN + 1))
+TC=$SUITE.$TN
+expect=200
+rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
+results $rc $expect $SUITE.$TN "sub json"
+
diff --git a/demo/docker_files/tests/test.sh b/demo/docker_files/tests/test.sh
new file mode 100644
index 0000000..0e06d5a
--- /dev/null
+++ b/demo/docker_files/tests/test.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+# lji: this is basically what Dom has in his regtest, re-done in bash instead of ksh
+
+HOSTPORT="127.0.0.1:3904"
+ANONTOPIC="anon-topic-$RANDOM"
+APITOPIC="api-topic-$RANDOM"
+APIKEYFILE="/tmp/key"
+
+echo "blah" > /tmp/sample.txt
+
+if [ ! -e /usr/bin/jq ]; then
+    apt-get update && apt-get -y install jq
+fi
+
+
+# list topics
+curl http://${HOSTPORT}/topics
+
+# publish to an anonymous topic (the first publish creates the topic)
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+# subscribe to an anonymous topic
+curl -H "Content-Type:text/plain" -X GET http://${HOSTPORT}/events/$ANONTOPIC/group1/C1?timeout=5000 &
+curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
+
+
+
+
+# create an API key
+echo '{"email":"no email","description":"API key and secret both in response"}' > /tmp/input.txt
+curl -s -o ${APIKEYFILE} -H "Content-Type:application/json" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create
+UEBAPIKEYSECRET=`cat ${APIKEYFILE} |jq -r ".secret"`
+UEBAPIKEYKEY=`cat ${APIKEYFILE} |jq -r ".key"`
+
+# create an API-key-secured topic
+# pay attention to the replication count
+echo '{"topicName":"'${APITOPIC}'","topicDescription":"This is an API key secured topic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create
+
+# first subscribe and run it in the background, then publish
+time=`date --iso-8601=seconds`
+signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
+xAuth=$UEBAPIKEYKEY:$signature
+xDate="$time"
+curl -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X GET http://${HOSTPORT}/events/${APITOPIC}/g0/u1 &
+curl -H "Content-Type:text/plain" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${APITOPIC}
diff --git a/demo/docker_files/uebapikey-sdc b/demo/docker_files/uebapikey-sdc
new file mode 100644
index 0000000..0b3aa80
--- /dev/null
+++ b/demo/docker_files/uebapikey-sdc
@@ -0,0 +1,8 @@
+{
+  "aux": {
+    "description": "API key for SDC",
+    "email": "no email"
+  },
+  "secret": "KpMJB28vNduEJ0zHDWOQXBmQ",
+  "key": "779NflzwmkuKpqef"
+}
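
For reference, the key/secret pair stored in a file like uebapikey-sdc can be used to sign Message Router requests the same way test.sh does. A minimal sketch, assuming jq and openssl are installed; the key file path and topic name are illustrative:

    #!/bin/bash
    # hypothetical usage of a stored API key file (path and topic are examples)
    KEYFILE=./uebapikey-sdc
    KEY=$(jq -r ".key" "${KEYFILE}")
    SECRET=$(jq -r ".secret" "${KEYFILE}")
    TIME=$(date --iso-8601=seconds)
    SIG=$(echo -n "$TIME" | openssl sha1 -hmac "$SECRET" -binary | openssl base64)
    # publish one message to an API-key-secured topic
    curl -H "X-CambriaAuth: ${KEY}:${SIG}" -H "X-CambriaDate: ${TIME}" \
         -H "Content-Type: text/plain" -X POST -d "sample message" \
         http://127.0.0.1:3904/events/SOME-TOPIC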
\ No newline at end of file diff --git a/docs/Filter/Filter.rst b/docs/Filter/Filter.rst deleted file mode 100644 index 38d8dde..0000000 --- a/docs/Filter/Filter.rst +++ /dev/null @@ -1,591 +0,0 @@ -======= -Filter -======= - -A filter examine an event and decides if it matches or doesn't. Filters -are mainly used in rules to decide if the processing entries should be -executed on the given event. They're also used for settings, and systems -like the Graph Correlator re-use Highland Park's filter mechanism to -specify which alarms fit in a correlation. Some publishers may produce -topics with a lot of volume and a subscriber may want only a portion of -those messages. The subscriber can certainly filter messages after -receiving them, but it may be substantially more efficient to ask the -API server to filter the messages before sending them to the -subscriber.The standard library includes a number of simple filters. The -Equals filter, for example, compares a field's value with some other -value and returns true if the values match. The standard library also -includes filter classes called And, Or, and Not, so you can compose more -complex filters. For example, written in the standard JSON config -format: - -+-----------------------------------------+ -| "filter":{ | -| "class":"And", | -| "filters": | -| [ | -| { "class":"Equals", "foo":"abc" }, | -| { "class":"Assigned", "field":"bar" } | -| ] | -| } | -+-----------------------------------------+ - -- This filter matches events in which the field "foo" has value "abc" - and the field "bar" is assigned to some value (as opposed to not being - present on the event). Filters are used by the consumers to filter out - data and consume only specific set of data which matches the - conditions mentioned in filter. Filters can be passed as a query - parameter by the consumer in consume request as mentioned below: - **http://localhost:8080/DMaaP/dmaaprest/events/DMaaP/consumergroup/mHOeNFY4XiWx4CBa?filter=\\{"class":"Equals", - "field":"email", "value":"test@abc.com" }** -- Filters can be applied only on data in JSON format i.e. if applied, - filters will automatically ignore any non-json data. While consuming, - request CONTENT\_TYPE is not relevant to filter. - -All the supported filter can be found below. - -Types of Filters ----------------- - -- DMaaP Message Router supports all the filters which were supported by - DMaaP Message Router and are mentioned below:- -- **All Alarms:** - - Match all alarms. -- **And:** - - Create a set of filters. This filter matches when all of them matches. - -+-------------+--------------------+------------+---------------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+====================+============+=====================+ -| filters | Combined Filters | LIST | A list of filters | -+-------------+--------------------+------------+---------------------+ -| | | | | -+-------------+--------------------+------------+---------------------+ - -- **Assigned:** - - Choose a field from the event to check for assignment. This filter - matches when the field is assigned. - -+-------------------+--------------------------------------------------------+------------+-----------------+ -| **Field** | **Description** | **Type** | **Notes** | -+===================+========================================================+============+=================+ -| field | The field to check for on the event. 
| STRING | A field name | -+-------------------+--------------------------------------------------------+------------+-----------------+ -| emptyIsAssigned | If true, an empty value is considered an assignment. | BOOLEAN | True or False | -+-------------------+--------------------------------------------------------+------------+-----------------+ - -- **Contains:** - - Check if a search string contains another string. - -+-------------+---------------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+===================================================+============+==============+ -| String | The value to search. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ -| Value | The value to search for. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ -| | | | | -+-------------+---------------------------------------------------+------------+--------------+ - -- **EndsWith**: - - Check if a search string ends with another string. - -+-------------+---------------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+===================================================+============+==============+ -| string | The value to search. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ -| value | The value to search for. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ - -- **Equals:** - - - Choose a field from the event and a value to check for equality. - -+-------------+----------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+==============================================+============+==============+ -| field | The field to check. Supports ${} notation. | STRING | Any string | -+-------------+----------------------------------------------+------------+--------------+ -| value | The value to match. Supports ${} notation. | STRING | Any string | -+-------------+----------------------------------------------+------------+--------------+ - -- **FlatironObjectExists** - - - Matches when the given object exists in the given Flatiron instance. - -+-------------+---------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=============================================+============+==============+ -| oid | The OID of the object to look for. | STRING | Any string | -+-------------+---------------------------------------------+------------+--------------+ -| flatiron | The name of the Flatiron client instance. | STRING | Any string | -+-------------+---------------------------------------------+------------+--------------+ - -- **IsAging** - - Choose a field to test. This filter matches if the expression is - numeric. - -+-------------+---------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=============================================+============+==============+ -| field | The field to test. 
Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------+------------+--------------+ - -- **IsNumeric** - - Choose a field to test. This filter matches if the expression is - numeric. - -+-------------+---------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=============================================+============+==============+ -| field | The field to test. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------+------------+--------------+ - -- **MathCondition** - - Choose a field from the event and a value for logical math conditions. - -+-------------+-------------------------------------------------+------------+-----------------------------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=================================================+============+===================================+ -| Field | The field to check. Supports ${} notation. | STRING | Any string | -+-------------+-------------------------------------------------+------------+-----------------------------------+ -| Value | The value to consider. Supports ${} notation. | STRING | Any string | -+-------------+-------------------------------------------------+------------+-----------------------------------+ -| operator | The operation. | STRING | One of { "<=", ">=", ">", "<" } | -+-------------+-------------------------------------------------+------------+-----------------------------------+ -| | | | | -+-------------+-------------------------------------------------+------------+-----------------------------------+ - -- **NoAlarms** - - Don't match any alarms. -- **Not** - - Negate the configured filter. - -+-------------+-------------------------+------------+-------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=========================+============+=============+ -| filter | The filter to negate. | FILTER | A filter | -+-------------+-------------------------+------------+-------------+ - -- **NotEqual** - - Choose a field from the event and a value to check for inequality. - -+-------------+----------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+==============================================+============+==============+ -| field | The field to check. Supports ${} notation. | STRING | Any string | -+-------------+----------------------------------------------+------------+--------------+ -| value | The value to match. Supports ${} notation. | STRING | Any string | -+-------------+----------------------------------------------+------------+--------------+ - -- **NotOneOf** - - Match when the specified field does not have a value from the given list. - -+-------------+---------------------------------------------+------------+---------------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=============================================+============+=====================+ -| field | The field to test. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------+------------+---------------------+ -| values | The matching values. 
| LIST | A list of strings | -+-------------+---------------------------------------------+------------+---------------------+ - -- **OneOf** - - Match when the specified field has a value from the given list. - -+-------------+---------------------------------------------+------------+---------------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=============================================+============+=====================+ -| field | The field to test. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------+------------+---------------------+ -| values | The matching values. | LIST | A list of strings | -+-------------+---------------------------------------------+------------+---------------------+ - -- **Or** - - Create a set of filters. This filter matches when any one of them - matches. - -+-------------+--------------------+------------+---------------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+====================+============+=====================+ -| filters | Combined Filters | LIST | A list of filters | -+-------------+--------------------+------------+---------------------+ - -- **RegEx** - - Choose a field from the event to match against the regular expression - you provide. - -+-------------+---------------------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+=========================================================+============+==============+ -| field | The text to check for a match. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------------+------------+--------------+ -| value | The regular expression (pattern) to match. | STRING | Any string | -+-------------+---------------------------------------------------------+------------+--------------+ - -- **StartsWith** - - Check if a search string starts with another string. - -+-------------+---------------------------------------------------+------------+--------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=============+===================================================+============+==============+ -| string | The value to search. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ -| Value | The value to search for. Supports ${} notation. | STRING | Any string | -+-------------+---------------------------------------------------+------------+--------------+ - -- **Unassigned** - - Choose a field from the event to check for assignment. This filter - matches when the field is not assigned. - -+-------------------+--------------------------------------------------------+------------+-----------------+ -| **Field** | **Description** | **Type** | **Notes** | -+===================+========================================================+============+=================+ -| field | The field to check for on the event. | STRING | A field name | -+-------------------+--------------------------------------------------------+------------+-----------------+ -| emptyIsAssigned | If true, an empty value is considered an assignment. 
| BOOLEAN | True or False | -+-------------------+--------------------------------------------------------+------------+-----------------+ - -- **WithinSecondsFrom** - - This filter matches when the specified epoch time value is within the - given number of seconds from the baseline time value. Both time values - are assumed to be in seconds. If a value is in milliseconds, set - baselineTimeInMillis and/or eventTimeInMillis to true. - -+------------------------+------------------------------------------------------------+------------+-----------------+ -| **Field** | **Description** | **Type** | **Notes** | -+========================+============================================================+============+=================+ -| field | The time value to test. Supports ${} | STRING | A field name | -+------------------------+------------------------------------------------------------+------------+-----------------+ -| eventTimeInMillis | Whether to convert the event value from milliseconds. | BOOLEAN | True or False | -+------------------------+------------------------------------------------------------+------------+-----------------+ -| seconds | The number of seconds. | NUMBER | A number | -+------------------------+------------------------------------------------------------+------------+-----------------+ -| baselineTimeInMillis | Whether to convert the baseline value from milliseconds. | BOOLEAN | True or False | -+------------------------+------------------------------------------------------------+------------+-----------------+ -| baseline | The baseline time value. Supports ${}. | STRING | Any string | -+------------------------+------------------------------------------------------------+------------+-----------------+ - -- **WithinTimeFromNow** - - This filter matches when the named field has an epoch time value - within the given number of seconds from the current time. The event's - time value is assumed to be in seconds. If it's in milliseconds, set - eventTimeInMillis to true. - -+---------------------+---------------------------------------------------------+------------+-----------------+ -| **Field** | **Description** | **Type** | **Notes** | -+=====================+=========================================================+============+=================+ -| field | The field to check on the event. | STRING | A field name | -+---------------------+---------------------------------------------------------+------------+-----------------+ -| eventTimeInMillis | Whether to convert the event value from milliseconds. | BOOLEAN | True or False | -+---------------------+---------------------------------------------------------+------------+-----------------+ -| seconds | The number of seconds. | NUMBER | A number | -+---------------------+---------------------------------------------------------+------------+-----------------+ - -- **Limit:** - - - Limit is the integer value and DMaaP Message Router will consumes - only that set of message which are specified in limit. - - .. code:: bash - - Suppose if we set limit=2, then only 2 sets of data will be consumed. - *Get \ **<<topicName>>/group/2?limit=4*** - Let us suppose if - **No of data available** = 4 - **Set limit** = 6 - i.e. limit>no of data - In this scenario all 4 sets of data will be consumed. - -- If limit is not passed with the url then by default limit is set to - 4096. - - .. code:: bash - - i.e. 4096 sets of data will be consumed. 
- **Timeout and Long Poll:** - -- Timeout is the integer value which will be treated by DMaaP Message - Router as time in millisecond. - - - -- Get - -+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| `***http://localhost/DMaaP/dmaaprest/events/<<topicName>>/group/2?timeout=20000*** <http://localhost/DMaaP/dmaaprest/events/%3c%3ctopicName%3e%3e/group/2?timeout=20000>`__ | -+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - -- If there is no data available to be consumed, then DMaaP Message - Router will poll for the particular period of time specified in - timeout this mechanism is known as Long Poll. - -- If timeout is not passed with url then DMaaP Message Router will set - the value of timeout =10000 - -- i.e. if no set of data are available then DMaaP Message Router will - poll for 10000 ms. - -***Meta:*** - -- Meta is a Boolean value. - -- DMaaP Message Router reads the value of meta from - MRConfiguration.properties file at the time of startup. - -- If the value of meta is not null and if value of meta is one of these - values true, yes, on, 1, y, checked then DMaaP Message Router will - take meta flag as true, else it will be false. - -- If meta is set to true then consumer will get the value of message - offset along with message. - - |image1| - - .. |image1| image:: images/image1.png - -- **Pretty**: - -- Pretty is a Boolean value. - -- DMaaP Message Router reads the value of pretty from - MRConfiguration.properties file at the time of startup. - -- If the value of pretty is not null and if value of pretty is one of - these values true, yes, on, 1, y, checked then DMaaP Message Router - will take pretty flag as true, else it will be false. - -- If pretty is set to true then different sets of messages will be - printed in next line separated by comma (,). - - |image2| - - .. |image2| image:: images/image2.png - -- **Filter** - - - A filter examine an event and decides if it matches or doesn't. - - Filters are mainly used in rules to decide if the processing entries - should be executed on the given event. They're also used for settings, - and systems like the Graph Correlator re-use Highland Park's filter - mechanism to specify which alarms fit in a correlation. - - The standard library includes a number of simple filters. The Equals - filter, for example, compares a field's value with some other value - and returns true if the values match. - - The standard library also includes filter classes called And, Or, and - Not, so you can compose more complex filters. For example, written in - the standard JSON config format: - - +-----------------------------------------+ - | "filter":{ | - | "class":"And", | - | "filters": | - | [ | - | { "class":"Equals", "foo":"abc" }, | - | { "class":"Assigned", "field":"bar" } | - | ] | - | } | - +-----------------------------------------+ - - - This filter matches events in which the field "foo" has value "abc" and - the field "bar" is assigned to some value (as opposed to not being - present on the event). - - Filters are used by the consumers to filter out data and consume only - specific set of data which matches the conditions mentioned in filter. 
- - Filters can be passed as a query parameter by the consumer in consume - request as mentioned below: - - **http://localhost:8080/DMaaP/dmaaprest/events/DMaaP/consumergroup/mHOeNFY4XiWx4CBa?filter=\\{"class":"Equals","field":"email", "value":"`test@abc.com <mailto:test@att.com>`__" }** - - Filters can be applied only on data in JSON format i.e. if applied, filters will automatically ignore any non-json data. - - While consuming, request CONTENT\_TYPE is not relevant to filter. - - The MR API allows a subscriber pass a Highland Park filter as part of - the GET request. This will filter the stream of messages sent back to - the subscriber, but for this to work, there are some requirements: - - - The message payload must be JSON - - - Only a filter built from Highland Park's Standard Library may be - used. (The Cambria API server doesn't have access to plugged in - filters.) - - - The filter must be encoded properly in the URL path. - -Server-side filtering can also be setup in the Java client as illustrated below - -**Filtering Consumer** - -You can also provide a Highland Park filter to your consumer instance, -and this filter is passed on to the server in the GET request. One way -to create the filter is programmatically. In your code, instantiate a -filter from the Highland Park Standard Library Then create a String -representation of the filter using the FilterIo.write utility. This -String can then be passed to the Cambria client instance for use on the -server. - -Remember, only Highland Park standard library filter components can be -used -- no plug-ins are available in the Cambria server context. - - .. code:: bash - - package org.onap.sa.highlandPark.integration; - - import java.io.IOException; - - import java.util.UUID; - - import org.onap.nsa.cambria.client.CambriaClientFactory; - - import org.onap.nsa.cambria.client.CambriaConsumer; - - import org.onap.sa.highlandPark.processor.HpEvent; - - import org.onap.sa.highlandPark.stdlib.filters.FilterIo; - - import org.onap.sa.highlandPark.stdlib.filters.OneOf; - - public class ExampleFilteringConsumer - - { - - public static void main ( String[] args ) throws IOException, - InterruptedException - - { - - // Cambria clients take a set of 1 or more servers to use in round-robin - fashion. - - // If a server becomes unreachable, another in the group is used. - - final String - serverGroup="ueb01hydc.it.att.com,ueb02hydc.it.att.com,ueb03hydc.it.att.com"; - - // choose a topic - - final String topic = "TEST-TOPIC"; - - // Cambria clients can run in a cooperative group to handle high-volume - topics. - - // Here, we create a random group name, which means this client is not - re-startable. - - final String consumerGroup = UUID.randomUUID ().toString (); - - final String consumerId = "0"; - - // Cambria clients can sit in a tight loop on the client side, using a - long-poll - - // to wait for messages, and a limit to tell the server the most to send - at a time. - - final int longPollMs = 30\*1000; - - final int limit = -1; - - // The Cambria server can filter the returned message stream using - filters from the - - // Highland Park system. Here, we create a simple filter to test for the - AlarmID - - // value being one of the Mobility power alarms. - - final OneOf oneOf = new OneOf ( "AlarmId", kPowerAlarms ); - - // create the consumer - - final CambriaConsumer cc = CambriaClientFactory.createConsumer ( - serverGroup, topic, - - consumerGroup, consumerId, longPollMs, limit, FilterIo.write ( oneOf ) - ); - - // now loop reading messages. 
Note that cc.fetch() will wait in its HTTP - receive - - // method for up to 30 seconds (longPollMs) when nothing's available at - the server. - - long count = 0; - - while ( true ) - - { - - for ( String msg : cc.fetch () ) - - { - - System.out.println ( "" + (++count) + ": " + msg ); - - } - - } - - } - - private static final String[] kPowerAlarms = - - { - - "HUB COMMERCIAL POWER FAIL\_FWD", - - "HUB COMMERCIAL POWER FAIL", - - "RBS COMMERCIAL POWER FAIL - Fixed\_FWD", - - "RBS COMMERCIAL POWER FAIL\_FWD", - - "RBS COMMERCIAL POWER FAIL - No Generator\_FWD", - - "RBS COMMERCIAL POWER FAIL - Portable\_FWD", - - "RBS COMMERCIAL POWER FAIL - Shared\_FWD", - - "RBS COMMERCIAL POWER FAIL - Yes\_FWD", - - "RBS COMMERCIAL POWER FAIL - YES\_FWD", - - "RBS COMMERCIAL POWER FAIL - Fixed", - - "RBS COMMERCIAL POWER FAIL - No Generator", - - "RBS COMMERCIAL POWER FAIL - Portable", - - "RBS COMMERCIAL POWER FAIL - Shared", - - "RBS COMMERCIAL POWER FAIL - YES", - - "RBS COMMERCIAL POWER FAIL - Yes", - - "RBS COMMERCIAL POWER FAIL", - - "HUB COMMERCIAL POWER FAIL - Fixed", - - "HUB COMMERCIAL POWER FAIL - No Generator", - - "HUB COMMERCIAL POWER FAIL - Portable", - - "HUB COMMERCIAL POWER FAIL - Shared", - - "HUB COMMERCIAL POWER FAIL - Fixed\_FWD", - - "HUB COMMERCIAL POWER FAIL - No Generator\_FWD", - - "HUB COMMERCIAL POWER FAIL - Portable\_FWD", - - "HUB COMMERCIAL POWER FAIL - Shared\_FWD", - - }; - - } - - - -**Filter Builder** - - MR server-side filtering allows a consumer to filter the stream of - messages returned from the GET call. The following link provide details - of building some of the filter to illustrate Filter Builder. It is not - meant to cover and provide examples of every filter diff --git a/docs/Installation/Installation.rst b/docs/Installation/Installation.rst index 5232bd4..6912562 100644 --- a/docs/Installation/Installation.rst +++ b/docs/Installation/Installation.rst @@ -1,7 +1,15 @@ -=================================
-DMAAP MESSAGE ROUTER INSTALLATION
-=================================
-This document describes how to install and access DMaaP Message Router.
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Installation
+============
+
+Environment
+-----------
+Message Router is developed using Kafka, Zookeeper and Java. The AJSC framework is used to create the REST service, and Docker is used to package the service.
+
+Steps
+-----
Message Router has 3 docker containers: dmaap\_container,
kafka\_container and zookeeper\_container. Zookeeper runs on 172.18.0.3,
kafka runs on 172.18.0.2 and dmaap on 172.18.0.4.
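
Once the stack is up, the container addresses can be verified from the
host; a minimal sketch, assuming the container names above are the ones
assigned by docker-compose:

.. code:: bash

    # print the IP address of each Message Router container (names are assumptions)
    for c in dmaap_container kafka_container zookeeper_container; do
        echo -n "$c: "
        docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$c"
    done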
@@ -17,9 +25,9 @@ kafka runs on 172.18.0.2 and dmaap on 172.18.0.4. 3) In /var/tmp/MsgRtrApi.properties, change the values of the following variables as
shown below:
- config.zk.servers=172.18.0.3
+ config.zk.servers=172.18.0.3 (change as per where ZooKeeper is deployed)
- kafka.metadata.broker.list=172.18.0.2:9092
+ kafka.metadata.broker.list=172.18.0.2:9092 (change as per where Kafka is deployed)
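+
+The same edits can be applied non-interactively; a minimal sketch,
+assuming the example addresses above (adjust the hosts to your
+deployment):
+
+.. code:: bash
+
+    sed -i -e 's|^config.zk.servers=.*|config.zk.servers=172.18.0.3|' \
+           -e 's|^kafka.metadata.broker.list=.*|kafka.metadata.broker.list=172.18.0.2:9092|' \
+           /var/tmp/MsgRtrApi.properties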
4) Install docker and docker-compose
@@ -52,6 +60,4 @@ Testing http://172.18.0.4:3904/events/TestTopic1/CG1/C1?timeout=1000
Note: You will only receive messages which have been published after
- you have subscribed to a topic.
-
-
+ you have subscribed to a topic.
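+
+A quick sanity check after installation is to publish one message and
+then consume it; a minimal sketch, assuming the default dmaap container
+address and an arbitrary topic name:
+
+.. code:: bash
+
+    # publish a test message (the first publish creates the topic)
+    curl -H "Content-Type: application/json" -X POST \
+         -d '{"msg":"hello"}' http://172.18.0.4:3904/events/TestTopic1
+
+    # consume it (only messages published after subscribing are returned)
+    curl http://172.18.0.4:3904/events/TestTopic1/CG1/C1?timeout=1000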
\ No newline at end of file diff --git a/docs/administration/administration.rst b/docs/administration/administration.rst new file mode 100644 index 0000000..7c082a3 --- /dev/null +++ b/docs/administration/administration.rst @@ -0,0 +1,14 @@ +.. This work is licensed under a Creative Commons Attribution 4.0 International License. +.. http://creativecommons.org/licenses/by/4.0 + +Administration +============== + + +Processes +--------- +NA + +Actions +------- +NA
\ No newline at end of file
diff --git a/docs/architecture/architecture.rst b/docs/architecture/architecture.rst
new file mode 100644
index 0000000..344cd92
--- /dev/null
+++ b/docs/architecture/architecture.rst
@@ -0,0 +1,18 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Architecture
+============
+
+
+Capabilities
+------------
+Message Router is a RESTful web service used for any needed action with Kafka.
+
+Usage Scenarios
+---------------
+Message Router endpoints are used to create/view/delete a topic in Kafka. Clients can use the Message Router endpoints to publish a message to a topic and to subscribe to a topic.
+
+Interactions
+------------
+The Message Router REST service uses the message service API to interact with ZooKeeper and Kafka.
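+
+The endpoint usage above can be sketched with curl; a minimal example,
+with host and topic names illustrative:
+
+.. code:: bash
+
+    curl http://127.0.0.1:3904/topics                                   # view topics
+    curl -H "Content-Type: application/json" -X POST \
+         -d '{"msg":"hello"}' http://127.0.0.1:3904/events/SOME-TOPIC  # publish
+    curl http://127.0.0.1:3904/events/SOME-TOPIC/CG1/C1?timeout=1000   # subscribe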
\ No newline at end of file
diff --git a/docs/configuration/configuration.rst b/docs/configuration/configuration.rst
new file mode 100644
index 0000000..08359d9
--- /dev/null
+++ b/docs/configuration/configuration.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Configuration
+=============
+
+Deploy ZooKeeper and Kafka, then update the following 2 properties in MsgRtrApi.properties:
+
+| config.zk.servers=<<zookeeper_host>>
+| kafka.metadata.broker.list=<<kafka_host>>:<<kafka_port>>
diff --git a/docs/consumedapis/consumedapis.rst b/docs/consumedapis/consumedapis.rst
new file mode 100644
index 0000000..321b461
--- /dev/null
+++ b/docs/consumedapis/consumedapis.rst
@@ -0,0 +1,8 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Consumed APIs
+=============
+
+Message Router does not consume any APIs.
+
diff --git a/docs/delivery/delivery.rst b/docs/delivery/delivery.rst
new file mode 100644
index 0000000..b815e1c
--- /dev/null
+++ b/docs/delivery/delivery.rst
@@ -0,0 +1,34 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Delivery
+========
+
+Message Router comprises the Message Router service and the Message Router API. Message Router uses Kafka and ZooKeeper.
+
+.. blockdiag::
+
+
+   blockdiag layers {
+     orientation = portrait
+     MR_SERVICE -> MR_API;
+     MR_SERVICE -> KAFKA;
+     MR_SERVICE -> ZOOKEEPER;
+     group l1 {
+        color = blue;
+        label = "MR container";
+        MR_SERVICE; MR_API;
+     }
+     group l2 {
+        color = yellow;
+        label = "Kafka Container";
+        KAFKA;
+     }
+     group l3 {
+        color = orange;
+        label = "ZooKeeper Container";
+        ZOOKEEPER;
+     }
+
+   }
+
diff --git a/docs/humaninterfaces/humaninterfaces.rst b/docs/humaninterfaces/humaninterfaces.rst
new file mode 100644
index 0000000..73fbe84
--- /dev/null
+++ b/docs/humaninterfaces/humaninterfaces.rst
@@ -0,0 +1,9 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Human Interfaces
+================
+
+Message Router does not have human interfaces.
+
+
diff --git a/docs/index.rst b/docs/index.rst
index 4eda778..b8978f6 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,17 +1,23 @@
 .. This work is licensed under a Creative Commons Attribution 4.0 International License.
 .. http://creativecommons.org/licenses/by/4.0
-.. Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 
-Message Router (MR)
+DMaaP Message Router (MR)
=========================
 
 .. toctree::
-   :maxdepth: 2
+   :maxdepth: 1
 
-   message-router/message-router
-   Installation/Installation
-   Filter/Filter
+   architecture/architecture.rst
+   offeredapis/offeredapis.rst
+   consumedapis/consumedapis.rst
+   delivery/delivery.rst
+   logging/logging.rst
+   Installation/Installation.rst
+   configuration/configuration.rst
+   administration/administration.rst
+   humaninterfaces/humaninterfaces.rst
+   release-notes/release-notes.rst
diff --git a/docs/logging/logging.rst b/docs/logging/logging.rst
new file mode 100644
index 0000000..052be98
--- /dev/null
+++ b/docs/logging/logging.rst
@@ -0,0 +1,52 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+Logging
+=======
+
+Where to Access Information
+---------------------------
+Message Router uses the logback framework to generate logs, and all the logs are emitted to the console.
+
+Error / Warning Messages
+------------------------
+Following are the error codes:
+
+| RESOURCE_NOT_FOUND=3001
+| SERVER_UNAVAILABLE=3002
+| METHOD_NOT_ALLOWED=3003
+| GENERIC_INTERNAL_ERROR=1004
+| MSG_SIZE_EXCEEDS_BATCH_LIMIT=5001
+| UNABLE_TO_PUBLISH=5002
+| INCORRECT_BATCHING_FORMAT=5003
+| MSG_SIZE_EXCEEDS_MSG_LIMIT=5004
+| INCORRECT_JSON=5005
+| CONN_TIMEOUT=5006
+| PARTIAL_PUBLISH_MSGS=5007
+| CONSUME_MSG_ERROR=5008
+| PUBLISH_MSG_ERROR=5009
+| RETRIEVE_TRANSACTIONS=5010
+| RETRIEVE_TRANSACTIONS_DETAILS=5011
+| TOO_MANY_REQUESTS=5012
+| RATE_LIMIT_EXCEED=301
+| GET_TOPICS_FAIL=6001
+| GET_TOPICS_DETAILS_FAIL=6002
+| CREATE_TOPIC_FAIL=6003
+| DELETE_TOPIC_FAIL=6004
+| GET_PUBLISHERS_BY_TOPIC=6005
+| GET_CONSUMERS_BY_TOPIC=6006
+| GET_CONSUMER_CACHE=6011
+| DROP_CONSUMER_CACHE=6012
+| GET_METRICS_ERROR=6013
+| TOPIC_NOT_IN_AAF=6017
+
diff --git a/docs/message-router/image1.png b/docs/message-router/image1.png
Binary files differ
deleted file mode 100644
index 100973d..0000000
--- a/docs/message-router/image1.png
+++ /dev/null
diff --git a/docs/Filter/images/image1.png b/docs/offeredapis/images/image1.png
Binary files differ
index d424f15..d424f15 100644
--- a/docs/Filter/images/image1.png
+++ b/docs/offeredapis/images/image1.png
diff --git a/docs/Filter/images/image2.png b/docs/offeredapis/images/image2.png
Binary files differ
index faddcfe..faddcfe 100644
--- a/docs/Filter/images/image2.png
+++ b/docs/offeredapis/images/image2.png
diff --git a/docs/message-router/message-router.rst b/docs/offeredapis/offeredapis.rst
index 6d08b16..ab0d4ea 100644
--- a/docs/message-router/message-router.rst
+++ b/docs/offeredapis/offeredapis.rst
@@ -1,18 +1,11 @@
-============================================
-Message Router (MR) API Guide
-============================================
-Architecture
--------------
-
-In DMaaP Message Router, Restful web service is exposed to client to perform any needed action with Kafka. After getting the request it calls the Message router service layer which is created using AJSC ( AT&T Java Service Container) . AJSC finally calls Kafka services and response is sent back.
-
- |image0|
-
- .. |image0| image:: image1.png
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
 
+Offered APIs
+~~~~~~~~~~~~
 
-HTTP Service APIs
-------------------
+.. toctree::
+   :maxdepth: 3
 
DMaaP Message Router utilizes an HTTP REST API to service all Publish
and Consume transactions. HTTP and REST standards are followed so
@@ -21,7 +14,7 @@ work to interact with Message Router. Message Router uses AAF for user's
authentication and authorization.
 
General HTTP Requirements
-=========================
+-------------------------
 
A DMaaP Message Router transaction consists of 4 distinct segments, HTTP
URL, HTTP Header, HTTP Body (POST) and HTTP Response. The general
@@ -29,24 +22,22 @@ considerations for each segment are as follows and are required for each
of the specific transactions described in this section.
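
As an illustration of the four segments, a single publish transaction can
be expressed as one curl call; a minimal sketch, with host and topic
names illustrative:

.. code:: bash

    # HTTP URL     : server base URL plus the events resource path for a topic
    # HTTP Header  : Content-Type describing the body
    # HTTP Body    : the message payload (POST)
    # HTTP Response: status code and body returned by Message Router (-i prints it)
    curl -i -H "Content-Type: application/json" -X POST \
         -d '{"key":"value"}' http://127.0.0.1:3904/events/SOME-TOPIC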
HTTP URL
-========
+--------
 
-http[s]://Username:Password@serverBaseURL{/routing}{resourcePath}
+http[s]://serverBaseURL{/routing}{resourcePath}
 
-- The Username:Password utilizes HTTP Basic Authentication and HTTPS/TLS to securely transmit the authorization and authentication credentials that AAF needs to validate the client's access to the requested resource.
-
-- The serverBaseURL points to DMaaP Message Router host/port that will service the request. Optionally DME2 service end points for Message Router can be used.
+- The serverBaseURL points to the DMaaP Message Router host/port that will service the request.
 
- The resourcePath specifies the specific service, or Topic, that the client is attempting to reach.
 
HTTP Header
-===========
+-----------
 
Specifies HTTP Headers, such as Content-Type, that define the parameters of the HTTP Transaction
 
HTTP Body
-=========
+---------
 
The HTTP Body contains the topic content when Publishing or Consuming.
The Body may contain topic messages in several formats (like below) but
@@ -528,3 +519,597 @@ API Inventory
 | | from write | String consumerId) | {consumerId} | | | | | ACL on a Topic | | | | | +-----------+--------------------+-----------------------------------------+---------------------------------------+----------------+----------------------------------+
+
+
+
+Filters
+-------
+
+A filter examines an event and decides if it matches or doesn't. Filters
+are mainly used in rules to decide if the processing entries should be
+executed on the given event. They're also used for settings, and systems
+like the Graph Correlator re-use Highland Park's filter mechanism to
+specify which alarms fit in a correlation. Some publishers may produce
+topics with a lot of volume, and a subscriber may want only a portion of
+those messages. The subscriber can certainly filter messages after
+receiving them, but it may be substantially more efficient to ask the
+API server to filter the messages before sending them to the
+subscriber. The standard library includes a number of simple filters. The
+Equals filter, for example, compares a field's value with some other
+value and returns true if the values match. The standard library also
+includes filter classes called And, Or, and Not, so you can compose more
+complex filters. For example, written in the standard JSON config
+format:
+
++---------------------------------------------+
+| "filter":{                                  |
+|     "class":"And",                          |
+|     "filters":                              |
+|     [                                       |
+|       { "class":"Equals", "foo":"abc" },    |
+|       { "class":"Assigned", "field":"bar" } |
+|     ]                                       |
+| }                                           |
++---------------------------------------------+
+
+- This filter matches events in which the field "foo" has the value "abc"
+  and the field "bar" is assigned to some value (as opposed to not being
+  present on the event). Filters are used by consumers to filter out
+  data and consume only the specific set of data which matches the
+  conditions mentioned in the filter. Filters can be passed as a query
+  parameter by the consumer in a consume request, as mentioned below and
+  shown in the sketch after this list:
+  **http://localhost:8080/DMaaP/dmaaprest/events/DMaaP/consumergroup/mHOeNFY4XiWx4CBa?filter=\\{"class":"Equals",
+  "field":"email", "value":"test@abc.com" }**
+- Filters can be applied only to data in JSON format, i.e. if applied,
+  filters will automatically ignore any non-JSON data. While consuming,
+  the request CONTENT\_TYPE is not relevant to the filter.
+
+All the supported filters can be found below.
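+
+A minimal sketch of passing a filter on a consume call, letting curl
+URL-encode the filter JSON; the host, topic, consumer group and consumer
+id are illustrative:
+
+.. code:: bash
+
+    FILTER='{"class":"Equals","field":"email","value":"test@abc.com"}'
+    curl -G "http://localhost:8080/DMaaP/dmaaprest/events/DMaaP/consumergroup/mHOeNFY4XiWx4CBa" \
+         --data-urlencode "filter=${FILTER}"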
+
+Types of Filters
+================
+
+- DMaaP Message Router supports the filters mentioned below:
+- **All Alarms:**
+    - Match all alarms.
+- **And:**
+    - Create a set of filters. This filter matches when all of them match.
+
++-------------+--------------------+------------+---------------------+
+| **Field**   | **Description**    | **Type**   | **Notes**           |
++=============+====================+============+=====================+
+| filters     | Combined Filters   | LIST       | A list of filters   |
++-------------+--------------------+------------+---------------------+
+
+- **Assigned:**
+    - Choose a field from the event to check for assignment. This filter
+      matches when the field is assigned.
+
++-------------------+--------------------------------------------------------+------------+-----------------+
+| **Field**         | **Description**                                        | **Type**   | **Notes**       |
++===================+========================================================+============+=================+
+| field             | The field to check for on the event.                   | STRING     | A field name    |
++-------------------+--------------------------------------------------------+------------+-----------------+
+| emptyIsAssigned   | If true, an empty value is considered an assignment.   | BOOLEAN    | True or False   |
++-------------------+--------------------------------------------------------+------------+-----------------+
+
+- **Contains:**
+    - Check if a search string contains another string.
+
++-------------+---------------------------------------------------+------------+--------------+
+| **Field**   | **Description**                                   | **Type**   | **Notes**    |
++=============+===================================================+============+==============+
+| string      | The value to search. Supports ${} notation.       | STRING     | Any string   |
++-------------+---------------------------------------------------+------------+--------------+
+| value       | The value to search for. Supports ${} notation.   | STRING     | Any string   |
++-------------+---------------------------------------------------+------------+--------------+
+
+- **EndsWith**:
+    - Check if a search string ends with another string.
+
++-------------+---------------------------------------------------+------------+--------------+
+| **Field**   | **Description**                                   | **Type**   | **Notes**    |
++=============+===================================================+============+==============+
+| string      | The value to search. Supports ${} notation.       | STRING     | Any string   |
++-------------+---------------------------------------------------+------------+--------------+
+| value       | The value to search for. Supports ${} notation.   | STRING     | Any string   |
++-------------+---------------------------------------------------+------------+--------------+
+
+- **Equals:**
+
+    - Choose a field from the event and a value to check for equality.
+
++-------------+----------------------------------------------+------------+--------------+
+| **Field**   | **Description**                              | **Type**   | **Notes**    |
++=============+==============================================+============+==============+
+| field       | The field to check. Supports ${} notation.   | STRING     | Any string   |
++-------------+----------------------------------------------+------------+--------------+
+| value       | The value to match. Supports ${} notation.
| STRING | Any string | ++-------------+----------------------------------------------+------------+--------------+ + +- **FlatironObjectExists** + + - Matches when the given object exists in the given Flatiron instance. + ++-------------+---------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=============================================+============+==============+ +| oid | The OID of the object to look for. | STRING | Any string | ++-------------+---------------------------------------------+------------+--------------+ +| flatiron | The name of the Flatiron client instance. | STRING | Any string | ++-------------+---------------------------------------------+------------+--------------+ + +- **IsAging** + - Choose a field to test. This filter matches if the expression is + numeric. + ++-------------+---------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=============================================+============+==============+ +| field | The field to test. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------+------------+--------------+ + +- **IsNumeric** + - Choose a field to test. This filter matches if the expression is + numeric. + ++-------------+---------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=============================================+============+==============+ +| field | The field to test. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------+------------+--------------+ + +- **MathCondition** + - Choose a field from the event and a value for logical math conditions. + ++-------------+-------------------------------------------------+------------+-----------------------------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=================================================+============+===================================+ +| Field | The field to check. Supports ${} notation. | STRING | Any string | ++-------------+-------------------------------------------------+------------+-----------------------------------+ +| Value | The value to consider. Supports ${} notation. | STRING | Any string | ++-------------+-------------------------------------------------+------------+-----------------------------------+ +| operator | The operation. | STRING | One of { "<=", ">=", ">", "<" } | ++-------------+-------------------------------------------------+------------+-----------------------------------+ +| | | | | ++-------------+-------------------------------------------------+------------+-----------------------------------+ + +- **NoAlarms** + - Don't match any alarms. +- **Not** + - Negate the configured filter. + ++-------------+-------------------------+------------+-------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=========================+============+=============+ +| filter | The filter to negate. | FILTER | A filter | ++-------------+-------------------------+------------+-------------+ + +- **NotEqual** + - Choose a field from the event and a value to check for inequality. 
+ ++-------------+----------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+==============================================+============+==============+ +| field | The field to check. Supports ${} notation. | STRING | Any string | ++-------------+----------------------------------------------+------------+--------------+ +| value | The value to match. Supports ${} notation. | STRING | Any string | ++-------------+----------------------------------------------+------------+--------------+ + +- **NotOneOf** + - Match when the specified field does not have a value from the given list. + ++-------------+---------------------------------------------+------------+---------------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=============================================+============+=====================+ +| field | The field to test. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------+------------+---------------------+ +| values | The matching values. | LIST | A list of strings | ++-------------+---------------------------------------------+------------+---------------------+ + +- **OneOf** + - Match when the specified field has a value from the given list. + ++-------------+---------------------------------------------+------------+---------------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=============================================+============+=====================+ +| field | The field to test. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------+------------+---------------------+ +| values | The matching values. | LIST | A list of strings | ++-------------+---------------------------------------------+------------+---------------------+ + +- **Or** + - Create a set of filters. This filter matches when any one of them + matches. + ++-------------+--------------------+------------+---------------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+====================+============+=====================+ +| filters | Combined Filters | LIST | A list of filters | ++-------------+--------------------+------------+---------------------+ + +- **RegEx** + - Choose a field from the event to match against the regular expression + you provide. + ++-------------+---------------------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+=========================================================+============+==============+ +| field | The text to check for a match. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------------------+------------+--------------+ +| value | The regular expression (pattern) to match. | STRING | Any string | ++-------------+---------------------------------------------------------+------------+--------------+ + +- **StartsWith** + - Check if a search string starts with another string. + ++-------------+---------------------------------------------------+------------+--------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=============+===================================================+============+==============+ +| string | The value to search. Supports ${} notation. 
| STRING | Any string | ++-------------+---------------------------------------------------+------------+--------------+ +| Value | The value to search for. Supports ${} notation. | STRING | Any string | ++-------------+---------------------------------------------------+------------+--------------+ + +- **Unassigned** + - Choose a field from the event to check for assignment. This filter + matches when the field is not assigned. + ++-------------------+--------------------------------------------------------+------------+-----------------+ +| **Field** | **Description** | **Type** | **Notes** | ++===================+========================================================+============+=================+ +| field | The field to check for on the event. | STRING | A field name | ++-------------------+--------------------------------------------------------+------------+-----------------+ +| emptyIsAssigned | If true, an empty value is considered an assignment. | BOOLEAN | True or False | ++-------------------+--------------------------------------------------------+------------+-----------------+ + +- **WithinSecondsFrom** + - This filter matches when the specified epoch time value is within the + given number of seconds from the baseline time value. Both time values + are assumed to be in seconds. If a value is in milliseconds, set + baselineTimeInMillis and/or eventTimeInMillis to true. + ++------------------------+------------------------------------------------------------+------------+-----------------+ +| **Field** | **Description** | **Type** | **Notes** | ++========================+============================================================+============+=================+ +| field | The time value to test. Supports ${} | STRING | A field name | ++------------------------+------------------------------------------------------------+------------+-----------------+ +| eventTimeInMillis | Whether to convert the event value from milliseconds. | BOOLEAN | True or False | ++------------------------+------------------------------------------------------------+------------+-----------------+ +| seconds | The number of seconds. | NUMBER | A number | ++------------------------+------------------------------------------------------------+------------+-----------------+ +| baselineTimeInMillis | Whether to convert the baseline value from milliseconds. | BOOLEAN | True or False | ++------------------------+------------------------------------------------------------+------------+-----------------+ +| baseline | The baseline time value. Supports ${}. | STRING | Any string | ++------------------------+------------------------------------------------------------+------------+-----------------+ + +- **WithinTimeFromNow** + - This filter matches when the named field has an epoch time value + within the given number of seconds from the current time. The event's + time value is assumed to be in seconds. If it's in milliseconds, set + eventTimeInMillis to true. + ++---------------------+---------------------------------------------------------+------------+-----------------+ +| **Field** | **Description** | **Type** | **Notes** | ++=====================+=========================================================+============+=================+ +| field | The field to check on the event. | STRING | A field name | ++---------------------+---------------------------------------------------------+------------+-----------------+ +| eventTimeInMillis | Whether to convert the event value from milliseconds. 
| BOOLEAN | True or False |
++---------------------+---------------------------------------------------------+------------+-----------------+
+| seconds             | The number of seconds.                                  | NUMBER     | A number        |
++---------------------+---------------------------------------------------------+------------+-----------------+
+
+- **Limit:**
+
+   - Limit is an integer value; DMaaP Message Router will consume only
+     the number of messages specified by the limit.
+
+   .. code:: bash
+
+      Suppose we set limit=2; then only 2 sets of data will be consumed.
+      *Get \ **<<topicName>>/group/2?limit=4***
+      Now suppose
+      **No of data available** = 4
+      **Set limit** = 6
+      i.e. limit > no of data
+      In this scenario all 4 sets of data will be consumed.
+
+- If limit is not passed with the url, then by default the limit is set to
+  4096.
+
+   .. code:: bash
+
+      i.e. 4096 sets of data will be consumed.
+
+**Timeout and Long Poll:**
+
+- Timeout is an integer value which DMaaP Message Router treats as a
+  time in milliseconds.
+
+
+
+- Get
+
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| `***http://localhost/DMaaP/dmaaprest/events/<<topicName>>/group/2?timeout=20000*** <http://localhost/DMaaP/dmaaprest/events/%3c%3ctopicName%3e%3e/group/2?timeout=20000>`__    |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+- If there is no data available to be consumed, then DMaaP Message
+  Router will poll for the particular period of time specified in
+  timeout; this mechanism is known as Long Poll.
+
+- If timeout is not passed with the url, then DMaaP Message Router will set
+  the value of timeout to 10000,
+
+- i.e. if no data is available then DMaaP Message Router will
+  poll for 10000 ms.
+
+***Meta:***
+
+- Meta is a Boolean value.
+
+- DMaaP Message Router reads the value of meta from the
+  MRConfiguration.properties file at startup.
+
+- If the value of meta is not null and the value is one of
+  true, yes, on, 1, y, checked, then DMaaP Message Router will
+  take the meta flag as true; otherwise it will be false.
+
+- If meta is set to true then the consumer will get the value of the message
+  offset along with the message.
+
+  |image1|
+
+  .. |image1| image:: images/image1.png
+
+- **Pretty**:
+
+- Pretty is a Boolean value.
+
+- DMaaP Message Router reads the value of pretty from the
+  MRConfiguration.properties file at startup.
+
+- If the value of pretty is not null and the value is one of
+  true, yes, on, 1, y, checked, then DMaaP Message Router
+  will take the pretty flag as true; otherwise it will be false.
+
+- If pretty is set to true then different sets of messages will be
+  printed on separate lines, separated by a comma (,).
+
+  |image2|
+
+  .. |image2| image:: images/image2.png
+
+- **Filter**
+
+  - A filter examines an event and decides if it matches or doesn't.
+  - Filters are mainly used in rules to decide if the processing entries
+    should be executed on the given event. They're also used for settings,
+    and systems like the Graph Correlator re-use Highland Park's filter
+    mechanism to specify which alarms fit in a correlation.
+  - The standard library includes a number of simple filters. The Equals
+    filter, for example, compares a field's value with some other value
+    and returns true if the values match.
+
+***Meta:***
+
+- Meta is a Boolean value.
+
+- DMaaP Message Router reads the value of meta from the
+  MRConfiguration.properties file at startup.
+
+- If the value of meta is not null and is one of the values true, yes,
+  on, 1, y, or checked, then DMaaP Message Router treats the meta flag
+  as true; otherwise it is false.
+
+- If meta is set to true, then the consumer receives the message offset
+  along with each message.
+
+  |image1|
+
+  .. |image1| image:: images/image1.png
+
+- **Pretty**:
+
+- Pretty is a Boolean value.
+
+- DMaaP Message Router reads the value of pretty from the
+  MRConfiguration.properties file at startup.
+
+- If the value of pretty is not null and is one of the values true,
+  yes, on, 1, y, or checked, then DMaaP Message Router treats the
+  pretty flag as true; otherwise it is false.
+
+- If pretty is set to true, then each message is printed on a new line,
+  separated by a comma (,).
+
+  |image2|
+
+  .. |image2| image:: images/image2.png
+
+- **Filter**
+
+  - A filter examines an event and decides whether or not it matches.
+  - Filters are mainly used in rules to decide if the processing entries
+    should be executed on the given event. They're also used for settings,
+    and systems like the Graph Correlator re-use Highland Park's filter
+    mechanism to specify which alarms fit in a correlation.
+  - The standard library includes a number of simple filters. The Equals
+    filter, for example, compares a field's value with some other value
+    and returns true if the values match.
+  - The standard library also includes filter classes called And, Or, and
+    Not, so you can compose more complex filters. For example, written in
+    the standard JSON config format:
+
+ +-----------------------------------------+
+ | "filter":{                              |
+ | "class":"And",                          |
+ | "filters":                              |
+ | [                                       |
+ | { "class":"Equals", "foo":"abc" },      |
+ | { "class":"Assigned", "field":"bar" }   |
+ | ]                                       |
+ | }                                       |
+ +-----------------------------------------+
+
+  - This filter matches events in which the field "foo" has value "abc" and
+    the field "bar" is assigned to some value (as opposed to not being
+    present on the event).
+  - Consumers use filters to consume only the specific set of data that
+    matches the conditions given in the filter.
+  - Filters can be passed as a query parameter by the consumer in the
+    consume request, as shown below:
+  - **http://localhost:8080/DMaaP/dmaaprest/events/DMaaP/consumergroup/mHOeNFY4XiWx4CBa?filter=\\{"class":"Equals","field":"email", "value":"`test@abc.com <mailto:test@abc.com>`__" }**
+  - Filters can be applied only to data in JSON format, i.e. if applied,
+    filters automatically ignore any non-JSON data.
+  - The request CONTENT\_TYPE is not relevant to filtering while consuming.
+
+  The MR API allows a subscriber to pass a Highland Park filter as part of
+  the GET request. This will filter the stream of messages sent back to
+  the subscriber, but for this to work, there are some requirements:
+
+  - The message payload must be JSON
+
+  - Only a filter built from Highland Park's Standard Library may be
+    used. (The Cambria API server doesn't have access to plugged-in
+    filters.)
+
+  - The filter must be encoded properly in the URL path, as the sketch
+    below illustrates.
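+
+  A minimal sketch of that encoding, assuming the curl CLI and
+  illustrative host, topic, and consumer group/id values; the filter is
+  the composed And example from above:
+
+  .. code:: bash
+
+     # curl -G appends --data-urlencode pairs to the URL as a properly
+     # URL-encoded query string, satisfying the encoding requirement.
+     curl -sG "http://localhost/DMaaP/dmaaprest/events/TEST-TOPIC/group/2" \
+          --data-urlencode 'filter={"class":"And","filters":[{"class":"Equals","foo":"abc"},{"class":"Assigned","field":"bar"}]}'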
+
+Server-side filtering can also be set up in the Java client, as
+illustrated below.
+
+**Filtering Consumer**
+
+You can also provide a Highland Park filter to your consumer instance,
+and this filter is passed on to the server in the GET request. One way
+to create the filter is programmatically. In your code, instantiate a
+filter from the Highland Park Standard Library. Then create a String
+representation of the filter using the FilterIo.write utility. This
+String can then be passed to the Cambria client instance for use on the
+server.
+
+Remember, only Highland Park standard library filter components can be
+used -- no plug-ins are available in the Cambria server context.
+
+ .. code:: java
+
+    package org.onap.sa.highlandPark.integration;
+
+    import java.io.IOException;
+    import java.util.UUID;
+
+    import org.onap.nsa.cambria.client.CambriaClientFactory;
+    import org.onap.nsa.cambria.client.CambriaConsumer;
+    import org.onap.sa.highlandPark.processor.HpEvent;
+    import org.onap.sa.highlandPark.stdlib.filters.FilterIo;
+    import org.onap.sa.highlandPark.stdlib.filters.OneOf;
+
+    public class ExampleFilteringConsumer
+    {
+        public static void main ( String[] args ) throws IOException, InterruptedException
+        {
+            // Cambria clients take a set of 1 or more servers to use in
+            // round-robin fashion. If a server becomes unreachable, another
+            // in the group is used.
+            final String serverGroup = "ueb01hydc.it.att.com,ueb02hydc.it.att.com,ueb03hydc.it.att.com";
+
+            // choose a topic
+            final String topic = "TEST-TOPIC";
+
+            // Cambria clients can run in a cooperative group to handle
+            // high-volume topics. Here, we create a random group name,
+            // which means this client is not re-startable.
+            final String consumerGroup = UUID.randomUUID ().toString ();
+            final String consumerId = "0";
+
+            // Cambria clients can sit in a tight loop on the client side,
+            // using a long-poll to wait for messages, and a limit to tell
+            // the server the most to send at a time.
+            final int longPollMs = 30*1000;
+            final int limit = -1;
+
+            // The Cambria server can filter the returned message stream
+            // using filters from the Highland Park system. Here, we create
+            // a simple filter to test for the AlarmId value being one of
+            // the Mobility power alarms.
+            final OneOf oneOf = new OneOf ( "AlarmId", kPowerAlarms );
+
+            // create the consumer
+            final CambriaConsumer cc = CambriaClientFactory.createConsumer ( serverGroup, topic,
+                consumerGroup, consumerId, longPollMs, limit, FilterIo.write ( oneOf ) );
+
+            // now loop reading messages. Note that cc.fetch() will wait in
+            // its HTTP receive method for up to 30 seconds (longPollMs)
+            // when nothing's available at the server.
+            long count = 0;
+            while ( true )
+            {
+                for ( String msg : cc.fetch () )
+                {
+                    System.out.println ( "" + (++count) + ": " + msg );
+                }
+            }
+        }
+
+        private static final String[] kPowerAlarms =
+        {
+            "HUB COMMERCIAL POWER FAIL_FWD",
+            "HUB COMMERCIAL POWER FAIL",
+            "RBS COMMERCIAL POWER FAIL - Fixed_FWD",
+            "RBS COMMERCIAL POWER FAIL_FWD",
+            "RBS COMMERCIAL POWER FAIL - No Generator_FWD",
+            "RBS COMMERCIAL POWER FAIL - Portable_FWD",
+            "RBS COMMERCIAL POWER FAIL - Shared_FWD",
+            "RBS COMMERCIAL POWER FAIL - Yes_FWD",
+            "RBS COMMERCIAL POWER FAIL - YES_FWD",
+            "RBS COMMERCIAL POWER FAIL - Fixed",
+            "RBS COMMERCIAL POWER FAIL - No Generator",
+            "RBS COMMERCIAL POWER FAIL - Portable",
+            "RBS COMMERCIAL POWER FAIL - Shared",
+            "RBS COMMERCIAL POWER FAIL - YES",
+            "RBS COMMERCIAL POWER FAIL - Yes",
+            "RBS COMMERCIAL POWER FAIL",
+            "HUB COMMERCIAL POWER FAIL - Fixed",
+            "HUB COMMERCIAL POWER FAIL - No Generator",
+            "HUB COMMERCIAL POWER FAIL - Portable",
+            "HUB COMMERCIAL POWER FAIL - Shared",
+            "HUB COMMERCIAL POWER FAIL - Fixed_FWD",
+            "HUB COMMERCIAL POWER FAIL - No Generator_FWD",
+            "HUB COMMERCIAL POWER FAIL - Portable_FWD",
+            "HUB COMMERCIAL POWER FAIL - Shared_FWD",
+        };
+    }
+
+
+**Filter Builder**
+
+ MR server-side filtering allows a consumer to filter the stream of
+ messages returned from the GET call. The following link provides
+ details of building some of the filters, to illustrate the Filter
+ Builder. It is not meant to cover or provide examples of every
+ filter.
+
diff --git a/docs/release-notes.rst b/docs/release-notes/release-notes.rst
index f0bbf40..085652f 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes/release-notes.rst
@@ -1,18 +1,45 @@
 .. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
-DMAAP Release Notes
-===================
-Version: 1.0.1
+Release Notes
+=============
+
+
+
+Version: 1.1.4
 --------------
-:Release Date: 2017-11-16
+:Release Date: 2018-04-18
 **New Features**
+This release fixes the packaging and security issues.
+
+**Bug Fixes**
+ NA
+**Known Issues**
+ NA
+
+**Security Issues**
+ This release fixes the security vulnerabilities in the open-source libraries used in the project
+
+
+**Upgrade Notes**
+ NA
+
+**Deprecation Notes**
+
+Version: 1.0.1
+
+Release Date: 2017-11-16
+
+
+New Features:
+
 - Pub/sub messaging metaphor to broaden data processing opportunities
 - A single solution for most event distribution needs to support a range of environments
 - Standardized topic names
@@ -23,22 +50,15 @@ Version: 1.0.1
-**Bug Fixes**
+Bug Fixes
 - `DMAAP-165 <https://jira.onap.org/browse/DMAAP-165>`_ Correct documentation rst file errors and warnings
 - `DMAAP-160 <https://jira.onap.org/browse/DMAAP-160>`_ DMaaP periodically loses connection to Kafka
 - `DMAAP-157 <https://jira.onap.org/browse/DMAAP-157>`_ SDC service models distribution fails
 - `DMAAP-151 <https://jira.onap.org/browse/DMAAP-151>`_ Fix docker image bug
 - `DMAAP-1 <https://jira.onap.org/browse/DMAAP-1>`_ MSO DB is not populated with the models from SDC
-**Known Issues**
+Known Issues
 - `DMAAP-164 <https://jira.onap.org/browse/DMAAP-164>`_ The dependency from kafka for zookeeper created issues when the vm is restarted
-
-**Security Issues**
- N/A
+Other
-**Upgrade Notes**
- N/A
-**Deprecation Notes**
- N/A
-**Other**
@@ -249,7 +249,7 @@
 <javax-mail-version>1.5.0</javax-mail-version>
 <module.ajsc.namespace.name>dmaap</module.ajsc.namespace.name>
 <module.ajsc.namespace.version>v1</module.ajsc.namespace.version>
-<ajscRuntimeVersion>3.0.5-oss</ajscRuntimeVersion>
+<ajscRuntimeVersion>3.0.6-oss</ajscRuntimeVersion>
 <!-- This will be the Absolute Root of the Project and should contain NO Versioning -->
@@ -332,7 +332,16 @@
 <artifactId>dme2</artifactId>
 <version>3.1.200-oss</version>
 </dependency>
-
+<dependency>
+<groupId>org.springframework</groupId>
+<artifactId>spring-expression</artifactId>
+<version>4.3.16.RELEASE</version>
+</dependency>
+<dependency>
+<groupId>org.springframework</groupId>
+<artifactId>spring-messaging</artifactId>
+<version>4.1.9.RELEASE</version>
+</dependency>
 <dependency>
 <groupId>com.sun.mail</groupId>
 <artifactId>javax.mail</artifactId>
@@ -402,7 +411,23 @@
 <dependency>
 <groupId>org.springframework</groupId>
 <artifactId>spring-webmvc</artifactId>
-<version>3.2.18.RELEASE</version>
+<version>4.3.15.RELEASE</version>
+</dependency>
+<dependency>
+<groupId>org.springframework</groupId>
+<artifactId>spring-core</artifactId>
+<version>4.3.15.RELEASE</version>
+</dependency>
+<dependency>
+<groupId>org.springframework</groupId>
+<artifactId>spring-beans</artifactId>
+<version>4.3.15.RELEASE</version>
+</dependency>
+
+<dependency>
+<groupId>org.springframework</groupId>
+<artifactId>spring-context</artifactId>
+<version>4.3.15.RELEASE</version>
 </dependency>
 <dependency>
 <groupId>commons-io</groupId>
@@ -552,7 +577,7 @@
 <dependency>
 <groupId>com.att.ajsc</groupId>
 <artifactId>ajsc-archetype-parent</artifactId>
-<version>3.0.5-oss</version>
+<version>3.0.6-oss</version>
 <type>pom</type>
 </dependency>
@@ -684,13 +709,13 @@
 <dependency>
 <groupId>com.att.ajsc</groupId>
 <artifactId>ajsc-runner</artifactId>
-<version>3.0.5-oss</version>
+<version>3.0.6-oss</version>
 <scope>runtime</scope>
 </dependency>
 <dependency>
 <groupId>com.att.ajsc</groupId>
 <artifactId>ajsc-core</artifactId>
-<version>3.0.5-oss</version>
+<version>3.0.6-oss</version>
 <scope>provided</scope>
 </dependency>
 </dependencies>
diff --git a/src/main/config/ajsc-jetty.xml b/src/main/config/ajsc-jetty.xml
index 7a42776..ee626bb
100644 --- a/src/main/config/ajsc-jetty.xml +++ b/src/main/config/ajsc-jetty.xml @@ -87,8 +87,8 @@ <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory"> <Set name="KeyStorePath"><SystemProperty name="AJSC_HOME" />/bundleconfig/etc/keystore.jks</Set> - <Set name="KeyStorePassword">changeit</Set> - <Set name="KeyManagerPassword">changeit</Set> + <Set name="KeyStorePassword">Messaging for All</Set> + <Set name="KeyManagerPassword">Messaging for All</Set> </New> <Call id="sslConnector" name="addConnector"> <Arg> diff --git a/src/main/resources/docker-compose/LICENSE b/src/main/resources/docker-compose/LICENSE deleted file mode 100644 index e06d208..0000000 --- a/src/main/resources/docker-compose/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/src/main/resources/docker-compose/README.md b/src/main/resources/docker-compose/README.md deleted file mode 100644 index b8aaf8b..0000000 --- a/src/main/resources/docker-compose/README.md +++ /dev/null @@ -1,78 +0,0 @@ -[![Docker Pulls](https://img.shields.io/docker/pulls/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/) -[![Docker Stars](https://img.shields.io/docker/stars/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/) -[![](https://badge.imagelayers.io/wurstmeister/kafka:latest.svg)](https://imagelayers.io/?images=wurstmeister/kafka:latest) - -kafka-docker -============ - -Dockerfile for [Apache Kafka](http://kafka.apache.org/) - -The image is available directly from https://registry.hub.docker.com/ - -##Pre-Requisites - -- install docker-compose [https://docs.docker.com/compose/install/](https://docs.docker.com/compose/install/) -- modify the ```KAFKA_ADVERTISED_HOST_NAME``` in ```docker-compose.yml``` to match your docker host IP (Note: Do not use localhost or 127.0.0.1 as the host ip if you want to run multiple brokers.) -- if you want to customise any Kafka parameters, simply add them as environment variables in ```docker-compose.yml```, e.g. in order to increase the ```message.max.bytes``` parameter set the environment to ```KAFKA_MESSAGE_MAX_BYTES: 2000000```. To turn off automatic topic creation set ```KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'``` - -##Usage - -Start a cluster: - -- ```docker-compose up -d ``` - -Add more brokers: - -- ```docker-compose scale kafka=3``` - -Destroy a cluster: - -- ```docker-compose stop``` - -##Note - -The default ```docker-compose.yml``` should be seen as a starting point. By default each broker will get a new port number and broker id on restart. Depending on your use case this might not be desirable. If you need to use specific ports and broker ids, modify the docker-compose configuration accordingly, e.g. [docker-compose-single-broker.yml](https://github.com/wurstmeister/kafka-docker/blob/master/docker-compose-single-broker.yml): - -- ```docker-compose -f docker-compose-single-broker.yml up``` - -##Broker IDs - -If you don't specify a broker id in your docker-compose file, it will automatically be generated (see [https://issues.apache.org/jira/browse/KAFKA-1070](https://issues.apache.org/jira/browse/KAFKA-1070). 
This allows scaling up and down. In this case it is recommended to use the ```--no-recreate``` option of docker-compose to ensure that containers are not re-created and thus keep their names and ids. - - -##Automatically create topics - -If you want to have kafka-docker automatically create topics in Kafka during -creation, a ```KAFKA_CREATE_TOPICS``` environment variable can be -added in ```docker-compose.yml```. - -Here is an example snippet from ```docker-compose.yml```: - - environment: - KAFKA_CREATE_TOPICS: "Topic1:1:3,Topic2:1:1" - -```Topic 1``` will have 1 partition and 3 replicas, ```Topic 2``` will have 1 partition and 1 replica. - -##Advertised hostname - -You can configure the advertised hostname in different ways - -1. explicitly, using ```KAFKA_ADVERTISED_HOST_NAME``` -2. via a command, using ```HOSTNAME_COMMAND```, e.g. ```HOSTNAME_COMMAND: "route -n | awk '/UG[ \t]/{print $$2}'"``` - -When using commands, make sure you review the "Variable Substitution" section in [https://docs.docker.com/compose/compose-file/](https://docs.docker.com/compose/compose-file/) - -If ```KAFKA_ADVERTISED_HOST_NAME``` is specified, it takes presendence over ```HOSTNAME_COMMAND``` - -For AWS deployment, you can use the Metadata service to get the container host's IP: -``` -HOSTNAME_COMMAND=wget -t3 -T2 -qO- http://169.254.169.254/latest/meta-data/local-ipv4 -``` -Reference: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html - -##Tutorial - -[http://wurstmeister.github.io/kafka-docker/](http://wurstmeister.github.io/kafka-docker/) - - - diff --git a/src/main/resources/docker-compose/broker-list.sh b/src/main/resources/docker-compose/broker-list.sh deleted file mode 100644 index 238c251..0000000 --- a/src/main/resources/docker-compose/broker-list.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -#******************************************************************************* -# ============LICENSE_START======================================================= -# org.onap.dmaap -# ================================================================================ -# Copyright © 2017 AT&T Intellectual Property. All rights reserved. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= -# -# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
-# -#******************************************************************************* - -CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}') -BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done) -echo $BROKERS | sed -e 's/ /,/g' diff --git a/src/main/resources/docker-compose/create-topics.sh b/src/main/resources/docker-compose/create-topics.sh deleted file mode 100644 index 4e46cd2..0000000 --- a/src/main/resources/docker-compose/create-topics.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/bash -#******************************************************************************* -# ============LICENSE_START======================================================= -# org.onap.dmaap -# ================================================================================ -# Copyright © 2017 AT&T Intellectual Property. All rights reserved. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= -# -# ECOMP is a trademark and service mark of AT&T Intellectual Property. -# -#******************************************************************************* - - -if [[ -z "$START_TIMEOUT" ]]; then - START_TIMEOUT=600 -fi - -start_timeout_exceeded=false -count=0 -step=10 -while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do - echo "waiting for kafka to be ready" - sleep $step; - count=$(expr $count + $step) - if [ $count -gt $START_TIMEOUT ]; then - start_timeout_exceeded=true - break - fi -done - -if $start_timeout_exceeded; then - echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)" - exit 1 -fi - -if [[ -n $KAFKA_CREATE_TOPICS ]]; then - IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do - echo "creating topics: $topicToCreate" - IFS=':' read -a topicConfig <<< "$topicToCreate" - JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}" - done -fi diff --git a/src/main/resources/docker-compose/docker-compose-single-broker.yml b/src/main/resources/docker-compose/docker-compose-single-broker.yml deleted file mode 100644 index 4d8e9f5..0000000 --- a/src/main/resources/docker-compose/docker-compose-single-broker.yml +++ /dev/null @@ -1,16 +0,0 @@ -version: '2' -services: - zookeeper: - image: wurstmeister/zookeeper - ports: - - "2181:2181" - kafka: - build: . 
- ports: - - "9092:9092" - environment: - KAFKA_ADVERTISED_HOST_NAME: 192.168.99.100 - KAFKA_CREATE_TOPICS: "test:1:1" - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - volumes: - - /var/run/docker.sock:/var/run/docker.sock diff --git a/src/main/resources/docker-compose/docker-compose.yml b/src/main/resources/docker-compose/docker-compose.yml index 74ae4eb..44d5ce4 100644 --- a/src/main/resources/docker-compose/docker-compose.yml +++ b/src/main/resources/docker-compose/docker-compose.yml @@ -4,26 +4,26 @@ services: image: wurstmeister/zookeeper ports: - "2181:2181" + kafka: - build: . - ports: - - "9092:9092" - environment: - KAFKA_ADVERTISED_HOST_NAME: 172.18.0.1 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + image: wurstmeister/kafka:0.8.1.1-1 + ports: + - "9092:9092" + environment: + HOSTNAME_COMMAND: "hostname -i" + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper - volumes: - - /var/run/docker.sock:/var/run/docker.sock - depends_on: - - zookeeper - dmaap: - image: nexus3.onap.org:10001/onap/dmaap/dmaap-mr + image: nexus3.onap.org:10001/onap/dmaap/dmaap-mr:1.1.4 ports: - "3904:3904" - "3905:3905" volumes: - - /var/tmp/MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties + - /var/tmp/MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties depends_on: - zookeeper - - kafka + - kafka diff --git a/src/main/resources/docker-compose/download-kafka.sh b/src/main/resources/docker-compose/download-kafka.sh deleted file mode 100644 index 6b586cd..0000000 --- a/src/main/resources/docker-compose/download-kafka.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh -#******************************************************************************* -# ============LICENSE_START======================================================= -# org.onap.dmaap -# ================================================================================ -# Copyright © 2017 AT&T Intellectual Property. All rights reserved. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= -# -# ECOMP is a trademark and service mark of AT&T Intellectual Property. -# -#******************************************************************************* - -#mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred') -#url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" -#wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" - -url="http://archive.apache.org/dist/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" -wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
\ No newline at end of file diff --git a/src/main/resources/docker-compose/start-kafka-shell.sh b/src/main/resources/docker-compose/start-kafka-shell.sh deleted file mode 100644 index 025259e..0000000 --- a/src/main/resources/docker-compose/start-kafka-shell.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -#******************************************************************************* -# ============LICENSE_START======================================================= -# org.onap.dmaap -# ================================================================================ -# Copyright © 2017 AT&T Intellectual Property. All rights reserved. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END========================================================= -# -# ECOMP is a trademark and service mark of AT&T Intellectual Property. -# -#******************************************************************************* -docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP=$1 -e ZK=$2 -i -t wurstmeister/kafka /bin/bash |