author    efiacor <fiachra.corcoran@est.tech>  2021-07-23 19:15:53 +0100
committer efiacor <fiachra.corcoran@est.tech>  2021-08-09 17:22:44 +0100
commit    0beea783cef8a84d8bc2655ea678e00d459cc831 (patch)
tree      afa6b459a7f46910566dfce2d39a30ebe84d60e2
parent    49b2a82292f8be3b73fe7460573260d47b5c2b98 (diff)
[DMAAP-MR] Remove redundant data
Signed-off-by: efiacor <fiachra.corcoran@est.tech>
Change-Id: I56a8417f72d892705230e94f079db3024170e884
Issue-ID: DMAAP-1638
-rw-r--r--  antBuild/build.xml | 118
-rw-r--r--  bundleconfig-csi/README.txt | 3
-rw-r--r--  bundleconfig-csi/RELEASE_NOTES.txt | 1
-rw-r--r--  bundleconfig-csi/etc/appprops/AAFUserRoles.properties | 26
-rw-r--r--  bundleconfig-csi/etc/appprops/PostProcessorInterceptors.properties | 24
-rw-r--r--  bundleconfig-csi/etc/appprops/PreProcessorInterceptors.properties | 24
-rw-r--r--  bundleconfig-csi/etc/appprops/app-intercepts.properties | 26
-rw-r--r--  bundleconfig-csi/etc/appprops/caet.properties | 21
-rw-r--r--  bundleconfig-csi/etc/appprops/methodMapper.properties | 44
-rw-r--r--  bundleconfig-csi/etc/service-file-monitor.properties | 26
-rw-r--r--  bundleconfig-csi/etc/sysprops/sys-props.properties | 155
-rw-r--r--  bundleconfig-csi/swm-var-ajsc.txt | 4
-rw-r--r--  bundleconfig-csi/symlinks.txt | 6
-rw-r--r--  demo/deploy.sh | 38
-rw-r--r--  demo/docker_files/Dockerfile | 22
-rw-r--r--  demo/docker_files/Dockerfile-local | 22
-rw-r--r--  demo/docker_files/__MsgRtrApi.properties | 141
-rw-r--r--  demo/docker_files/__docker-compose.yml | 67
-rw-r--r--  demo/docker_files/apikey-APPC1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTAL1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTALAPP1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTALDBC1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTALPOL1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTALSDC1.key | 8
-rw-r--r--  demo/docker_files/apikey-PORTALVID1.key | 8
-rw-r--r--  demo/docker_files/apikey-SDC1.key | 8
-rw-r--r--  demo/docker_files/broker-list.sh | 5
-rw-r--r--  demo/docker_files/cadi.properties | 20
-rw-r--r--  demo/docker_files/create-topics.sh | 32
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/.lock | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log | 0
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint | 9
-rw-r--r--  demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint | 9
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.01 | bin 67108880 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.0103 | bin 67108880 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.0125 | bin 67108880 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.0175 | bin 67108880 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.01b1 | bin 67108880 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.100000001 | bin 5120016 -> 0 bytes
-rw-r--r--  demo/docker_files/data-zookeeper/version-2/log.200000001 | bin 5120016 -> 0 bytes
-rw-r--r--  demo/docker_files/download-kafka.sh | 5
-rw-r--r--  demo/docker_files/dump_mr_state.sh | 10
-rw-r--r--  demo/docker_files/keystore.jks | bin 3659 -> 0 bytes
-rw-r--r--  demo/docker_files/mykey | 27
-rw-r--r--  demo/docker_files/preconfigure-ecomp-keystopics.sh | 191
-rw-r--r--  demo/docker_files/start-kafka.sh | 69
-rw-r--r--  demo/docker_files/state-20170301.tar.gz | bin 212717 -> 0 bytes
-rw-r--r--  demo/docker_files/subscriber.sh | 4
-rw-r--r--  demo/docker_files/tests/data.2.2.json | 1
-rw-r--r--  demo/docker_files/tests/data.2.5.json | 1
-rw-r--r--  demo/docker_files/tests/data.3.1.txt | 1
-rw-r--r--  demo/docker_files/tests/data.3.3.json | 1
-rw-r--r--  demo/docker_files/tests/key.req | 1
-rw-r--r--  demo/docker_files/tests/out/1.1.out | 5
-rw-r--r--  demo/docker_files/tests/out/2.1.out | 8
-rw-r--r--  demo/docker_files/tests/out/2.2.out | 13
-rw-r--r--  demo/docker_files/tests/out/2.3.out | 6
-rw-r--r--  demo/docker_files/tests/out/2.4.out | 13
-rw-r--r--  demo/docker_files/tests/out/2.5.out | 4
-rw-r--r--  demo/docker_files/tests/out/2.6.out | 1
-rw-r--r--  demo/docker_files/tests/out/3.1.out | 4
-rw-r--r--  demo/docker_files/tests/out/3.2.out | 1
-rw-r--r--  demo/docker_files/tests/out/3.3.out | 4
-rw-r--r--  demo/docker_files/tests/out/3.4.out | 1
-rw-r--r--  demo/docker_files/tests/regress.sh | 113
-rw-r--r--  demo/docker_files/tests/test.sh | 50
-rw-r--r--  demo/docker_files/uebapikey-sdc | 8
-rw-r--r--  etc/logstash_cambria_template.conf | 36
-rw-r--r--  oom-topics/data-kafka/kafka-logs/.kafka_cleanshutdown | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/.lock | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log | 0
-rw-r--r--  oom-topics/data-kafka/kafka-logs/recovery-point-offset-checkpoint | 9
-rw-r--r--  oom-topics/data-kafka/kafka-logs/replication-offset-checkpoint | 9
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.01 | bin 67108880 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.0103 | bin 67108880 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.0125 | bin 67108880 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.0175 | bin 67108880 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.01b1 | bin 67108880 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.100000001 | bin 5120016 -> 0 bytes
-rw-r--r--  oom-topics/data-zookeeper/version-2/log.200000001 | bin 5120016 -> 0 bytes
-rw-r--r--  pom.xml | 399
-rw-r--r--  services/README.txt | 8
-rw-r--r--  src/main/config/ajsc-chef.jks | bin 5229 -> 0 bytes
-rw-r--r--  src/main/config/ajscJetty.jks | bin 3685 -> 0 bytes
-rw-r--r--  src/main/config/keyfile | 27
-rw-r--r--  src/main/config/keyfile1 | 27
-rw-r--r--  src/main/config/keyfile2 | 27
-rw-r--r--  src/main/config/keyfile_old | 27
-rw-r--r--  src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java | 3
-rw-r--r--  src/main/resources/docker/startup.sh | 3
-rw-r--r--  src/main/resources/images/attLogo.gif | bin 1885 -> 0 bytes
-rw-r--r--  src/main/resources/images/att_vt_1cp_grd_rev.gif | bin 5238 -> 0 bytes
-rw-r--r--  src/main/resources/templates/hello.html | 9
-rw-r--r--  src/main/test/com/att/nsa/dmaap/DummyTest.java | 45
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java | 3
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java | 17
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java | 55
-rw-r--r--  src/test/resources/MsgRtrApi.properties | 4
125 files changed, 156 insertions, 2001 deletions
diff --git a/antBuild/build.xml b/antBuild/build.xml
index 18ba18a..eab8052 100644
--- a/antBuild/build.xml
+++ b/antBuild/build.xml
@@ -4,6 +4,7 @@
org.onap.dmaap
================================================================================
Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ Modification copyright (C) 2021 Nordix Foundation.
================================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -18,106 +19,15 @@
============LICENSE_END=========================================================
ECOMP is a trademark and service mark of AT&T Intellectual Property.
-
+
-->
<project>
- <target name="runLocal">
- <java dir="${basedir}" fork="yes" newenvironment="true" failonerror="true" classname="com.att.ajsc.runner.Runner">
- <classpath path="${classpath}:${basedir}/ajsc-shared-config/etc:${runAjscHome}/lib/ajsc-runner-${ajscRuntimeVersion}.jar" />
-
- <!-- Windows Users may need to add a jvmarg arg to create a temp directory properly. -->
- <!-- <jvmarg value="-Djava.io.tmpdir=C:/yourTempDirectory"/> -->
-
- <!-- Uncomment the following 2 jvmarg values to enable Remote Debugging.
- for more information on how to setup Remote Debugger -->
- <!-- <jvmarg value="-Xdebug" /> -->
- <!-- <jvmarg value="-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5432" /> -->
-
- <jvmarg value="-XX:MaxPermSize=512m" />
- <jvmarg value="-Xmx1024m" />
-
- <!-- Main ajsc Variables below (Variables necessary for proper startup of AJSC) -->
- <env key="AJSC_HOME" value="${runAjscHome}" />
- <sysproperty key="AJSC_HOME" value="${runAjscHome}" />
- <!-- you may specify any external location for AJSC_CONF_HOME where etc folder
- & all other configs can be found under it. If not specified, it will default to AJSC_HOME -->
- <sysproperty key="AJSC_CONF_HOME" value="${basedir}/bundleconfig-local" />
- <sysproperty key="AJSC_SHARED_CONFIG" value="${basedir}/ajsc-shared-config" />
-
- <!-- Location of logback.xml file used for logging configurations. Please, note, when deploying a service
- to either CSI or NON-CSI environment, this system property will be set in sys-props.properties file. We
- are setting it here for running locally due to the ease of use of maven variable for basedir. -->
- <sysproperty key="logback.configurationFile" value="${basedir}/ajsc-shared-config/etc/logback.xml" />
-
- <!-- Setting system properties for the AJSC external libs and properties folders below. When deploying to
- a node, these properties will be set within the bundleconfig/etc/sysprops/sys-props.properties file.
- However, when running locally, the ${basedir} substitution works more efficiently in this manner. -->
- <sysproperty key="AJSC_EXTERNAL_LIB_FOLDERS" value="${basedir}/target/commonLibs" />
- <sysproperty key="AJSC_EXTERNAL_PROPERTIES_FOLDERS" value="${basedir}/ajsc-shared-config/etc" />
-
- <!-- End of Main ajsc Variables below (Variables necessary for proper startup of AJSC) -->
-
- <!-- Uncomment the following line to add oauthentication to your Service -->
- <!-- <sysproperty key="spring.profiles.active" value="oauth" /> -->
-
- <!-- If using Cassandra as Database, Enter the ip/host and port below based on your known configuration -->
- <!-- <sysproperty key="cassandra.ip" value="hostname" /> -->
- <!-- <sysproperty key="cassandra.port" value="9042" /> -->
-
- <!-- The APP_SERVLET_URL_PATTERN variable is defaulted to "/services" within
- the initial configuration of the AJSC. If you are changing the CamelServlet
- Filter within the ajsc-override-web.xml, you should use that url-pattern
- here. This is necessary to properly register your service with dme2. An empty
- value, "", is used when NO value is wanted (url-pattern would be /* for
- CamelServlet Filter) -->
- <!-- As of 4.5.1, this property is no longer needed -->
- <!-- <sysproperty key="APP_SERVLET_URL_PATTERN" value="/services" /> -->
-
- <!-- GRM/DME2 System Properties below -->
- <sysproperty key="AJSC_SERVICE_NAMESPACE" value="${module.ajsc.namespace.name}" />
- <sysproperty key="AJSC_SERVICE_VERSION" value="${module.ajsc.namespace.version}" />
- <sysproperty key="SOACLOUD_SERVICE_VERSION" value="${project.version}" />
- <!-- End of GRM/DME2 System Property Variables -->
-
- <!-- The following server.port variable was necessary for the proper registration
- of the AJSC to dme2. This value may still need to be used if the Developer is
- hardcoding their port (example: 8080). Then, the server.port value="8080".
- The default functionality for the AJSC is to use EPHEMERAL ports. In this
- case, you do NOT need to set the server.port value. The AJSC will find the
- proper port value and register to dme2 correctly -->
- <!-- <sysproperty key="server.port" value="${serverPort}" /> -->
-
- <!-- Command Line Arguments to add to the java command. Here, you
- can specify the port as well as the Context you want your service
- to run in. Use context=/ to run in an unnamed Context (Root Context).
- The default configuration of the AJSC is to run under the /ajsc Context.
- Setting the port here can aid during the development phase of your
- service. However, you can leave this argument out entirely, and the
- AJSC will default to using an Ephemeral port. -->
- <arg line="context=/ port=${serverPort} sslport=${sslport}" />
- </java>
- </target>
<target name="prep_home_directory_for_swm_pkgcreate">
<!-- These tasks are copying contents from the installHomeDirectory into
the eventual $AJSC_HOME directory for running locally and soa cloud installation -->
<echo message="ENTERING 'prep_home_directory_for_swm_pkgcreate' ant tasks" />
- <!-- Please, NOTE: The ajsc-archetype is setup for a default CSI Env deployment. If you are deploying to a CSI Env,
- you should NOT have to change anything within this build file. However, if you are NOT deploying to a CSI Env,
- you should comment OUT the CSI related portion of this build.xml. -->
-
- <!-- The following code snippet is copying the bundleconfig-csi directory to the proper installation/bundleconfig directory
- used in CSI envs. If you are NOT installing to a CSI node, you should comment out (or delete) the following snippet,
- and uncomment the NON-CSI copy task to copy EVERYTHING to the installation/bundleconfig directory. -->
-
- <!-- CSI related bundleconfig copy task. If you are NOT deploying to a CSI Env, please COMMENT OUT or delete the following
- copy task code snippet. -->
- <!--<copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/bundleconfig" failonerror="true">
- <fileset dir="${basedir}/bundleconfig-csi" />
- </copy>-->
- <!-- End of CSI related bundleconfig copy task -->
-
<!-- NOTE: If you are NOT deploying to CSI environment, and you are NOT using an AJSC_SHARED_CONFIG location on a
node, you should go ahead and copy EVERYTHING from bundleconfig and ajsc-shared-config (logback.xml) directory
to utilize proper logging from logback.xml. Simply, uncomment the following code snippet below to copy EVERYTHING and
@@ -131,18 +41,12 @@
</copy>
<!-- End of NON-CSI related build copy task. -->
- <!-- Copying any zips (deployment packages) to $AJSC_HOME/services for
+ <!-- Copying any zips (deployment packages) to $AJSC_HOME/services for
auto-deployment -->
<copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/services" failonerror="false">
<fileset dir="${basedir}/services" includes="*.zip" />
</copy>
- <!-- Copying runtimeEnvironment zip file to $AJSC_HOME/runtime and renaming runtimeEnvironment.zip for
- proper auto-deployment of ajsc services. -->
- <!--<copy tofile="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/runtime/runtimeEnvironment.zip">
- <fileset dir="target" includes="*-runtimeEnvironment.zip" />
- </copy>-->
-
<!-- Copying dependencies from the service project (not provided by AJSC
Container) to the $AJSC_HOME/extJars folder to be accessible on the classpath -->
<copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extJars" failonerror="false">
@@ -155,22 +59,6 @@
starts up. -->
<mkdir dir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extApps" />
- <!-- Copying any extra wars to $AJSC_HOME/extApps to be deployed within
- AJSC -->
- <copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extApps" failonerror="false">
- <fileset dir="${basedir}/src/main/resources/extApps" includes="*" />
- </copy>
-
- <!-- staticContent folder is for serving static content within an ajsc service. Any static content to be served
- will be copyied to the ultimate $AJSC_HOME/staticContent folder and can be served with the att-static-content
- camel component. -->
- <!-- Uncomment the following snippet to copy items from staticContent folder to ultimate $AJSC_HOME/staticConent -->
- <!-- <copy
- toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/staticContent"
- failonerror="false">
- <fileset dir="${basedir}/staticContent" includes="**/**" />
- </copy> -->
-
<!-- Copying extra jar files that have been labeled as dependencies in service project
to /extJars folder to be made available on the classpath for your service -->
<copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extJars" failonerror="false">
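For reference, the deleted "runLocal" target only wrapped a plain java invocation of the AJSC runner. A minimal shell sketch of the equivalent command, derived from the removed XML; the install path, runner version, and ports are illustrative placeholders, not values taken from this repo:

    export AJSC_HOME=/opt/app/ajsc                                   # placeholder install location
    java -Xmx1024m \
        -DAJSC_HOME="$AJSC_HOME" \
        -DAJSC_CONF_HOME="$PWD/bundleconfig-local" \
        -DAJSC_SHARED_CONFIG="$PWD/ajsc-shared-config" \
        -Dlogback.configurationFile="$PWD/ajsc-shared-config/etc/logback.xml" \
        -DAJSC_EXTERNAL_LIB_FOLDERS="$PWD/target/commonLibs" \
        -DAJSC_EXTERNAL_PROPERTIES_FOLDERS="$PWD/ajsc-shared-config/etc" \
        -cp "$PWD/ajsc-shared-config/etc:$AJSC_HOME/lib/ajsc-runner-<ajscRuntimeVersion>.jar" \
        com.att.ajsc.runner.Runner context=/ port=8080 sslport=8443  # example ports; the target defaulted to ephemeral ports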
diff --git a/bundleconfig-csi/README.txt b/bundleconfig-csi/README.txt
deleted file mode 100644
index f97fe73..0000000
--- a/bundleconfig-csi/README.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-The bundleconfig-csi directory contains the necessary configuration files to be included with your AJSC bundle when deploying
-to a CSI env. This file is NOT used during local development. For local development, all files are read from the bundleconfig-local
folder.
\ No newline at end of file
diff --git a/bundleconfig-csi/RELEASE_NOTES.txt b/bundleconfig-csi/RELEASE_NOTES.txt
deleted file mode 100644
index a567d9b..0000000
--- a/bundleconfig-csi/RELEASE_NOTES.txt
+++ /dev/null
@@ -1 +0,0 @@
-Place Release Notes here to provide updated Release information for CSTEM to better help manage your service in the CSI environment.
\ No newline at end of file
diff --git a/bundleconfig-csi/etc/appprops/AAFUserRoles.properties b/bundleconfig-csi/etc/appprops/AAFUserRoles.properties
deleted file mode 100644
index 18cd07d..0000000
--- a/bundleconfig-csi/etc/appprops/AAFUserRoles.properties
+++ /dev/null
@@ -1,26 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#If using AAF for Role based authentication/authorization, define your routes/services which will utilize AAF. The AJSC will
-#read this file and protect the routes given with the AAF role defined.
-#For more information on AAF role based permissions, please contact AAF team.
-
-
diff --git a/bundleconfig-csi/etc/appprops/PostProcessorInterceptors.properties b/bundleconfig-csi/etc/appprops/PostProcessorInterceptors.properties
deleted file mode 100644
index a39131c..0000000
--- a/bundleconfig-csi/etc/appprops/PostProcessorInterceptors.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#This properties file is for defining any PostProcessorInterceptors that have been created for your AJSC service.
-
-/**=ajsc.beans.interceptors.CsiPostInterceptor
diff --git a/bundleconfig-csi/etc/appprops/PreProcessorInterceptors.properties b/bundleconfig-csi/etc/appprops/PreProcessorInterceptors.properties
deleted file mode 100644
index 966bae8..0000000
--- a/bundleconfig-csi/etc/appprops/PreProcessorInterceptors.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#This properties file is for defining any PreProcessorInterceptors that have been created for your AJSC service.
-
-/**=com.att.ajsc.csi.restmethodmap.RestMethodMapInterceptor,ajsc.beans.interceptors.CsiPreInterceptor
diff --git a/bundleconfig-csi/etc/appprops/app-intercepts.properties b/bundleconfig-csi/etc/appprops/app-intercepts.properties
deleted file mode 100644
index 5c38596..0000000
--- a/bundleconfig-csi/etc/appprops/app-intercepts.properties
+++ /dev/null
@@ -1,26 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#This is where all your application intercept strategies must be configured. AJSC reads this property file and adds
-#the list of intercepts specified here to the camel context. This can be useful for accessing every exchange object transferred from/to
-#each endpoint in the request/response flow and can allow for more precise debugging and/or processing of the exchange.
-
-#e.g.
diff --git a/bundleconfig-csi/etc/appprops/caet.properties b/bundleconfig-csi/etc/appprops/caet.properties
deleted file mode 100644
index 0757a7f..0000000
--- a/bundleconfig-csi/etc/appprops/caet.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
diff --git a/bundleconfig-csi/etc/appprops/methodMapper.properties b/bundleconfig-csi/etc/appprops/methodMapper.properties
deleted file mode 100644
index dcfc63a..0000000
--- a/bundleconfig-csi/etc/appprops/methodMapper.properties
+++ /dev/null
@@ -1,44 +0,0 @@
-// Json object holds the method mapping.Update the JSON object with the proper route to logical mapping based
-// on the example provided below :
-// "helloWorld" = Service Name
-// "method" = http method
-// "url" = the url component from the route
-// "logicalName"= When a combination of method and url from the route matches the json object ,
-// the logical name is put in the http header as "x-CSI-ServiceName" and "x-CSI-MethodName
-// ""dme2url"= if provided it register the endpoint to GRM, it is optional.
-
-{
- "helloWorld": [
- {
- "method": "get",
- "url": "/rest/dmaap/v1/helloWorld",
- "logicalName": "GetMethod(Logical)"
- },
- {
- "method": "get",
- "url": "/services/dmaap/v1/jaxrsExample/jaxrs-services/echo/{input}",
- "logicalName": "GetJaxrsExampleEcho(Logical)",
- "dme2url": "/services/dmaap/v1/jaxrsExample"
- },
- {
- "method": "get",
- "url": "/services/dmaap/v1/jaxrsExample/jaxrs-services/property/{fileName}/{input}",
- "logicalName": "GetJaxrsExampleProperty(Logical)",
- "dme2url": "/services/dmaap/v1/jaxrsExample"
- }
- ],
- "errormessage":
- [
- {
- "method": "get",
- "url": "/services/dmaap/v1/jaxrsExample/errormessage/emls",
- "logicalName": "setCAETHeaders(Logical)"
- },
- {
- "method": "get",
- "url": "/services/dmaap/v1/errorMessageLookupService2",
- "logicalName": "setCAETHeaders(Logical)"
- }
-
- ]
-}
\ No newline at end of file
diff --git a/bundleconfig-csi/etc/service-file-monitor.properties b/bundleconfig-csi/etc/service-file-monitor.properties
deleted file mode 100644
index 9596440..0000000
--- a/bundleconfig-csi/etc/service-file-monitor.properties
+++ /dev/null
@@ -1,26 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#This properties file is used for defining any file paths that you would like to have monitored.
-
-#user config file paths go here as (key-value) pairs. File extension will have to be either .properties or .json in order for it to be monitored.
-#file1=/opt/app/yourService/file.properties
-
diff --git a/bundleconfig-csi/etc/sysprops/sys-props.properties b/bundleconfig-csi/etc/sysprops/sys-props.properties
deleted file mode 100644
index d637204..0000000
--- a/bundleconfig-csi/etc/sysprops/sys-props.properties
+++ /dev/null
@@ -1,155 +0,0 @@
-###############################################################################
-# ============LICENSE_START=======================================================
-# org.onap.dmaap
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-#This file is used for defining AJSC system properties for different configuration schemes and is necessary for the AJSC to run properly.
-#The sys-props.properties file is used for running locally. The template.sys-props.properties file will be used when deployed
-#to a SOA/CSI Cloud node. For more information, please goto wiki link:
-
-#AJSC System Properties. The following properties are required for ALL AJSC services.
-
-#For Cadi Authorization, use value="authentication-scheme-1
-CadiAuthN=authentication-scheme-1
-
-#For Basic Authorization, use value="authentication-scheme-1
-authN=authentication-scheme-2
-
-#Persistence used for AJSC meta-data storage. For most environments, "file" should be used.
-ajscPersistence=file
-
-# If using hawtio for local development, these properties will allow for faster server startup and usage for local development
-hawtio.authenticationEnabled=false
-hawtio.config.pullOnStartup=false
-
-#Removes the extraneous restlet console output
-org.restlet.engine.loggerFacadeClass=org.restlet.ext.slf4j.Slf4jLoggerFacade
-
-#server.host property to be enabled for local DME2 related testing
-#server.host=<Your network IP address>
-
-#Enable/disable SSL (values=true/false).
-enableSSL=true
-
-#Enable/disable OSGI
-isOSGIEnable=false
-
-#Generate/Skip api docs
-isApiDoc=false
-
-#Jetty Container ThreadCount Configuration Variables
-AJSC_JETTY_ThreadCount_MIN=${ChangeMe_AJSC_JETTY_ThreadCount_MIN}
-AJSC_JETTY_ThreadCount_MAX=${ChangeMe_AJSC_JETTY_ThreadCount_MAX}
-AJSC_JETTY_IDLETIME_MAX=${ChangeMe_AJSC_JETTY_IDLETIME_MAX}
-
-#Camel Context level default threadPool Profile configuration
-CAMEL_POOL_SIZE=${ChangeMe_CAMEL_POOL_SIZE}
-CAMEL_MAX_POOL_SIZE=${ChangeMe_CAMEL_MAX_POOL_SIZE}
-CAMEL_KEEP_ALIVE_TIME=${ChangeMe_CAMEL_KEEP_ALIVE_TIME}
-CAMEL_MAX_QUEUE_SIZE=${ChangeMe_CAMEL_MAX_QUEUE_SIZE}
-
-#Restlet Component Default Properties
-RESTLET_COMPONENT_CONTROLLER_DAEMON=true
-RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS=${ChangeMe_RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS}
-RESTLET_COMPONENT_MIN_THREADS=${ChangeMe_RESTLET_COMPONENT_MIN_THREADS}
-RESTLET_COMPONENT_MAX_THREADS=${ChangeMe_RESTLET_COMPONENT_MAX_THREADS}
-RESTLET_COMPONENT_LOW_THREADS=${ChangeMe_RESTLET_COMPONENT_LOW_THREADS}
-RESTLET_COMPONENT_MAX_QUEUED=${ChangeMe_RESTLET_COMPONENT_MAX_QUEUED}
-RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST=${ChangeMe_RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST}
-RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS=${ChangeMe_RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS}
-RESTLET_COMPONENT_INBOUND_BUFFER_SIZE=${ChangeMe_RESTLET_COMPONENT_INBOUND_BUFFER_SIZE}
-RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE=${ChangeMe_RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE}
-RESTLET_COMPONENT_PERSISTING_CONNECTIONS=true
-RESTLET_COMPONENT_PIPELINING_CONNECTIONS=false
-RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS=${ChangeMe_RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS}
-RESTLET_COMPONENT_USE_FORWARDED_HEADER=false
-RESTLET_COMPONENT_REUSE_ADDRESS=true
-
-#File Monitor configurations
-ssf_filemonitor_polling_interval=5
-ssf_filemonitor_threadpool_size=10
-
-#SOA_CLOUD_ENV is used to register your service with dme2 and can be turned off for local development (values=true/false).
-SOA_CLOUD_ENV=true
-
-#CONTINUE_ON_LISTENER_EXCEPTION will exit the application if there is a DME2 exception at the time of registration.
-CONTINUE_ON_LISTENER_EXCEPTION=false
-
-#GRM/DME2 System Properties
-AFT_DME2_CONN_IDLE_TIMEOUTMS=5000
-AJSC_ENV=SOACLOUD
-AJSC_SERVICE_NAMESPACE=__module_ajsc_namespace_name__
-AJSC_SERVICE_VERSION=__module_ajsc_namespace_version__
-
-SOACLOUD_NAMESPACE=org.onap.dmaap.mr
-SOACLOUD_ENV_CONTEXT=${ChangeMe_SOACLOUD_ENV_CONTEXT}
-SOACLOUD_PROTOCOL=https
-SOACLOUD_ROUTE_OFFER=${ChangeMe_SOACLOUD_ROUTE_OFFER}
-
-AFT_LATITUDE=${ChangeMe_AFT_LATITUDE}
-AFT_LONGITUDE=${ChangeMe_AFT_LONGITUDE}
-AFT_ENVIRONMENT=${ChangeMe_AFT_ENVIRONMENT}
-
-#Enable/disable csi logging (values=true/false). This can be disabled during local development
-csiEnable=true
-
-#Enable/disable CAET This can be disabled during local development
-isCAETEnable=true
-
-#Enable/disable endpoint level logging (values=true/false). This can be disabled during local development
-endpointLogging=false
-
-#Enable/disable trail logging and trail logging summary
-enableTrailLogging=false
-enableTrailLoggingSummary=false
-
-#Configure JMS Queue (WMQ/TIBCO)
-JMS_BROKER=WMQ
-
-#JMS properties needed for CSI Logging (TIBCO)
-JMS_TIBCO_PROVIDER_URL=${ChangeMe_JMS_TIBCO_PROVIDER_URL}
-JMS_LOGGER_USER_NAME=${ChangeMe_JMS_LOGGER_USER_NAME}
-JMS_LOGGER_PASSWORD=${ChangeMe_JMS_LOGGER_PASSWORD}
-JMS_LOGGER_AUDIT_QUEUE_BINDING=${ChangeMe_JMS_LOGGER_AUDIT_QUEUE_BINDING}
-JMS_LOGGER_PERF_QUEUE_BINDING=${ChangeMe_JMS_LOGGER_PERF_QUEUE_BINDING}
-
-#WMQ connectivity
-
-logback.configurationFile=${ChangeMe_localBundlePath}/shared/etc/logback.xml
-
-#CSI related variables for CSM framework
-csm.hostname=${ChangeMe_VTIER_FQDN}
-
-#Externalized jar and properties file location. In CSI environments, there are a few libs that have been externalized to aid
-#in CSTEM maintenance of the versions of these libs. The most important to the AJSC is the DME2 lib. Not only is this lib necessary
-#for proper registration of your AJSC service on a node, but it is also necessary for running locally as well. Another framework
-#used in CSI envs is the CSM framework. These 2 framework libs are shown as "provided" dependencies within the pom.xml. These
-#dependencies will be copied into the target/commonLibs folder with the normal "mvn clean package" goal of the AJSC. They will
-#then be added to the classpath via AJSC_EXTERNAL_LIB_FOLDERS system property. Any files (mainly property files) that need
-#to be on the classpath should be added to the AJSC_EXTERNAL_PROPERTIES_FOLDERS system property. The default scenario when
-#testing your AJSC service locally will utilize the target/commonLibs directory for DME2 and CSM related artifacts and 2
-#default csm properties files will be used for local testing with anything CSM related.
-#NOTE: we are using maven-replacer-plugin to replace "(doubleUnderscore)basedir(doubleUnderscore)" with ${basedir} within the
-#target directory for running locally. Multiple folder locations can be separated by the pipe ("|") character.
-#Example: target/commonLibs|target/otherLibs
-AJSC_EXTERNAL_LIB_FOLDERS=${ChangeMe_localBundlePath}/symlinks/link_csi-csm/lib|${ChangeMe_localBundlePath}/symlinks/link_csi-dme/lib
-AJSC_EXTERNAL_PROPERTIES_FOLDERS=${ChangeMe_localBundlePath}/shared/etc
-#End of AJSC System Properties
-
-#Service System Properties. Please, place any Service related System Properties below.
\ No newline at end of file
diff --git a/bundleconfig-csi/swm-var-ajsc.txt b/bundleconfig-csi/swm-var-ajsc.txt
deleted file mode 100644
index dbfb87e..0000000
--- a/bundleconfig-csi/swm-var-ajsc.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-#This file is for SWM node variable default replacement. SWM node variables will already exist on CSI nodes. This file is used
-#for dev teams to override the default SWM node variables. The examples given are meant to be just that: EXAMPLES. Please,
-#REMOVE/DELETE/ADD/MODIFY these values for YOUR SWM package based on the needs of your project.
-
diff --git a/bundleconfig-csi/symlinks.txt b/bundleconfig-csi/symlinks.txt
deleted file mode 100644
index 70f2c0d..0000000
--- a/bundleconfig-csi/symlinks.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-#This file provides the definitions of sym-links to CSTEM that will be used by your project once installed on a CSI node.
-
-link_jre=ajsc-shared-config
-link_csi-csm=ajsc-shared-config
-link_csi-dme=ajsc-shared-config
-link_introscope=ajsc-shared-config
diff --git a/demo/deploy.sh b/demo/deploy.sh
deleted file mode 100644
index 6461817..0000000
--- a/demo/deploy.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-set -e
-
-export NEXUS_DOCKER_REPO=$(cat /opt/config/nexus_docker_repo.txt)
-
-# do not change this, it is already matched with the git repo file structure
-DOCKER_FILE_DIR='./docker_files'
-
-# commands to run docker and docker-compose
-DOCKER_COMPOSE_EXE='/opt/docker/docker-compose'
-
-cd "${DOCKER_FILE_DIR}"
-
-while ! ifconfig |grep "docker0" > /dev/null;
- do sleep 1
- echo 'waiting for docker operational'
-done
-
-echo "prep any files with local configurations"
-if ls __* 1> /dev/null 2>&1; then
- IP_DOCKER0=$(ifconfig docker0 |grep "inet addr" | cut -d: -f2 |cut -d" " -f1)
- TEMPLATES=$(ls -1 __*)
- for TEMPLATE in $TEMPLATES
- do
- FILENAME=${TEMPLATE//_}
- if [ ! -z "${IP_DOCKER0}" ]; then
- sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" "$TEMPLATE" > "$FILENAME"
- fi
- done
-fi
-
-if [ -z "$MTU" ]; then
- export MTU=$(ifconfig docker0 |grep MTU |sed -e 's/.*MTU://' -e 's/\s.*$//')
-fi
-
-echo "starting docker operations"
-${DOCKER_COMPOSE_EXE} up -d --build
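The deleted deploy.sh rendered every "__"-prefixed template in docker_files by stripping the underscores and substituting {{ ip.docker0 }}. A sketch of that step for a single file, assuming a typical docker0 bridge address (the script read the real one from ifconfig):

    IP_DOCKER0=172.17.0.1   # assumption: common default for the docker0 bridge
    sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" __MsgRtrApi.properties > MsgRtrApi.properties
    sed -e "s/{{ ip.docker0 }}/${IP_DOCKER0}/" __docker-compose.yml > docker-compose.yml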
diff --git a/demo/docker_files/Dockerfile b/demo/docker_files/Dockerfile
deleted file mode 100644
index 56ed86b..0000000
--- a/demo/docker_files/Dockerfile
+++ /dev/null
@@ -1,22 +0,0 @@
-FROM openjdk:8-jdk
-
-MAINTAINER Wurstmeister
-
-RUN apk add --update unzip wget curl docker jq coreutils
-
-ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
-ADD download-kafka.sh /tmp/download-kafka.sh
-RUN chmod a+x /tmp/download-kafka.sh && sync && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz
-
-VOLUME ["/kafka"]
-
-ENV KAFKA_HOME /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION}
-ADD start-kafka.sh /usr/bin/start-kafka.sh
-ADD broker-list.sh /usr/bin/broker-list.sh
-ADD create-topics.sh /usr/bin/create-topics.sh
-# The scripts need to have executable permission
-RUN chmod a+x /usr/bin/start-kafka.sh && \
- chmod a+x /usr/bin/broker-list.sh && \
- chmod a+x /usr/bin/create-topics.sh
-# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
-CMD ["start-kafka.sh"]
diff --git a/demo/docker_files/Dockerfile-local b/demo/docker_files/Dockerfile-local
deleted file mode 100644
index caf7ce6..0000000
--- a/demo/docker_files/Dockerfile-local
+++ /dev/null
@@ -1,22 +0,0 @@
-FROM openjdk:8-jdk
-
-MAINTAINER Wurstmeister
-
-RUN apk add --update tar wget curl docker coreutils
-
-ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
-COPY kafka_2.9.2-0.8.1.1.tgz /tmp/kafka_2.9.2-0.8.1.1.tgz
-RUN tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt
-
-VOLUME ["/kafka"]
-
-ENV KAFKA_HOME /opt/kafka_2.9.2-0.8.1.1
-ADD start-kafka.sh /usr/bin/start-kafka.sh
-ADD broker-list.sh /usr/bin/broker-list.sh
-ADD create-topics.sh /usr/bin/create-topics.sh
-# The scripts need to have executable permission
-RUN chmod a+x /usr/bin/start-kafka.sh && \
- chmod a+x /usr/bin/broker-list.sh && \
- chmod a+x /usr/bin/create-topics.sh
-# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
-CMD ["start-kafka.sh"]
diff --git a/demo/docker_files/__MsgRtrApi.properties b/demo/docker_files/__MsgRtrApi.properties
deleted file mode 100644
index ea6e089..0000000
--- a/demo/docker_files/__MsgRtrApi.properties
+++ /dev/null
@@ -1,141 +0,0 @@
-###############################################################################
-##
-## Cambria API Server config
-##
-## - Default values are shown as commented settings.
-##
-
-###############################################################################
-##
-## HTTP service
-##
-## - 3904 is standard as of 7/29/14.
-#
-## Zookeeper Connection
-##
-## Both Cambria and Kafka make use of Zookeeper.
-##
-config.zk.servers=zookeeper:2181
-#config.zk.servers={{ ip.docker0 }}:2181
-#10.0.11.1:2181
-#10.208.128.229:2181
-#config.zk.root=/fe3c/cambria/config
-
-
-###############################################################################
-##
-## Kafka Connection
-##
-## Items below are passed through to Kafka's producer and consumer
-## configurations (after removing "kafka.")
-## if you want to change request.required.acks it can take this one value
-#kafka.metadata.broker.list=localhost:9092,localhost:9093
-kafka.metadata.broker.list=kafka:9092
-#kafka.metadata.broker.list={{ ip.docker0 }}:9092
-#10.0.11.1:9092
-#10.208.128.229:9092
-##kafka.request.required.acks=-1
-#kafka.client.zookeeper=${config.zk.servers}
-consumer.timeout.ms=100
-zookeeper.connection.timeout.ms=6000
-zookeeper.session.timeout.ms=6000
-zookeeper.sync.time.ms=2000
-auto.commit.interval.ms=1000
-fetch.message.max.bytes =1000000
-auto.commit.enable=false
-
-
-###############################################################################
-##
-## Secured Config
-##
-## Some data stored in the config system is sensitive -- API keys and secrets,
-## for example. to protect it, we use an encryption layer for this section
-## of the config.
-##
-## The key is a base64 encode AES key. This must be created/configured for
-## each installation.
-#cambria.secureConfig.key=
-##
-## The initialization vector is a 16 byte value specific to the secured store.
-## This must be created/configured for each installation.
-#cambria.secureConfig.iv=
-
-## Southfield Sandbox
-cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
-cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
-authentication.adminSecret=fe3cCompound
-#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
-#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
-
-
-###############################################################################
-##
-## Consumer Caching
-##
-## Kafka expects live connections from the consumer to the broker, which
-## obviously doesn't work over connectionless HTTP requests. The Cambria
-## server proxies HTTP requests into Kafka consumer sessions that are kept
-## around for later re-use. Not doing so is costly for setup per request,
-## which would substantially impact a high volume consumer's performance.
-##
-## This complicates Cambria server failover, because we often need server
-## A to close its connection before server B brings up the replacement.
-##
-
-## The consumer cache is normally enabled.
-#cambria.consumer.cache.enabled=true
-
-## Cached consumers are cleaned up after a period of disuse. The server inspects
-## consumers every sweepFreqSeconds and will clean up any connections that are
-## dormant for touchFreqMs.
-#cambria.consumer.cache.sweepFreqSeconds=15
-#cambria.consumer.cache.touchFreqMs=120000
-
-## The cache is managed through ZK. The default value for the ZK connection
-## string is the same as config.zk.servers.
-#cambria.consumer.cache.zkConnect=${config.zk.servers}
-
-##
-## Shared cache information is associated with this node's name. The default
-## name is the hostname plus the HTTP service port this host runs on. (The
-## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
-## which is not always adequate.) You can set this value explicitly here.
-##
-cambria.api.node.identifier=message-router
-
-###############################################################################
-##
-## Metrics Reporting
-##
-## This server can report its metrics periodically on a topic.
-##
-#metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
-#metrics.send.cambria.sendEverySeconds=60
-
-cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
-consumer.timeout=17
-
-##############################################################################
-#100mb
-maxcontentlength=10000
-
-
-##############################################################################
-#AAF Properties
-msgRtr.namespace.aaf=org.openecomp.dcae.dmaap.mtnje2.mr.topic
-msgRtr.topicfactory.aaf=org.openecomp.dcae.dmaap.topicFactory|:org.openecomp.dcae.dmaap.mtnje2.mr.topic:
-enforced.topic.name.AAF=org.openecomp
-forceAAF=false
-transidUEBtopicreqd=false
-defaultNSforUEB=org.openecomp.dmaap.mr.ueb
-##############################################################################
-#Mirror Maker Agent
-msgRtr.mirrormakeradmin.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|admin
-msgRtr.mirrormakeruser.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|user
-msgRtr.mirrormakeruser.aaf.create=org.openecomp.dmaap.mr.dev.topicFactory|:org.openecomp.dmaap.mr.dev.topic:
-msgRtr.mirrormaker.timeout=15000
-msgRtr.mirrormaker.topic=org.openecomp.dmaap.mr.prod.mm.agent
-msgRtr.mirrormaker.consumergroup=mmagentserver
-msgRtr.mirrormaker.consumerid=1
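This file configured the Cambria/Message Router API that the demo exposed on port 3904 (see the compose file below). Assuming the demo stack is running and the standard /events publish and subscribe paths apply, a smoke test might look like the following sketch; the topic name, consumer group, and consumer id are illustrative:

    curl -s -X POST -H 'Content-Type: application/json' \
        -d '{"msg":"hello"}' http://localhost:3904/events/TEST-TOPIC
    curl -s http://localhost:3904/events/TEST-TOPIC/g0/c0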
diff --git a/demo/docker_files/__docker-compose.yml b/demo/docker_files/__docker-compose.yml
deleted file mode 100644
index 94f8d93..0000000
--- a/demo/docker_files/__docker-compose.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-version: '2'
-networks:
- default:
- driver: bridge
- driver_opts:
- com.docker.network.driver.mtu: ${MTU}
-services:
- zookeeper:
- image: nexus3.onap.org:10001/onap/dmaap/zookeeper:1.0.0
- ports:
- - "2181:2181"
- volumes:
- - ./data-zookeeper:/var/lib/zookeeper/data
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
-
- kafka:
- #CADI integrated kafka image
- #image: nexus3.onap.org:10001/onap/dmaap/kafka01101:0.0.2
- image: nexus3.onap.org:10001/onap/dmaap/kafka01101:0.0.1
- ports:
- - "9092:9092"
- environment:
- enableCadi: 'false'
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
- #Properties required for using the CADI integrated Kafka
- #KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_SASL_PLAINTEXT:SASL_PLAINTEXT,EXTERNAL_SASL_PLAINTEXT:SASL_PLAINTEXT
- #KAFKA_ADVERTISED_LISTENERS: INTERNAL_SASL_PLAINTEXT://kafka:9092
- #KAFKA_LISTENERS: INTERNAL_SASL_PLAINTEXT://0.0.0.0:9092
- #KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_SASL_PLAINTEXT
- #KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
- #KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
- #KAFKA_AUTHORIZER_CLASS_NAME: org.onap.dmaap.kafkaAuthorize.KafkaCustomAuthorizer
- #aaf_locate_url: https://aaf-onap-test.osaaf.org:8095
- #Remove the below 4 properties if CADI
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT
- KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://kafka:9092
- KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:9092
- KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_PLAINTEXT
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
-
- dmaap:
- image: ${NEXUS_DOCKER_REPO}/onap/dmaap/dmaap-mr:1.1.9
- ports:
- - "3904:3904"
- - "3905:3905"
- volumes:
- - ./MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
- - ./cadi.properties:/appl/dmaapMR1/etc/cadi.properties
- - ./keystore.jks:/appl/dmaapMR1/bundleconfig/etc/keystore.jks
- depends_on:
- - zookeeper
- - kafka
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
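The compose file relied on two variables that the deleted deploy.sh exported, NEXUS_DOCKER_REPO and MTU. A manual bring-up sketch under that assumption; the MTU value is illustrative, as deploy.sh read it from the docker0 interface:

    export NEXUS_DOCKER_REPO=nexus3.onap.org:10001   # same registry the zookeeper/kafka images use
    export MTU=1500                                  # assumption: substitute the real docker0 MTU
    docker-compose up -d --build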
diff --git a/demo/docker_files/apikey-APPC1.key b/demo/docker_files/apikey-APPC1.key
deleted file mode 100644
index 2f77745..0000000
--- a/demo/docker_files/apikey-APPC1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-APPC1",
- "email": "no email"
- },
- "secret": "64AG2hF4pYeG2pq7CT6XwUOT",
- "key": "VIlbtVl6YLhNUrtU"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTAL1.key b/demo/docker_files/apikey-PORTAL1.key
deleted file mode 100644
index 068bed7..0000000
--- a/demo/docker_files/apikey-PORTAL1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTAL1",
- "email": "no email"
- },
- "secret": "uCYgKjWKK5IxPGNNZzYSSWo9",
- "key": "7GkVcrO6sIDb3ngW"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALAPP1.key b/demo/docker_files/apikey-PORTALAPP1.key
deleted file mode 100644
index a27422f..0000000
--- a/demo/docker_files/apikey-PORTALAPP1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTALAPP1",
- "email": "no email"
- },
- "secret": "P0HpqEBhKJvxjRYdw2sCTUll",
- "key": "jQd4a9zVNi4ePyBp"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALDBC1.key b/demo/docker_files/apikey-PORTALDBC1.key
deleted file mode 100644
index f29d959..0000000
--- a/demo/docker_files/apikey-PORTALDBC1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTALDBC1",
- "email": "no email"
- },
- "secret": "WB7AJICClKg9oZLsxhQnykXA",
- "key": "MtRwsF16RdpHZ7eM"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALPOL1.key b/demo/docker_files/apikey-PORTALPOL1.key
deleted file mode 100644
index 97b39a4..0000000
--- a/demo/docker_files/apikey-PORTALPOL1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTALPOL1",
- "email": "no email"
- },
- "secret": "P7ejzF4nS3LAsMmKKTvYYFpA",
- "key": "Gsd3C3hLYaUcor6l"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALSDC1.key b/demo/docker_files/apikey-PORTALSDC1.key
deleted file mode 100644
index 28bfb89..0000000
--- a/demo/docker_files/apikey-PORTALSDC1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTALSDC1",
- "email": "no email"
- },
- "secret": "XftIATw9Jr3VzAcPqt3NnJOu",
- "key": "x9UfO7JsDn8BESVX"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-PORTALVID1.key b/demo/docker_files/apikey-PORTALVID1.key
deleted file mode 100644
index 3373566..0000000
--- a/demo/docker_files/apikey-PORTALVID1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-PORTALVID1",
- "email": "no email"
- },
- "secret": "S31PrbOzGgL4hg4owgtx47Da",
- "key": "2Re7Pvdkgw5aeAUD"
-}
\ No newline at end of file
diff --git a/demo/docker_files/apikey-SDC1.key b/demo/docker_files/apikey-SDC1.key
deleted file mode 100644
index 207431d..0000000
--- a/demo/docker_files/apikey-SDC1.key
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for apikey-SDC1",
- "email": "no email"
- },
- "secret": "Ehq3WyT4bkif4zwgEbvshGal",
- "key": "iPIxkpAMI8qTcQj8"
-}
\ No newline at end of file
diff --git a/demo/docker_files/broker-list.sh b/demo/docker_files/broker-list.sh
deleted file mode 100644
index 7f04639..0000000
--- a/demo/docker_files/broker-list.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
-BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
-echo $BROKERS | sed -e 's/ /,/g'
diff --git a/demo/docker_files/cadi.properties b/demo/docker_files/cadi.properties
deleted file mode 100644
index a79bb8d..0000000
--- a/demo/docker_files/cadi.properties
+++ /dev/null
@@ -1,20 +0,0 @@
-aaf_locate_url=https://aaf-onap-test.osaaf.org:8095
-aaf_url=https://AAF_LOCATE_URL/AAF_NS.service:2.1
-aaf_env=DEV
-aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-
-cadi_truststore=/appl/dmaapMR1/etc/truststoreONAPall.jks
-cadi_truststore_password=changeit
-
-cadi_keyfile=/appl/dmaapMR1/etc/keyfilenew
-
-cadi_alias=dmaapmr@mr.dmaap.onap.org
-cadi_keystore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.p12
-cadi_keystore_password=Messaging for All
-cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US
-
-
-cadi_loglevel=INFO
-cadi_protocols=TLSv1.1,TLSv1.2
-cadi_latitude=37.78187
cadi_longitude=-122.26147
\ No newline at end of file
diff --git a/demo/docker_files/create-topics.sh b/demo/docker_files/create-topics.sh
deleted file mode 100644
index e07bf06..0000000
--- a/demo/docker_files/create-topics.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-
-if [[ -z "$START_TIMEOUT" ]]; then
- START_TIMEOUT=600
-fi
-
-start_timeout_exceeded=false
-count=0
-step=10
-while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
- echo "waiting for kafka to be ready"
- sleep $step;
- count=$(expr $count + $step)
- if [ $count -gt $START_TIMEOUT ]; then
- start_timeout_exceeded=true
- break
- fi
-done
-
-if $start_timeout_exceeded; then
- echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
- exit 1
-fi
-
-if [[ -n $KAFKA_CREATE_TOPICS ]]; then
- IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
- echo "creating topics: $topicToCreate"
- IFS=':' read -a topicConfig <<< "$topicToCreate"
- JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}"
- done
-fi
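The deleted create-topics.sh waited for the broker on KAFKA_PORT and then parsed KAFKA_CREATE_TOPICS as comma-separated name:partitions:replicas entries. A usage sketch with illustrative values; KAFKA_HOME must also point at the Kafka install, as the removed Dockerfile arranged:

    export KAFKA_PORT=9092
    export KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    export KAFKA_CREATE_TOPICS="ECOMP-PORTAL-INBOX:1:1,APPC-TEST1:1:1"   # name:partitions:replicas
    ./create-topics.sh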
diff --git a/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown b/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/.kafka_cleanshutdown
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/.lock b/demo/docker_files/data-kafka/kafka-logs/.lock
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/.lock
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log b/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
+++ /dev/null
diff --git a/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
deleted file mode 100644
index e845fda..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/recovery-point-offset-checkpoint
+++ /dev/null
@@ -1,9 +0,0 @@
-0
-7
-ECOMP-PORTAL-OUTBOX-VID1 0 0
-APPC-TEST1 0 0
-ECOMP-PORTAL-INBOX 0 0
-ECOMP-PORTAL-OUTBOX-APP1 0 0
-ECOMP-PORTAL-OUTBOX-SDC1 0 0
-ECOMP-PORTAL-OUTBOX-DBC1 0 0
-ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint b/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
deleted file mode 100644
index e845fda..0000000
--- a/demo/docker_files/data-kafka/kafka-logs/replication-offset-checkpoint
+++ /dev/null
@@ -1,9 +0,0 @@
-0
-7
-ECOMP-PORTAL-OUTBOX-VID1 0 0
-APPC-TEST1 0 0
-ECOMP-PORTAL-INBOX 0 0
-ECOMP-PORTAL-OUTBOX-APP1 0 0
-ECOMP-PORTAL-OUTBOX-SDC1 0 0
-ECOMP-PORTAL-OUTBOX-DBC1 0 0
-ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/demo/docker_files/data-zookeeper/version-2/log.01 b/demo/docker_files/data-zookeeper/version-2/log.01
deleted file mode 100644
index f3cb136..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.01
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.0103 b/demo/docker_files/data-zookeeper/version-2/log.0103
deleted file mode 100644
index 9b648e2..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.0103
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.0125 b/demo/docker_files/data-zookeeper/version-2/log.0125
deleted file mode 100644
index 0613642..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.0125
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.0175 b/demo/docker_files/data-zookeeper/version-2/log.0175
deleted file mode 100644
index b0e5e32..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.0175
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.01b1 b/demo/docker_files/data-zookeeper/version-2/log.01b1
deleted file mode 100644
index bf1d130..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.01b1
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.100000001 b/demo/docker_files/data-zookeeper/version-2/log.100000001
deleted file mode 100644
index 714241e..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.100000001
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/data-zookeeper/version-2/log.200000001 b/demo/docker_files/data-zookeeper/version-2/log.200000001
deleted file mode 100644
index 273ecce..0000000
--- a/demo/docker_files/data-zookeeper/version-2/log.200000001
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/download-kafka.sh b/demo/docker_files/download-kafka.sh
deleted file mode 100644
index 2ddc911..0000000
--- a/demo/docker_files/download-kafka.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
-url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
-wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/demo/docker_files/dump_mr_state.sh b/demo/docker_files/dump_mr_state.sh
deleted file mode 100644
index 75c16d6..0000000
--- a/demo/docker_files/dump_mr_state.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-
-TIMESTAMP=`date +%Y%m%d%H%M`
-CONTAINERID=`docker ps |grep kafka |cut -b1-12`
-docker cp $CONTAINERID:/kafka ./data-kafka-$TIMESTAMP
-tar zcvf ./data-kafka-$TIMESTAMP.tgz ./data-kafka-$TIMESTAMP
-CONTAINERID=`docker ps |grep zookeeper |cut -b1-12`
-docker cp $CONTAINERID:/opt/zookeeper-3.4.9/data ./data-zookeeper-$TIMESTAMP
-tar zcvf ./data-zookeeper-$TIMESTAMP.tgz ./data-zookeeper-$TIMESTAMP
diff --git a/demo/docker_files/keystore.jks b/demo/docker_files/keystore.jks
deleted file mode 100644
index c5b83cd..0000000
--- a/demo/docker_files/keystore.jks
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/mykey b/demo/docker_files/mykey
deleted file mode 100644
index c2b8b87..0000000
--- a/demo/docker_files/mykey
+++ /dev/null
@@ -1,27 +0,0 @@
-_sNOLphPzrU7L0L3oWv0pYwgV_ddGF1XoBsQEIAp34jfP-fGJFPfFYaMpDEZ3gwH59rNw6qyMZHk
-k-4irklvVcWk36lC3twNvc0DueRCVrws1bkuhOLCXdxHJx-YG-1xM8EJfRmzh79WPlPkbAdyPmFF
-Ah44V0GjAnInPOFZA6MHP9rNx9B9qECHRfmvzU13vJCcgTsrmOr-CEiWfRsnzPjsICxpq9OaVT_D
-zn6rNaroGm1OiZNCrCgvRkCUHPOOCw3j9G1GeaImoZNYtozbz9u4sj13PU-MxIIAa64b1bMMMjpz
-Upc8lVPI4FnJKg6axMmEGn5zJ6JUq9mtOVyPj__2GEuDgpx5H4AwodXXVjFsVgR8UJwI_BvS2JVp
-JoQk0J1RqXmAXVamlsMAfzmmbARXgmrBfnuhveZnh9ymFVU-YZeujdANniXAwBGI7c6hG_BXkH7i
-Eyf4Fn41_SV78PskP6qgqJahr9r3bqdjNbKBztIKCOEVrE_w3IM5r02l-iStk_NBRkj6cq_7VCpG
-afxZ2CtZMwuZMiypO_wOgbdpCSKNzsL-NH2b4b08OlKiWb263gz634KJmV5WEfCl-6eH-JUFbWOS
-JwQfActLNT2ZQPl2MyZQNBzJEWoJRgS6k7tPRO-zqeUtYYHGHVMCxMuMHGQcoilNNHEFeBCG_fBh
-yAKb9g9F86Cbx9voMLiyTX2T3rwVHiSJFOzfNxGmfN5JWOthIun_c5hEY1tLQ15BomzkDwk7BAj7
-VbRCrVD45B6xrmSTMBSWYmLyr6mnQxQqeh9cMbD-0ZAncE3roxRnRvPKjFFa208ykYUp2V83r_PJ
-fV5I9ZPKSjk9DwFyrjkcQQEYDhdK6IFqcd6nEthjYVkmunu2fsX0bIOm9GGdIbKGqBnpdgBO5hyT
-rBr9HSlZrHcGdti1R823ckDF0Ekcl6kioDr5NLIpLtg9zUEDRm3QrbX2mv5Zs8W0pYnOqglxy3lz
-bJZTN7oR7VasHUtjmp0RT9nLZkUs5TZ6MHhlIq3ZsQ6w_Q9Rv1-ofxfwfCC4EBrWKbWAGCf6By4K
-Ew8321-2YnodhmsK5BrT4zQ1DZlmUvK8BmYjZe7wTljKjgYcsLTBfX4eMhJ7MIW1kpnl8AbiBfXh
-QzN56Mki51Q8PSQWHm0W9tnQ0z6wKdck6zBJ8JyNzewZahFKueDTn-9DOqIDfr3YHvQLLzeXyJ8e
-h4AgjW-hvlLzRGtkCknjLIgXVa3rMTycseAwbW-mgdCqqkw3SdEG8feAcyntmvE8j2jbtSDStQMB
-9JdvyNLuQdNG4pxpusgvVso0-8NQF0YVa9VFwg9U6IPSx5p8FcW68OAHt_fEgT4ZtiH7o9aur4o9
-oYqUh2lALCY-__9QLq1KkNjMKs33Jz9E8LbRerG9PLclkTrxCjYAeUWBjCwSI7OB7xkuaYDSjkjj
-a46NLpdBN1GNcsFFcZ79GFAK0_DsyxGLX8Tq6q0Bvhs8whD8wlSxpTGxYkyqNX-vcb7SDN_0WkCE
-XSdZWkqTHXcYbOvoCOb_e6SFAztuMenuHWY0utX0gBfx_X5lPDFyoYXErxFQHiA7t27keshXNa6R
-ukQRRS8kMjre1U74sc-fRNXkXpl57rG4rgxaEX0eBeowa53KAsVvUAoSac2aC_nfzXrDvoyf9Xi3
-JpEZNhUDLpFCEycV4I7jGQ9wo9qNaosvlsr6kbLDNdb_1xrGVgjT3xEvRNJNPqslSAu-yD-UFhC3
-AmCdYUnugw_eEFqXCHTARcRkdPPvl2XsmEKY2IqEeO5tz4DyXQFaL-5hEVh6lYEU1EOWHk3UGIXe
-Vc5_Ttp82qNLmlJPbZvgmNTJzYTHDQ_27KBcp7IVVZgPDjVKdWqQvZ18KhxvfF3Idgy82LBZniFV
-IbtxllXiPRxoPQriSXMnXjh3XkvSDI2pFxXfEvLRn1tvcFOwPNCz3QfPIzYg8uYXN5bRt3ZOrR_g
-ZhIlrc7HO0VbNbeqEVPKMZ-cjkqGj4VAuDKoQc0eQ6X_wCoAGO78nPpLeIvZPx1X3z5YoqNA \ No newline at end of file
diff --git a/demo/docker_files/preconfigure-ecomp-keystopics.sh b/demo/docker_files/preconfigure-ecomp-keystopics.sh
deleted file mode 100644
index 03cf45c..0000000
--- a/demo/docker_files/preconfigure-ecomp-keystopics.sh
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/bin/bash
-
-HOSTPORT="127.0.0.1:3904"
-KEYDIR="."
-
-
-# dictionary of API Keys and the tpics owned by each API key
-declare -A topics
-topics=( \
-["anonymous"]="APPC-CL APPC-TEST2 PDPD-CONFIGURATION POLICY-CL-MGT DCAE-CL-EVENT unauthenticated.SEC_MEASUREMENT_OUTPUT unauthenticated.TCA_EVENT_OUTPUT " \
-["apikey-SDC1"]="SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1 SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1" \
-["apikey-APPC1"]="APPC-TEST1" \
-["apikey-PORTAL1"]="ECOMP-PORTAL-INBOX" \
-["apikey-PORTALAPP1"]="ECOMP-PORTAL-OUTBOX-APP1" \
-["apikey-PORTALDBC1"]="ECOMP-PORTAL-OUTBOX-DBC1" \
-["apikey-PORTALSDC1"]="ECOMP-PORTAL-OUTBOX-SDC1" \
-["apikey-PORTALVID1"]="ECOMP-PORTAL-OUTBOX-VID1" \
-["apikey-PORTALPOL1"]="ECOMP-PORTAL-OUTBOX-POL1" \
-)
-
-# dictionary of producers for each topic
-declare -A acl_producers
-acl_producers=(\
-["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
-["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
-["ECOMP-PORTAL-INBOX"]="apikey-PORTALAPP1 apikey-PORTALDBC1 apikey-PORTALSDC1 apikey-PORTALVID1 apikey-PORTALPOL1" \
-["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTAL1" \
-["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTAL1" \
-["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTAL1" \
-["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTAL1" \
-["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTAL1" \
-["APPC-TEST1"]="apikey-APPC1" \
-)
-
-# dictionary of consumers for each topic
-declare -A acl_consumers
-acl_consumers=(\
-["SDC-DISTR-NOTIF-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
-["SDC-DISTR-STATUS-TOPIC-SDC-OPENSOURCE-ENV1"]="apikey-sdc1" \
-["ECOMP-PORTAL-INBOX"]="apikey-PORTAL1" \
-["ECOMP-PORTAL-OUTBOX-APP1"]="apikey-PORTALAPP1" \
-["ECOMP-PORTAL-OUTBOX-DBC1"]="apikey-PORTALDBC1" \
-["ECOMP-PORTAL-OUTBOX-SDC1"]="apikey-PORTALSDC1" \
-["ECOMP-PORTAL-OUTBOX-VID1"]="apikey-PORTALVID1" \
-["ECOMP-PORTAL-OUTBOX-POL1"]="apikey-PORTALPOL1" \
-["APPC-TEST1"]="apikey-APPC1" \
-)
-
-myrun () {
- CMD="$1"
- echo "CMD:[$CMD]"
- eval $CMD
-}
-
-getowner () {
- local -n outowner=$2
- target_topic="$1"
- echo "look for owner for $target_topic"
- for o in "${!topics[@]}"; do
- keytopics=${topics[$o]}
- for topic in ${keytopics}; do
- if [ "$topic" == "-" ]; then
- continue
- fi
- if [ "$topic" == "$target_topic" ]; then
- echo "found owner $o"
- outowner=$o
- return
- fi
- done
- done
-}
-
-add_acl () {
- acl_group="$1"
- topic="$2"
- client="$3"
- echo " adding $client to group $acl_group for topic $2"
-
- getowner "$topic" owner
- echo "==owner for $topic is $owner"
-
-
- if [ -z "$owner" ]; then
- echo "No owner API key found for topic $topic"
- #exit
- fi
- OWNER_API_KEYFILE="${KEYDIR}/${owner}.key"
- if [ ! -e $API_KEYFILE ]; then
- echo "No API key file $OWNER_API_KEYFILE for owner $owner of topic $topic, exit "
- #exit
- fi
-
- CLIENT_API_KEYFILE="${KEYDIR}/${client}.key"
- if [ ! -e $CLIENT_API_KEYFILE ]; then
- echo "No API key file $CLIENT_API_KEYFILE for client $client, exit "
- #exit
- else
- CLIENTKEY=`cat ${CLIENT_API_KEYFILE} |jq -r ".key"`
- UEBAPIKEYSECRET=`cat ${OWNER_API_KEYFILE} |jq -r ".secret"`
- UEBAPIKEYKEY=`cat ${OWNER_API_KEYFILE} |jq -r ".key"`
- time=`date --iso-8601=seconds`
- signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
- xAuth=$UEBAPIKEYKEY:$signature
- xDate="$time"
- CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth:$xAuth\" -H \"X-CambriaDate:$xDate\" -X PUT http://${HOSTPORT}/topics/${topic}/${acl_group}/${CLIENTKEY}"
- myrun "$CMD"
- fi
-}
-
-
-for key in "${!topics[@]}"; do
- # try to create key if no such key exists
- API_KEYFILE="${KEYDIR}/${key}.key"
- if [ "$key" != "anonymous" ]; then
- if [ -e ${API_KEYFILE} ]; then
- echo "API key for $key already exists, no need to create new"
- else
- echo "generating API key $key"
- echo '{"email":"no email","description":"API key for '$key'"}' > /tmp/input.txt
-
- CMD="curl -s -o ${API_KEYFILE} -H \"Content-Type: application/json\" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create"
- myrun "$CMD"
- echo "API key for $key has been created: "; cat ${API_KEYFILE}
- echo "generating API key $key done"; echo
- fi
- fi
-
- # create the topics for this key
- keytopics=${topics[$key]}
- for topic in ${keytopics}; do
- if [ "$topic" == "-" ]; then
- continue
- fi
- if [ "$key" == "anonymous" ]; then
- echo "creating anonymous topic $topic"
- CMD="curl -H \"Content-Type:text/plain\" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${topic}"
- myrun "$CMD"
- echo "done creating anonymous topic $topic"; echo
- else
- echo "creating API key secured topic $topic for API key $key"
- UEBAPIKEYSECRET=`cat ${API_KEYFILE} |jq -r ".secret"`
- UEBAPIKEYKEY=`cat ${API_KEYFILE} |jq -r ".key"`
- echo '{"topicName":"'${topic}'","topicDescription":"'$key' API Key secure topic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
- time=`date --iso-8601=seconds`
- signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
- xAuth=$UEBAPIKEYKEY:$signature
- xDate="$time"
- CMD="curl -i -H \"Content-Type: application/json\" -H \"X-CambriaAuth: $xAuth\" -H \"X-CambriaDate: $xDate\" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create"
- myrun "$CMD"
- echo "done creating api key topic $topic"
- echo
- fi
- done
-done
-
-
-echo
-echo "============ post loading state of topics ================="
-CMD="curl http://${HOSTPORT}/topics"
-myrun "$CMD"
-for key in "${!topics[@]}"; do
- keytopics=${topics[$key]}
- echo "---------- key: ${key} "
- for topic in ${keytopics}; do
- if [ "$topic" == "-" ]; then
- continue
- fi
- CMD="curl http://${HOSTPORT}/topics/${topic}"
- myrun "$CMD"
- echo
- done
- echo "end of key: ${key} secured topics"
-done
-
-
-# adding publisher and subscriber ACL
-for topic in "${!acl_consumers[@]}"; do
- consumers=${acl_consumers[$topic]}
- for consumer in ${consumers}; do
- add_acl "consumers" "$topic" "$consumer"
- done
-done
-
-for topic in "${!acl_producers[@]}"; do
- producers=${acl_producers[$topic]}
- for producer in ${producers}; do
- add_acl "producers" "$topic" "$producer"
- done
-done
-
diff --git a/demo/docker_files/start-kafka.sh b/demo/docker_files/start-kafka.sh
deleted file mode 100644
index 4d955da..0000000
--- a/demo/docker_files/start-kafka.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/bash
-
-if [[ -z "$KAFKA_PORT" ]]; then
- export KAFKA_PORT=9092
-fi
-if [[ -z "$KAFKA_ADVERTISED_PORT" ]]; then
- export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
-fi
-if [[ -z "$KAFKA_BROKER_ID" ]]; then
- # By default auto allocate broker ID
- #export KAFKA_BROKER_ID=-1
- export KAFKA_BROKER_ID=1
-fi
-#if [[ -z "$KAFKA_LOG_DIRS" ]]; then
- #export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
- export KAFKA_LOG_DIRS="/kafka/kafka-logs"
-#fi
-if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
- export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
-fi
-
-if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
- sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
- unset KAFKA_HEAP_OPTS
-fi
-
-if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
- export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
-fi
-
-for VAR in `env`
-do
- if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
- kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
- env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
- if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
- sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
- else
- echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
- fi
- fi
-done
-
-if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
- eval $CUSTOM_INIT_SCRIPT
-fi
-
-
-KAFKA_PID=0
-
-# see https://medium.com/@gchudnov/trapping-signals-in-docker-containers-7a57fdda7d86#.bh35ir4u5
-term_handler() {
- echo 'Stopping Kafka....'
- if [ $KAFKA_PID -ne 0 ]; then
- kill -s TERM "$KAFKA_PID"
- wait "$KAFKA_PID"
- fi
- echo 'Kafka stopped.'
- exit
-}
-
-
-# Capture kill requests to stop properly
-trap "term_handler" SIGHUP SIGINT SIGTERM
-create-topics.sh &
-$KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties &
-KAFKA_PID=$!
-
-wait "$KAFKA_PID"
diff --git a/demo/docker_files/state-20170301.tar.gz b/demo/docker_files/state-20170301.tar.gz
deleted file mode 100644
index b36b05a..0000000
--- a/demo/docker_files/state-20170301.tar.gz
+++ /dev/null
Binary files differ
diff --git a/demo/docker_files/subscriber.sh b/demo/docker_files/subscriber.sh
deleted file mode 100644
index 3e193f0..0000000
--- a/demo/docker_files/subscriber.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-
-GET/events/{topic}/{consumerGroup}/{clientId}
diff --git a/demo/docker_files/tests/data.2.2.json b/demo/docker_files/tests/data.2.2.json
deleted file mode 100644
index c1bcdfd..0000000
--- a/demo/docker_files/tests/data.2.2.json
+++ /dev/null
@@ -1 +0,0 @@
-{ "topicName": "Topic-28592-2", "topicDescription": "topic for test 2.2", "partitionCount": "1", "replicationCount": "1", "transactionEnabled": "true" }
diff --git a/demo/docker_files/tests/data.2.5.json b/demo/docker_files/tests/data.2.5.json
deleted file mode 100644
index 75bade1..0000000
--- a/demo/docker_files/tests/data.2.5.json
+++ /dev/null
@@ -1 +0,0 @@
-{ "datestamp": "Thu Dec 15 19:50:28 UTC 2016", "appkey": "x100", "appval": "some value" }
diff --git a/demo/docker_files/tests/data.3.1.txt b/demo/docker_files/tests/data.3.1.txt
deleted file mode 100644
index c6a738a..0000000
--- a/demo/docker_files/tests/data.3.1.txt
+++ /dev/null
@@ -1 +0,0 @@
-datestamp: Thu Dec 15 19:50:38 UTC 2016, key: 3.1, value: this is a test
diff --git a/demo/docker_files/tests/data.3.3.json b/demo/docker_files/tests/data.3.3.json
deleted file mode 100644
index 9866789..0000000
--- a/demo/docker_files/tests/data.3.3.json
+++ /dev/null
@@ -1 +0,0 @@
-{ "datestamp": "Thu Dec 15 19:50:40 UTC 2016", "key": "3.3", "value": "this is a test" }
diff --git a/demo/docker_files/tests/key.req b/demo/docker_files/tests/key.req
deleted file mode 100644
index a7e4092..0000000
--- a/demo/docker_files/tests/key.req
+++ /dev/null
@@ -1 +0,0 @@
-{ "email": "no.email", "description": "request for direct response KEY" }
diff --git a/demo/docker_files/tests/out/1.1.out b/demo/docker_files/tests/out/1.1.out
deleted file mode 100644
index a9488d8..0000000
--- a/demo/docker_files/tests/out/1.1.out
+++ /dev/null
@@ -1,5 +0,0 @@
-{"topics": [
- "msgrtr.apinode.metrics.dmaap",
- "28537.3",
- "Topic-28537-2"
-]} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.1.out b/demo/docker_files/tests/out/2.1.out
deleted file mode 100644
index ef4eada..0000000
--- a/demo/docker_files/tests/out/2.1.out
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "request for direct response KEY",
- "email": "no.email"
- },
- "secret": "5V6YSDm8R6v6TArrLLtJUx4L",
- "key": "HnJm7b9Zr16hgpU5"
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.2.out b/demo/docker_files/tests/out/2.2.out
deleted file mode 100644
index d682023..0000000
--- a/demo/docker_files/tests/out/2.2.out
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "owner": "HnJm7b9Zr16hgpU5",
- "readerAcl": {
- "enabled": true,
- "users": []
- },
- "name": "Topic-28592-2",
- "description": "topic for test 2.2",
- "writerAcl": {
- "enabled": true,
- "users": []
- }
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.3.out b/demo/docker_files/tests/out/2.3.out
deleted file mode 100644
index d62034e..0000000
--- a/demo/docker_files/tests/out/2.3.out
+++ /dev/null
@@ -1,6 +0,0 @@
-{"topics": [
- "Topic-28592-2",
- "msgrtr.apinode.metrics.dmaap",
- "28537.3",
- "Topic-28537-2"
-]} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.4.out b/demo/docker_files/tests/out/2.4.out
deleted file mode 100644
index d682023..0000000
--- a/demo/docker_files/tests/out/2.4.out
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "owner": "HnJm7b9Zr16hgpU5",
- "readerAcl": {
- "enabled": true,
- "users": []
- },
- "name": "Topic-28592-2",
- "description": "topic for test 2.2",
- "writerAcl": {
- "enabled": true,
- "users": []
- }
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.5.out b/demo/docker_files/tests/out/2.5.out
deleted file mode 100644
index 670bf46..0000000
--- a/demo/docker_files/tests/out/2.5.out
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "serverTimeMs": 9,
- "count": 1
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/2.6.out b/demo/docker_files/tests/out/2.6.out
deleted file mode 100644
index 0637a08..0000000
--- a/demo/docker_files/tests/out/2.6.out
+++ /dev/null
@@ -1 +0,0 @@
-[] \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.1.out b/demo/docker_files/tests/out/3.1.out
deleted file mode 100644
index d2a9b4e..0000000
--- a/demo/docker_files/tests/out/3.1.out
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "serverTimeMs": 175,
- "count": 1
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.2.out b/demo/docker_files/tests/out/3.2.out
deleted file mode 100644
index 0637a08..0000000
--- a/demo/docker_files/tests/out/3.2.out
+++ /dev/null
@@ -1 +0,0 @@
-[] \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.3.out b/demo/docker_files/tests/out/3.3.out
deleted file mode 100644
index b823f1c..0000000
--- a/demo/docker_files/tests/out/3.3.out
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "serverTimeMs": 2,
- "count": 1
-} \ No newline at end of file
diff --git a/demo/docker_files/tests/out/3.4.out b/demo/docker_files/tests/out/3.4.out
deleted file mode 100644
index 9930076..0000000
--- a/demo/docker_files/tests/out/3.4.out
+++ /dev/null
@@ -1 +0,0 @@
-["{\"datestamp\":\"Thu Dec 15 19:50:40 UTC 2016\",\"value\":\"this is a test\",\"key\":\"3.3\"}"] \ No newline at end of file
diff --git a/demo/docker_files/tests/regress.sh b/demo/docker_files/tests/regress.sh
deleted file mode 100644
index 758dd7c..0000000
--- a/demo/docker_files/tests/regress.sh
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/bin/ksh
-#
-# depends on jq - https://stedolan.github.io/jq/
-
-PROTOCOL=http
-FQDN=127.0.0.1
-#vm1-message-router
-#FQDN=10.208.128.229
-PORT=3904
-URL=$PROTOCOL://$FQDN:$PORT
-
-rm -f out/*
-mkdir -p out
-
-results() {
-# echo "[debug] compare $1 to $2"
- if [ $1 == $2 ]
- then
- echo -n "SUCCESS "
- else
- echo -n "FAIL ($1) "
- fi
- echo " :TEST $3 ($4)"
-}
-SUITE=0
-SUITE=$((SUITE + 1))
-echo "SUITE $SUITE: List topics"
-TN=0
-TN=$((TN + 1))
-TC=$SUITE.$TN
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
-results $rc $expect $TC "list"
-StartTopicCount=`cat out/$TC.out | wc -l`
-
-
-SUITE=$((SUITE + 1))
-echo
-echo "SUITE $SUITE: APIKEY authenticated topic"
-TOPIC=Topic-$$-$SUITE
-TN=0
-TN=$((TN + 1))
-TC=$SUITE.$TN
-OUT=out/$TC.out
-echo '{ "email": "no.email", "description": "request for direct response KEY" }' > key.req
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @key.req $URL/apiKeys/create`
-results $rc $expect $SUITE.$TN "gen apikey "
-TN=$((TN + 1))
-TC=$SUITE.$TN
-SECRET=$(jq ".secret" $OUT | cut -f 2 -d \")
-KEY=$(jq ".key" $OUT | cut -f 2 -d \")
-TIME=`date --iso-8601=seconds`
-SIG=$(echo -n "$TIME" | openssl sha1 -hmac $SECRET -binary | openssl base64)
-xAUTH=$KEY:$SIG
-#echo "[debug] $SECRET $KEY $TIME $SIG $xAUTH"
-DATA=data.$TC.json
-echo "{ \"topicName\": \"$TOPIC\", \"topicDescription\": \"topic for test $TC\", \"partitionCount\": \"1\", \"replicationCount\": \"1\", \"transactionEnabled\": \"true\" }" > $DATA
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/topics/create`
-results $rc $expect $SUITE.$TN "create topic"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics`
-results $rc $expect $TC "list "
-TopicCnt=`cat out/$TC.out | wc -l`
-results $TopicCnt $((StartTopicCount + 1)) $TC "topic count"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/topics/$TOPIC`
-results $rc $expect $TC "list $TOPIC"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-DATA=data.$TC.json
-echo "{ \"datestamp\": \"`date`\", \"appkey\": \"x100\", \"appval\": \"some value\" }" > $DATA
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" -d @$DATA $URL/events/$TOPIC`
-results $rc $expect $SUITE.$TN "pub APIKEY topic"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X GET -H "Content-Type: application/json" -H "X-CambriaAuth: $xAUTH" -H "X-CambriaDate: $TIME" $URL/events/$TOPIC/g0/u1`
-results $rc $expect $SUITE.$TN "sub APIKEY topic"
-
-
-SUITE=$((SUITE + 1))
-echo
-echo "SUITE $SUITE: anonymous topic"
-TOPIC=$$.$SUITE
-TN=0
-TN=$((TN + 1))
-TC=$SUITE.$TN
-DATA=data.$TC.txt
-echo "datestamp: `date`, key: $TC, value: this is a test " > $DATA
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: text/plain" -d @$DATA $URL/events/$TOPIC`
-results $rc $expect $SUITE.$TN "pub text/plain"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
-results $rc $expect $SUITE.$TN "sub text/plain"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-DATA=data.$TC.json
-echo "{ \"datestamp\": \"`date`\", \"key\": \"$TC\", \"value\": \"this is a test\" }" > $DATA
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out -X POST -H "Content-Type: application/json" -d @$DATA $URL/events/$TOPIC`
-results $rc $expect $SUITE.$TN "pub json"
-TN=$((TN + 1))
-TC=$SUITE.$TN
-expect=200
-rc=`curl -s -w "%{http_code}" -o out/$TC.out $URL/events/$TOPIC/group1/u$$?timeout=1000`
-results $rc $expect $SUITE.$TN "sub json"
-
diff --git a/demo/docker_files/tests/test.sh b/demo/docker_files/tests/test.sh
deleted file mode 100644
index 0e06d5a..0000000
--- a/demo/docker_files/tests/test.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-# lji: this is basically what Dom has in his regtest. re-do it in bash instead of ksh
-
-HOSTPORT="127.0.0.1:3904"
-ANONTOPIC="anon-topic-$RANDOM"
-APITOPIC="api-topic-$RANDOM"
-APIKEYFILE="/tmp/key"
-
-echo "blah" > /tmp/sample.txt
-
-if [ ! -e /usr/bin/jq ]; then
- apt-get update && apt-get -y install jq
-fi
-
-
-# list topics
-curl http://${HOSTPORT}/topics
-
-# publish to an anonymous topic (first publish creats the topic)
-curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
-
-# subscribe to an anonymous topic
-curl -H "Content-Type:text/plain" -X GET http://${HOSTPORT}/events/$ANONTOPIC/group1/C1?timeout=5000 &
-curl -H "Content-Type:text/plain" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/$ANONTOPIC
-
-
-
-
-# create api key
-echo '{"email":"no email","description":"API key and secret both in reponse"}' > /tmp/input.txt
-curl -s -o ${APIKEYFILE} -H "Content-Type:application/json" -X POST -d @/tmp/input.txt http://${HOSTPORT}/apiKeys/create
-UEBAPIKEYSECRET=`cat ${APIKEYFILE} |jq -r ".secret"`
-UEBAPIKEYKEY=`cat ${APIKEYFILE} |jq -r ".key"`
-
-# create an api key secured topic
-# pay attendtion to replication count
-echo '{"topicName":"'${APITOPIC}'","topicDescription":"This is an API key securedTopic","partitionCount":"1","replicationCount":"1","transactionEnabled":"true"}' > /tmp/topicname.txt
-time=`date --iso-8601=seconds`
-signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
-xAuth=$UEBAPIKEYKEY:$signature
-xDate="$time"
-curl -i -H "Content-Type: application/json" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/topicname.txt http://${HOSTPORT}/topics/create
-
-# first subscribe and run it in bg. then publish.
-time=`date --iso-8601=seconds`
-signature=$(echo -n "$time" | openssl sha1 -hmac $UEBAPIKEYSECRET -binary | openssl base64)
-xAuth=$UEBAPIKEYKEY:$signature
-xDate="$time"
-curl -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X GET http://${HOSTPORT}/events/${APITOPIC}/g0/u1 &
-curl -H "Content-Type:text/plain" -H "X-CambriaAuth:$xAuth" -H "X-CambriaDate:$xDate" -X POST -d @/tmp/sample.txt http://${HOSTPORT}/events/${APITOPIC}
diff --git a/demo/docker_files/uebapikey-sdc b/demo/docker_files/uebapikey-sdc
deleted file mode 100644
index 0b3aa80..0000000
--- a/demo/docker_files/uebapikey-sdc
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "aux": {
- "description": "API key for SDC",
- "email": "no email"
- },
- "secret": "KpMJB28vNduEJ0zHDWOQXBmQ",
- "key": "779NflzwmkuKpqef"
-} \ No newline at end of file
diff --git a/etc/logstash_cambria_template.conf b/etc/logstash_cambria_template.conf
deleted file mode 100644
index 2ddb7d5..0000000
--- a/etc/logstash_cambria_template.conf
+++ /dev/null
@@ -1,36 +0,0 @@
-input {
- file {
- path => "${CAMBRIA_SERVER_LOG}"
- codec => multiline {
- pattern => "^\[%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND}\]"
- negate => "true"
- what => "previous"
- }
- sincedb_path => "/opt/app/logstash/conf_stage/.sincedb_cambria"
- start_position => "beginning"
- type => "cambria"
- }
-}
-
-filter {
- if [type] == "cambria" {
- grok {
- match => ["message", "\[(?<date>%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND})\]\[%{DATA:logLevel}\]\[%{DATA:thread}\]\[%{DATA:class}\]\[%{DATA:id}\]%{GREEDYDATA:message}"]
- }
-
- date {
- match => ["date", "YYYY-MMM-DD HH:mm:ss,SSS"]
- }
- }
-}
-
-output {
- if [type] == "cambria" {
- elasticsearch {
- cluster => "2020SA"
- host => "${ELASTICSEARCH_NODES}"
- index => "cambria-%{+YYYY.MM.dd}"
- }
- }
-}
-
diff --git a/oom-topics/data-kafka/kafka-logs/.kafka_cleanshutdown b/oom-topics/data-kafka/kafka-logs/.kafka_cleanshutdown
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/.kafka_cleanshutdown
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/.lock b/oom-topics/data-kafka/kafka-logs/.lock
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/.lock
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/APPC-TEST1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-INBOX-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-APP1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-DBC1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-POL1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-SDC1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.index
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log b/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
deleted file mode 100644
index e69de29..0000000
--- a/oom-topics/data-kafka/kafka-logs/ECOMP-PORTAL-OUTBOX-VID1-0/00000000000000000000.log
+++ /dev/null
diff --git a/oom-topics/data-kafka/kafka-logs/recovery-point-offset-checkpoint b/oom-topics/data-kafka/kafka-logs/recovery-point-offset-checkpoint
deleted file mode 100644
index e845fda..0000000
--- a/oom-topics/data-kafka/kafka-logs/recovery-point-offset-checkpoint
+++ /dev/null
@@ -1,9 +0,0 @@
-0
-7
-ECOMP-PORTAL-OUTBOX-VID1 0 0
-APPC-TEST1 0 0
-ECOMP-PORTAL-INBOX 0 0
-ECOMP-PORTAL-OUTBOX-APP1 0 0
-ECOMP-PORTAL-OUTBOX-SDC1 0 0
-ECOMP-PORTAL-OUTBOX-DBC1 0 0
-ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/oom-topics/data-kafka/kafka-logs/replication-offset-checkpoint b/oom-topics/data-kafka/kafka-logs/replication-offset-checkpoint
deleted file mode 100644
index e845fda..0000000
--- a/oom-topics/data-kafka/kafka-logs/replication-offset-checkpoint
+++ /dev/null
@@ -1,9 +0,0 @@
-0
-7
-ECOMP-PORTAL-OUTBOX-VID1 0 0
-APPC-TEST1 0 0
-ECOMP-PORTAL-INBOX 0 0
-ECOMP-PORTAL-OUTBOX-APP1 0 0
-ECOMP-PORTAL-OUTBOX-SDC1 0 0
-ECOMP-PORTAL-OUTBOX-DBC1 0 0
-ECOMP-PORTAL-OUTBOX-POL1 0 0
diff --git a/oom-topics/data-zookeeper/version-2/log.01 b/oom-topics/data-zookeeper/version-2/log.01
deleted file mode 100644
index f3cb136..0000000
--- a/oom-topics/data-zookeeper/version-2/log.01
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.0103 b/oom-topics/data-zookeeper/version-2/log.0103
deleted file mode 100644
index 9b648e2..0000000
--- a/oom-topics/data-zookeeper/version-2/log.0103
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.0125 b/oom-topics/data-zookeeper/version-2/log.0125
deleted file mode 100644
index 0613642..0000000
--- a/oom-topics/data-zookeeper/version-2/log.0125
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.0175 b/oom-topics/data-zookeeper/version-2/log.0175
deleted file mode 100644
index b0e5e32..0000000
--- a/oom-topics/data-zookeeper/version-2/log.0175
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.01b1 b/oom-topics/data-zookeeper/version-2/log.01b1
deleted file mode 100644
index bf1d130..0000000
--- a/oom-topics/data-zookeeper/version-2/log.01b1
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.100000001 b/oom-topics/data-zookeeper/version-2/log.100000001
deleted file mode 100644
index 714241e..0000000
--- a/oom-topics/data-zookeeper/version-2/log.100000001
+++ /dev/null
Binary files differ
diff --git a/oom-topics/data-zookeeper/version-2/log.200000001 b/oom-topics/data-zookeeper/version-2/log.200000001
deleted file mode 100644
index 273ecce..0000000
--- a/oom-topics/data-zookeeper/version-2/log.200000001
+++ /dev/null
Binary files differ
diff --git a/pom.xml b/pom.xml
index 4eb9642..176bddd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,13 +1,15 @@
-<!-- ============LICENSE_START=======================================================
- org.onap.dmaap ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved. ================================================================================
+<!-- ============LICENSE_START====================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
- language governing permissions and limitations under the License. ============LICENSE_END=========================================================
+ language governing permissions and limitations under the License.
+ ============LICENSE_END=========================================================
ECOMP is a trademark and service mark of AT&T Intellectual Property. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -29,7 +31,6 @@
<name>Apache License Version 2.0</name>
</license>
</licenses>
-
<developers>
<developer>
<name>Rajashree</name>
@@ -44,18 +45,15 @@
<organizationUrl>www.att.com</organizationUrl>
</developer>
</developers>
-
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<dockerLocation>${basedir}/target/swm/package/nix/dist_files/</dockerLocation>
<docker.image.prefix>simpledemo</docker.image.prefix>
- <!-- <javax-mail-version>1.5.0</javax-mail-version> -->
<module.ajsc.namespace.name>dmaap</module.ajsc.namespace.name>
<module.ajsc.namespace.version>v1</module.ajsc.namespace.version>
<ajscRuntimeVersion>3.0.11-oss</ajscRuntimeVersion>
- <!-- This will be the Absolute Root of the Project and should contain NO
- Versioning -->
+ <!-- This will be the Absolute Root of the Project and should contain NO Versioning -->
<absoluteDistFilesRoot>/appl/${project.artifactId}</absoluteDistFilesRoot>
<!-- <absoluteDistFilesRoot>/opt/app/dmaap/${project.artifactId}</absoluteDistFilesRoot> -->
<!-- For Versioning upon installation, add /${project.version} to distFilesRoot.
@@ -65,23 +63,9 @@
<distFilesRoot>/appl/${project.artifactId}</distFilesRoot>
<!-- <distFilesRoot>/opt/app/dmaap/${project.artifactId}</distFilesRoot> -->
<runAjscHome>${basedir}/target/swm/package/nix/dist_files${distFilesRoot}</runAjscHome>
-
- <!-- For SOA Cloud Installation <installOwnerUser>aft</installOwnerUser>
- <installOwnerGroup>aft</installOwnerGroup> <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup> -->
- <!-- For SOA Cloud Installation -->
- <installOwnerUser>msgrtr</installOwnerUser>
- <installOwnerGroup>dmaap</installOwnerGroup>
- <ownerManagementGroup>com.att.acsi.saat.dt.dmaap.dev</ownerManagementGroup>
- <!-- Port Selection. A value of 0 will allow for dynamic port selection.
- For local testing, you may choose to hardcode this value to something like
- 8080 -->
<serverPort>3904</serverPort>
<sslport>3905</sslport>
<onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
- <testRouteOffer>workstation</testRouteOffer>
- <testEnv>DEV</testEnv>
- <!-- <dmaapImg>${project.version}</dmaapImg> -->
- <timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
<camel.version>2.21.5</camel.version>
<camel.version.latest>3.5.0</camel.version.latest>
@@ -89,8 +73,8 @@
<skip.docker.build>false</skip.docker.build>
<skip.docker.tag>false</skip.docker.tag>
<skip.docker.push>false</skip.docker.push>
- <nexusproxy>https://nexus.onap.org</nexusproxy>
<docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
+ <nexusproxy>https://nexus.onap.org</nexusproxy>
<spring.version>3.2.18.RELEASE</spring.version>
<sonar.language>java</sonar.language>
<sonar.skip>false</sonar.skip>
@@ -216,17 +200,16 @@
at ajsc-Support <DL-ajsc-Support@att.com>. For more information regarding
the usage of the AJSC service pom.xml and management of dependencies, -->
<dependencies>
-
- <!-- cmn-CommonDataModel dependency added to resolve build issue not finding
- version 100.0.64 -->
-
- <!-- <dependency> <groupId>csi-schemas-source</groupId> <artifactId>cmn-CommonDataModel</artifactId>
- <version>112.0.50</version> </dependency> -->
- <!-- TODO: add open source version here -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
<version>2.4.0</version>
+ <exclusions>
+ <exclusion>
+ <artifactId>logback-classic</artifactId>
+ <groupId>ch.qos.logback</groupId>
+ </exclusion>
+ </exclusions>
</dependency>
<!-- Micormeter core dependecy -->
<dependency>
@@ -312,11 +295,6 @@
<artifactId>zkclient</artifactId>
<version>0.11</version>
</dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.6.1</version>
- </dependency>
<!-- Log4j2 logger -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
@@ -331,7 +309,7 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
- <version>1.6.1</version>
+ <version>1.7.32</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
@@ -585,6 +563,10 @@
<artifactId>jline</artifactId>
<groupId>jline</groupId>
</exclusion>
+ <exclusion>
+ <artifactId>testng</artifactId>
+ <groupId>org.testng</groupId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
@@ -741,12 +723,29 @@
<version>3.0.11-oss</version>
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka_2.12</artifactId>
- <version>2.7.0</version>
+<!-- <dependency>-->
+<!-- <groupId>org.apache.kafka</groupId>-->
+<!-- <artifactId>kafka_2.12</artifactId>-->
+<!-- <version>2.8.0</version>-->
+<!-- <scope>test</scope>-->
+<!-- <exclusions>-->
+<!-- <exclusion>-->
+<!-- <artifactId>metrics-core</artifactId>-->
+<!-- <groupId>com.yammer.metrics</groupId>-->
+<!-- </exclusion>-->
+<!-- </exclusions>-->
+<!-- </dependency>-->
+ <dependency>
+ <groupId>io.dropwizard.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>4.2.3</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>1.7.32</version>
+ </dependency>
</dependencies>
<profiles>
<!-- Use this profile to run the AJSC locally. This profile can be successfully
@@ -833,173 +832,6 @@
</plugins>
</build>
</profile>
-
- <profile>
- <id>runLocal</id>
- <build>
- <defaultGoal>initialize</defaultGoal>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <phase>initialize</phase>
- <configuration>
- <target name="runLocal">
- <ant antfile="${basedir}/antBuild/build.xml" target="runLocal" />
- </target>
- </configuration>
- <goals>
- <goal>run</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
-
- <profile>
- <id>runAjsc</id>
- <build>
- <defaultGoal>initialize</defaultGoal>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>exec-maven-plugin</artifactId>
- <version>1.3.2</version>
- <executions>
- <execution>
- <phase>initialize</phase>
- <goals>
- <goal>java</goal>
- </goals>
- <configuration>
- <!-- In order to better mimic a SOA cloud installation of AJSC (and
- to help eliminate Maven/Eclipse/AJSC classpath issues that may be difficult
- to diagnose), within this profile used to run locally, we are NOT including
- project dependencies. These will be loaded by AJSC from $AJSC_HOME/extJars.
- The only jar needed to run AJSC is the ajsc-runner.jar, and therefore is
- the only dependency required by this profile to run locally. -->
- <includeProjectDependencies>false</includeProjectDependencies>
- <includePluginDependencies>true</includePluginDependencies>
- <executable>java</executable>
- <mainClass>com.att.ajsc.runner.Runner</mainClass>
- <executableDependency>
- <groupId>com.att.ajsc</groupId>
- <artifactId>ajsc-runner</artifactId>
- </executableDependency>
- <additionalClasspathElements>
- <additionalClasspathElement>${basedir}/ajsc-shared-config/etc</additionalClasspathElement>
- </additionalClasspathElements>
-
- <environmentVariables>
- <AJSC_HOME>${runAjscHome}</AJSC_HOME>
- </environmentVariables>
-
- <!-- Main AJSC System Properties below (necessary for proper startup) -->
- <systemProperties>
- <systemProperty>
- <key>AJSC_HOME</key>
- <value>${runAjscHome}</value>
- </systemProperty>
- <systemProperty>
- <key>AJSC_HOME</key>
- <value>${runAjscHome}</value>
- </systemProperty>
- <systemProperty>
- <key>AJSC_CONF_HOME</key>
- <value>${basedir}/bundleconfig-local</value>
- </systemProperty>
- <systemProperty>
- <key>logback.configurationFile</key>
- <value>${basedir}/ajsc-shared-config/etc/logback.xml</value>
- </systemProperty>
- <systemProperty>
- <key>com.att.eelf.logging.file</key>
- <value>logback.xml</value>
- </systemProperty>
- <systemProperty>
- <key>com.att.eelf.logging.path</key>
- <value>${basedir}/ajsc-shared-config/etc</value>
- </systemProperty>
- <systemProperty>
- <key>logging.config</key>
- <value>${basedir}/ajsc-shared-config/etc/logback.xml</value>
- </systemProperty>
- <systemProperty>
- <key>AJSC_SHARED_CONFIG</key>
- <value>${basedir}/ajsc-shared-config</value>
- </systemProperty>
-
- <!-- Please, NOTE: The following 2 system properties will normally
- be set within the sys-props.properties file once deployed to a node. We are
- setting them HERE to run locally to make more efficient use of maven variable
- replacement for ${basedir} -->
- <!-- AJSC_EXTERNAL_LIB_FOLDERS represents the particular jars that
- will be externalized on a CSI node. This includes dme2 and csm related artifact. -->
- <sysproperty>
- <key>AJSC_EXTERNAL_LIB_FOLDERS</key>
- <value>${basedir}/target/commonLibs</value>
- </sysproperty>
- <!-- AJSC_EXTERNAL_PROPERTIES_FOLDERS represents the particular
- files that may need to be added to the classpath. These files will be externalized
- on a CSI node. This includes dme2 and csm related artifact (such as csm-config-app.properties).
- Failure to have these files on the classpath may result in errors thrown
- by csm framework. -->
- <sysproperty>
- <key>AJSC_EXTERNAL_PROPERTIES_FOLDERS</key>
- <value>${basedir}/ajsc-shared-config/etc</value>
- </sysproperty>
-
- <systemProperty>
- <key>AJSC_SERVICE_NAMESPACE</key>
- <value>${module.ajsc.namespace.name}</value>
- </systemProperty>
- <systemProperty>
- <key>AJSC_SERVICE_VERSION</key>
- <value>${module.ajsc.namespace.version}</value>
- </systemProperty>
- <systemProperty>
- <key>SOACLOUD_SERVICE_VERSION</key>
- <value>${project.version}</value>
- </systemProperty>
- <systemProperty>
- <key>server.port</key>
- <value>${serverPort}</value>
- </systemProperty>
- </systemProperties>
-
- <!-- Command Line Arguments to add to the java command. Here, you
- can specify the port as well as the Context you want your service to run
- in. Use context=/ to run in an unnamed Context (Root Context). The default
- configuration of the AJSC is to run under the / Context. Setting the port
- here can aid during the development phase of your service. However, you can
- leave this argument out entirely, and the AJSC will default to using an Ephemeral
- port. -->
- <arguments>
- <argument>context=/</argument>
- <argument>port=${serverPort}</argument>
- <argument>sslport=${sslport}</argument>
- </arguments>
- </configuration>
- </execution>
- </executions>
- <configuration>
- <executable>java</executable>
- </configuration>
- <dependencies>
- <dependency>
- <groupId>com.att.ajsc</groupId>
- <artifactId>ajsc-runner</artifactId>
- <version>${ajscRuntimeVersion}</version>
- </dependency>
- </dependencies>
- </plugin>
- </plugins>
- </build>
- </profile>
<profile>
<id>standardbuild</id>
<activation>
@@ -1292,19 +1124,6 @@
</configuration>
</execution>
<execution>
- <id>copy-dependencies-ajsc-arch-parent-commonLibs</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
- <includeScope>provided</includeScope>
- <includeGroupIds>net.cingular.enterprise,com.att.aft,dom4j</includeGroupIds>
- <outputDirectory>${project.build.directory}/commonLibs</outputDirectory>
- <silent>true</silent>
- </configuration>
- </execution>
- <execution>
<id>copyrunner</id>
<phase>prepare-package</phase>
<goals>
@@ -1337,21 +1156,6 @@
</configuration>
</execution>
<execution>
- <id>copyscript</id>
- <phase>package</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/bin</outputDirectory>
- <resources>
- <resource>
- <directory>src/main/scripts</directory>
- </resource>
- </resources>
- </configuration>
- </execution>
- <execution>
<id>copy-docker-file</id>
<phase>prepare-package</phase>
<goals>
@@ -1373,66 +1177,66 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>com.google.code.maven-replacer-plugin</groupId>
- <artifactId>replacer</artifactId>
- <version>1.5.2</version>
- <executions>
- <execution>
- <id>google-replacer-ajsc-arch-parent</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>replace</goal>
- </goals>
- <configuration>
- <basedir>${project.build.directory}</basedir>
- <includes>
- <include>staticContent/**/*.html</include>
- <include>CDP/**</include>
- <include>bundleconfig/**</include>
- <include>versioned-runtime/**/*.context</include>
- <include>versioned-runtime/**/*.json</include>
- <include>versioned-ajsc/**/*.route</include>
- <include>versioned-ajsc/**/*.groovy</include>
- <include>versioned-ajsc/**/*.xml</include>
- <include>classes/*.properties</include>
- <include>swm/package/nix/dist_files${distFilesRoot}/etc/*.properties</include>
- </includes>
- <escape>true</escape>
- <replacements>
- <replacement>
- <token>__module_ajsc_namespace_name__</token>
- <value>${module.ajsc.namespace.name}</value>
- </replacement>
- <replacement>
- <token>__module_ajsc_namespace_version__</token>
- <value>${module.ajsc.namespace.version}</value>
- </replacement>
- <replacement>
- <token>__ajscHome__</token>
- <value>${replaceWinFilePath}</value>
- </replacement>
- <replacement>
- <token>__projectGroupId__</token>
- <value>${project.groupId}</value>
- </replacement>
- <replacement>
- <token>__projectArtifactId__</token>
- <value>${project.artifactId}</value>
- </replacement>
- <replacement>
- <token>__projectVersion__</token>
- <value>${project.version}</value>
- </replacement>
- <replacement>
- <token>__basedir__</token>
- <value>${replaceWinBasedirPath}</value>
- </replacement>
- </replacements>
- </configuration>
- </execution>
- </executions>
- </plugin>
+<!-- <plugin>-->
+<!-- <groupId>com.google.code.maven-replacer-plugin</groupId>-->
+<!-- <artifactId>replacer</artifactId>-->
+<!-- <version>1.5.2</version>-->
+<!-- <executions>-->
+<!-- <execution>-->
+<!-- <id>google-replacer-ajsc-arch-parent</id>-->
+<!-- <phase>prepare-package</phase>-->
+<!-- <goals>-->
+<!-- <goal>replace</goal>-->
+<!-- </goals>-->
+<!-- <configuration>-->
+<!-- <basedir>${project.build.directory}</basedir>-->
+<!-- <includes>-->
+<!-- <include>staticContent/**/*.html</include>-->
+<!-- <include>CDP/**</include>-->
+<!-- <include>bundleconfig/**</include>-->
+<!-- <include>versioned-runtime/**/*.context</include>-->
+<!-- <include>versioned-runtime/**/*.json</include>-->
+<!-- <include>versioned-ajsc/**/*.route</include>-->
+<!-- <include>versioned-ajsc/**/*.groovy</include>-->
+<!-- <include>versioned-ajsc/**/*.xml</include>-->
+<!-- <include>classes/*.properties</include>-->
+<!-- <include>swm/package/nix/dist_files${distFilesRoot}/etc/*.properties</include>-->
+<!-- </includes>-->
+<!-- <escape>true</escape>-->
+<!-- <replacements>-->
+<!-- <replacement>-->
+<!-- <token>__module_ajsc_namespace_name__</token>-->
+<!-- <value>${module.ajsc.namespace.name}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__module_ajsc_namespace_version__</token>-->
+<!-- <value>${module.ajsc.namespace.version}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__ajscHome__</token>-->
+<!-- <value>${replaceWinFilePath}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__projectGroupId__</token>-->
+<!-- <value>${project.groupId}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__projectArtifactId__</token>-->
+<!-- <value>${project.artifactId}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__projectVersion__</token>-->
+<!-- <value>${project.version}</value>-->
+<!-- </replacement>-->
+<!-- <replacement>-->
+<!-- <token>__basedir__</token>-->
+<!-- <value>${replaceWinBasedirPath}</value>-->
+<!-- </replacement>-->
+<!-- </replacements>-->
+<!-- </configuration>-->
+<!-- </execution>-->
+<!-- </executions>-->
+<!-- </plugin>-->
</plugins>
<pluginManagement>
<plugins>
@@ -1444,6 +1248,9 @@
<excludes>
<exclude>**/AbstractTest.java</exclude>
</excludes>
+ <argLine>
+ --illegal-access=permit
+ </argLine>
</configuration>
</plugin>
</plugins>
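
Editor's note: the new surefire argLine relaxes JDK 9+ module encapsulation for the unit tests, presumably because the tests (or their mocking libraries) reflect into JDK internals. Purely as an illustration, and not taken from this codebase, here is a minimal Java sketch of the kind of deep reflective access that --illegal-access=permit downgrades to a warning on JDK 9-15:

import java.lang.reflect.Field;

// Illustrative only: reflective access into java.base, the pattern that
// --illegal-access=permit tolerates with a warning (JDK 16+ denies it outright).
public class IllegalAccessProbe {
    public static void main(String[] args) throws Exception {
        Field value = String.class.getDeclaredField("value");
        value.setAccessible(true);              // warns under permit, throws under deny
        Object backing = value.get("hello");
        System.out.println("backing array type: " + backing.getClass().getSimpleName());
    }
}
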
diff --git a/services/README.txt b/services/README.txt
deleted file mode 100644
index c20898d..0000000
--- a/services/README.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Place any Deployment Packages (zips) in this folder to be deployed with your service.
-This can be used for importing older Nimbus deployment packages to the AJSC. Not all
-prior Nimbus services are available and therefore, not ALL former Nimbus deployment
-packages will convert directly to AJSC. However, for Service Development, you may create
-other Services (Deployment Packages) as separate services and test by themselves. Then,
-simply take the created zip (deployment package) of the service and place in THIS services folder for it
-to be deployed within this AJSC Container. This folder will be copied to the ultimate AJSC_HOME/services
-folder from which all services are deployed. \ No newline at end of file
diff --git a/src/main/config/ajsc-chef.jks b/src/main/config/ajsc-chef.jks
deleted file mode 100644
index 69e5671..0000000
--- a/src/main/config/ajsc-chef.jks
+++ /dev/null
Binary files differ
diff --git a/src/main/config/ajscJetty.jks b/src/main/config/ajscJetty.jks
deleted file mode 100644
index 8fb6328..0000000
--- a/src/main/config/ajscJetty.jks
+++ /dev/null
Binary files differ
diff --git a/src/main/config/keyfile b/src/main/config/keyfile
deleted file mode 100644
index f086f84..0000000
--- a/src/main/config/keyfile
+++ /dev/null
@@ -1,27 +0,0 @@
-I2AQRRjEe4nCZvHUxYeJorPx8cli8KdeCVGl_N5kOSX5R_rI3eKNJDGzpGu3y4jLAxVerSfrnxDp
-tyTUlBEUhTIfpgIcMutY3vZZysMUz8no7tjBe2fWupW72b7d3IK-gCOlb8kla0R7ZchwS3tOZ36j
-p9h1MHcVOTUFiAzwNsgHHOgSPPAA800btWm0Fkrik4NZns_RXCnWW5Mlt9-UEeCT02Cyauf9rhuI
-2edZbR0pb9VXZohCo70qcrg7UJXFl-SwMsqNMXCMnxIy0X_CM_gHwU29whPUGBjekTWqZTauEEZi
-CHrB4uizYOLjB1rH6bOriXsxey5hGx9oCt-6Ixhunj1VEOg_WSiwmpJCtamcb6fL6GcTNOkDcPpg
-PQgaviBOOn0aL6WHzeZl2O1FjeNwOQp0DNvuUx8K4aNVDP3-ZLourwMQaLADIuux4ZRHRZnMYTGZ
-zkUWMI8rge5l7CaqMN8BIZlYJZk5I6DM4ct8uA8UVKzJtf6zS71uCSMV_nzW2-qJQ_vO-E-vJwBO
-bvee6Rq46BE6q3oLDCH3e687QryDxoB7maKoycZSTeQvWQDt3VpmLXoQjHoDFXwsXA3BnIBWa522
-MBhGZC3xD8z4JXbM3rLmpPVeTFQJBR1Rkn0TNj1baa0av3PySZFUAF9x3PkIYqC6dl-jY4HrTUlg
-41NuhCYv-JSL2To7SFkPzK5hXEQuWmuDxXJkQso2-P8_qwUWEDXs6aDvwR3CmNCyN8n6IKkJF167
-1OHZ4QvVbI1XdUUAG3_2ZSENDGlItjjlwCfMeyjxH9SOJhWNm26YDH3lQ008A67jkoiooUgTudKF
-5aE3msIz0j57icW1HOevRN2qE1KmlHPD45cXMJND8G6vl7ZLR9m96SkLuE_QslVtv_Ue2ONEvmfF
-AhrERhhlRzLNCy6vpEvnkjhp1ffF0tkABGL2CuoJFagVCHDfL8Ki7wUEb1z8kmNk7Wlb6eHM5W2c
-__BtydiXioTNjbSzBJVjJTYRk5wU8TLPm6IIO2WL4uXiYM-11w_gAW-R9YNoeAe3o0VwFRHlRsUF
-IOv7sqOQ6ue2J5kcK9XHpmn8sQqrd2npKCpKL3hveLh6EL8NzE4clMvr87QakRKRpwsDXG4Vjv_F
-SvJy5RHawjFiFSi6cdBefZNThBqSpGg_5HVjUrmcioBTGoMBeJw2XJEE-ghhvImfPd9ypqZCU_SG
-tdDWbay7SPigQASXtjy1lYR7yG9-ibv00uu2TTU-wSWm3iu1_cA67LmsfiTjOWYFAS9agBX-Zkb5
-q555ysrQwd4FPZByxosLbB-31CIDQbnce2SiZjswlTTgY8rSStYyJ39vWA-G5ogUzuUKNiJcNerY
-5ii4YWPL-5OuVSPN2VwBlr6Af2iZpuGhCxknKkOa88a4rTgGKtvWb6b7FTrJKR_LH36RNThuWF5d
-ar8bLXc0m_0WdmvHxuTHWRkFgGB05sZYarZUk8vPF32YDtrqsd4ZV_rmSfPBVfqHItXPOnLxG7Fv
-hZ0mavL5-3jnrn_dlODgqGEolVvwHelNaukeU4_ZKa84uGG2QV7epWHflcTxoD7GoURihY4ATQgt
-eNi7Q-4kHiwObG6PMhzhVlBBoQArR4zICyC9nXUrVIt_MHtDWqbLZUpGQISvV_jnOsJ3wkcWxNoi
-jZGe9H-UddfWV_sxmRgHamsNHF9Ee_bwOU6sY48MPv8yO6aHCoPhuE4eh3DCsd5cU44nc3isqrGD
-hyYJmz1BD6IH4QlQO0daMCY8mkuF1zV5OsdropHvjaN7DSuplpJy2uMLtvNUJJxRXh8vD4GF68aL
-9A1d7fNcfYQxP0Lux0LjaPUERAF6xaePbO0Qop2oiO77qRs6qsDFGdgosUfXa9JrBtwUAU5Q9O4R
-5MvAeqxJ8fux6HJMOl9DOfMPC58Uxzi3C0ERKcaORTBVQJGauzySZ6Dwk9JOlZ0CXEGbXZeYThYe
-bR2YrN3xoWjrgB4gEtqHLhNCfLvnZtMtoSw4CjynRHXlIayBUMBtrhJ4TjpdTvba9PVNuleT \ No newline at end of file
diff --git a/src/main/config/keyfile1 b/src/main/config/keyfile1
deleted file mode 100644
index 20d6254..0000000
--- a/src/main/config/keyfile1
+++ /dev/null
@@ -1,27 +0,0 @@
-Y9xUekNIvxqllfhTgQy3Jnw99wr32caQRQ5ZF1q3jI7jH0AWHxjeIf_5LvFNGBtxRFWtPvA69G5t
-h8Pm6dwh8Fh8jqIzlyr0gY7H4tMWhOGLG20SFE5HgsnlkGS1obAZ30EJIDmmAqP0R4EpdnRWDA_S
-BIAWYqiRlI23aqtRcIP321W9f6dHfyU_9MNMi3ZdMqg6bH5MQXpNw6ZPqs089QVa8Fhmna1dZa2o
-96Al7aN7Sxc3J_AP0zfwgC4JtvDG0Ie-F-SYX3EJ-WACcdCpmxIIwgfCE9P02vapzQ5l6tAFlr00
-A3hTFOvR6EDw_UzNR3mQflKIS5mAlxt0d7lFubRSjHxf9Oal5137cAb4I28Lyitqrp8L6rnkMdr1
-sJSR0fHTvawBoGFfdRIUzKLAbXWF7RYD807k18fhhq3SIKi0ZdHVLnwqsbjvMCEQD9dlPvfV0U95
-lGxzlYtWtmn_BplHgeZAJ-0F2__5I9fcSiRFHL5C5-30Seax25ydj4JaIVAwss77z7e0oKef1W2W
-NA0jp1Pyhm-G0UVrT-MG0d_PX3IO2sYNwLYRsgz35_UgOnxJdev7pl6VAhPrfeWui4wXwt76Fyto
-VmM6ENkHAEsYe0UzI9zIANz2IVJUD8HQJAFLDdh_bC6ME4qcUcOWrhSGlzQ8NZnXDnPqtcOA1UvH
-R6CGsR5mMlW6OYLiu1l0QNcx96-J3APd_Va6HGn6I4kAYlPXgn51M8bQmUmBBPTTzN3PRxea87pY
-eWYjFW4b4gITiS_XfhyrSPAV9aNiqqfJxvmkbfC7XZRFIZGpsSUuUaHxtGGKqMbSWr6Gl8OxuqVQ
-y7lUdvdBNevtkq1I1i0RY_fyURfPkWFrzFVlGH-trht4TwNicCVofldHZe8ihBSKmYvfwjGdWs3h
-7C4lAUg4WCKApfDKjEbj_0X8UcM78U_rtrgauH-YhxqJgHDC9LzLXzwzyLk-Ky2SFYCO7z6qXOa7
-wLW2UYesi6Bz2lk_jinBx5ZNA8UW5Oyyqgte3OcpJ3geNmAvUUGYVqEFSVwVoVw5IynE8SPitA59
-bihLwGSA35oONrsaLmYH5ftioKPD1v1ac-hUDLsljam4riEQkDB2TyVQmk9JZcDuuPgG0yPnUgZE
-yse83-CHztmm3GJp69WkrVv1_s9KxaWEVKx2UODNmZ1B4_nvsMuAPUZca9LWK6HTIvlH3RsNW0eA
-VBsNVdXs7jaJFOyyFyYg2mQMQRG6hhJIV9ay4cHLQe7kF-qOJkCl9luuOBuhn7QyHPH3VVfCUKHE
-buRqwfPSXuYtnixPlIgZQI6m2JHEZPyStYnZ6ykbSHqLuAwesDVJCbrD6DRQ3NLkhPCrhKy2gTSf
-aIf0ipkFtghQsKExYCCedNs7jDd-2jVhYeYLyqBXV-WEXwSJFjo44WaXQz9vCBvCtkZjsX325_ST
-5irVViW0iTvRQcw0wcb6vyyzSEf0nJOrNJpe5VzxKH-W2YXZD-XAF5fkfyudqQKO5NNIVa5d1zMP
-YgCxww4iQKKGHtNQ7eooxbogpGQuWmupO3iZAkF52s7Frin6PnVU25LGiietG-9xppsTiCYTs5K1
-z2UbTvOXTx1fIlRCbDZAz7T4oXAzIKAdvKiEZvxMqb9aeiUvbgCxm7rOBCNajWwIKkWzYIfH1ajq
-KQRIvj5QDFppe7Fx5ZTBRC7Eq_9jyno7PKwOGoIbZjtcuigHlH4DNm3rRiVNoejRRb_q_xa5U22A
-ce5j2fSHzM2J6cdvWD9BGiaFIOGku02h4LNk7QI6HYa62Kv-QNJ0HRtugFjdDUuWB6fLNr-60dfz
-HDONZsa8oTuJ72jZIMIZmyCcEVLxAUA4DFCT-LkLDAQCEL6tBdyy3CBcFzLeR1HFbt6TxCEm8JS0
-GA3HXvSimoL0K8f3AzYFoVZ6NY897rlJ3_aICIw-6sfiLBmm1aowgfIbnRFhx5V1FEUUSG33XqSX
-XdbF7nTRDl5woGChrDomCUXvuz1gXujDqccyclA2-v03u6mSGel3XVBAsJfhs1oJP0rSTNm_ \ No newline at end of file
diff --git a/src/main/config/keyfile2 b/src/main/config/keyfile2
deleted file mode 100644
index d744844..0000000
--- a/src/main/config/keyfile2
+++ /dev/null
@@ -1,27 +0,0 @@
-rrtJzL4AdOdm6TfWi0644LRIZyXv4YYGyVNMANeDlVw_StIxrV_059ry3BsqUgSa07O-O82MzV6q
-WSftVjmHYdL3Qxz9epjMt7PkcTDHaYLqURcOKSZ2dH7WGhKVCwD-z8R7hSAyMQRUWPRErciaPyq_
-R913Xsj6dcACLHPjV1nQm2uyrdTRv10k6wF02GoaN3FjFo-0QjVWxq6Y1jjxmiXksbjdgvgPUonG
-vJxzI7ip8SgfLl4Y-fpNYW4nb_KhTuSghxAeMi0GjYIe_7YmfY7oDBtzwFMszsK_q9NBHrOF9N9k
-Rskb6iGAML--E5KO5varMWJ4n3xVNA5_y0CdezKBEOin-eNmnwS-vrsT06wmYa1uZPP3haRE0OQF
-XcoPdN0gxPwWrUBPZCaEHS5grbaJYIrFl8en5WYpB_iEVJ2f5TYYM6xzoVU3CAjQqwcuczufyTV9
-L1XUbZbYJFFHYLhxXJXeJ1rHqVrZ9AKchjgtxL6qSLV-w0Vq2l2IwMwK-5dXgbQsfNSWNjM0p9hZ
-8SrfmUT2Ixp4nNk4_CoFujdWv1iCD3Kr-jtV8TX2GvsYoW_9ExvoVl1TJFfqk2A5sOuBaCptNLhg
-DUkcW-sf_BpACwJz-FMJtHhsX5NQECNFkl-_bPA1J3FT49pzw9yxoqdUHB2wdcptLKw-VXJK17xU
-QHGC3k53nKm64yyCw2Ex6yXu8cwvNjolJBiED0qr3vHGFkchjlj7fgEpnnAXI_4UiyKguOGj1ywa
-SAa_azv1W3NewcfpdqeWrLjpl1Hme5-M3gBDoBsGoOePLR9aNDk5la9eg3-6E4jpCv9HlVQ-_MmQ
-syP-idHyV0WrUUpldmUOSWQqZR8nQOU4JJM0eZoR5Cn5b2R0xidXn556SC3lbuWFOzgJh19_4tXb
-0kRnTLW3B-Uj-68KWfB5xUOUlaIXqDGKSSMlzFagZBseQ02fNmgduZ8h3UE2nCvJWQ2radadFnug
-NGpBrxCji9jIqBHUOSKqKHepm3_fRXrur-gwVsovNqOYjcLEXia5IlCMM0M80VdmcxpoizlbjzUh
-xRb1vyhLXCUZv6ZFAbFjCCmJLcWD_OWOYhTBYiQk6Uu1mEdCFbz6Wxhtt7Zugse6DpCuurC2lPsC
-gSHbnsfHDDZokbiKaU4pzTow0LqDvb7TodatGlZI7Ts4vMj-YA_vrlFPcIXcTzYMDZwj5AV3_hZT
-K8W2xTpyDSo3Jo6UB_k_ka6vw82WyKLBW0yvV20LDhokd5aChdtKOU1B05p4iMyhEpiAVeIyaDcg
-1iaW2kIRfK8UXo-ptYITSomYrS8_1QqS2LhhltXc73Rzwc0-Ati9qyMS-wtRvYOQra9Hff_y8Xcw
-Y-w2r3g4MISoXNrrVhZviUVhctpTrhYMinW4-AQ9wcFGbu58C6wH7NhbNztysEKF8sCMA2Jq5G8A
-XrjDY-KiM1k2C2vQxvo4GBKfKfZb4HqIpmc7lDuYH-T7Wo-rnAClzqB_VRc64Rn4GKJih6r7jPjs
--HZUReQ7M7oBXe-X925FpGGYpabpJGLpk4h9WbgHy0iXiZfuDW4KRhBNZ9ZVt7fOa3AwZxHuZfoJ
-ydlVLpuHZ4ENzdNu5q1ycfvr-h3THJd665IerG1Del-By1vrZfqsVdyzEvOqQ30R7NDfBXnk2xJp
-oc_ituvuM_N0L27PH3-YFxXynjUZV2EH8OD3w7WB2Kq8Xi7-cbjqCOKwrjLBoDHBp2UGY47e1yb4
-B5MFHGkWdYZ_4e8mhSsqY5mRWAxS7eOe9NOw_xdrsVBarGgLXTq_WTueVlhEYCC-hHbCr838S6QX
-Q8xJiRpIJGDHY2cvV3Rob9wR7wk4C-cErMRFQHLPQTmpcMDTZ4JknJ_79POiw6DbZ1xloihlM-P4
-cBiHxyb7IeiVsonVutgVo9ViLfYAvJ3gYs9L5Rc5XhNRiNpYVZq__O7VBW7riUF2GlHLURUZPjgq
-8CXroMsnKvGfB-OhcOVxuDO8P5odl28ogwtVf0TGOcaUcPlFySJcOp4DDx-O2erWcBXZ1fHX \ No newline at end of file
diff --git a/src/main/config/keyfile_old b/src/main/config/keyfile_old
deleted file mode 100644
index 6a1657e..0000000
--- a/src/main/config/keyfile_old
+++ /dev/null
@@ -1,27 +0,0 @@
-ctRt8XTd7N57kcm0npZOWSDF5I69w9K97cQS_ep0AgxgHmYB0WtYblsrMGuHfyS1o4697zLiIeoS
-Nn5kE1kedl4c4HevfuwfoJpWyiugYusNOqbTGQJ1MHOwqiBEJnjXepZEoz1btaW_hDO7uz-BoD4t
-SxwNRwVQpcg0_CmBX-yIW2YCIECoxZH9_X_8fcXYHP2VgFxxBpvjgycNQlyN15_VSuLwn3Wj0W8_
-8chRxGURyhp8iEBSb4tIdN5jXkhCma7AP7wreMufFQqXjdfWqIisJPfIpS3znl5IiTOZP22XhHay
-gq2KFwABVqjM71m5czEz1ojGkbFEAGImrY-VFHuug2u4ss4VW7TGeJst0z7I5vrn5M6i9Eb6xiGh
-jNUebRCV3cYGrtD9SlvjJBVVeP_3OrkxlD4oktx-JTRJzYtXADB5if2gtpYxy84kqrz7ltr5rXUH
-zSG7ujKCXOOE_Wk6vQPSjYPnum6R_mxOorCNCvtf6ne85Xd81DZlJM-CleVNdOU7g1xie-gBZPAX
-bOvWf6p_pVNmH76v-m4XLAAUqEzt-9PvNmirODiDiY5bNz6l-1ejw8IyQYb37e_3sN_LjF7A9HgB
-Dia7kNjsfB7_2vB7R4qjwNLsmTMnQCDANnNpl9VpotZ4blPhhOWhB1Tg3lxc-z-VRV7GBbl_2eQd
-3eYUT1Z5Li184W4-pft_TCaDJ1NyaJd1CQxQEuIORdq5B6Q2L9SMmmOOh82Czu5_Ro80IGikHXHp
-Lqf2fIaceY_IBAeGp2iPjtXdkghV24vIT49oRfqf6sBKAPy-88xILnMWM6M5bMCETKn7UvM1kV5y
-ZQYlsi-36n73ETZyiFs1PLqe8D6dRURrcBG_B9i1MafNiWa-elG6E0X0pSK9CadchSA0KRMaKtfE
-6-iyUqE-bx-0ELTbV2y7gLdu5MVtjRmQB5ozoaBq8ik4-jAWAsKpTv4DfWoMp9DkRENlKeauayuT
-j_VAGhqy07pIntQKtbK9EP0tndSKtF3WLwHel1I5C3lthhkxxfzpxURBxO1ZJMFJZ6rLu1Ku03zw
-LJ7nFFR_YfJ7tnGZE4PEt7MOZNiNoD3__9PthO5HmZdk1gPMrKlojU1hyR3IlbVShUst6rA3MkWk
-MD-zlw9mhNgaV3xvPJ945pYPe4C6qIwxXoiXGHyhv_0MpcvuMW-pUuAZXfkuiqNwQnpUTLBD0YJw
-uwMbE7sN40e6-BSxEiMOab7s2gShbaK9JjCMQUH_vAuQSZjU4sn53jsS7U4DHntzgxVYttIwGZaU
-b-1R7jYphNJnCI8rPB_xjJ0OMssNKT7lYRgG_ZuKvifYvJWt-NwD0z2qoePcRGExXuioRDNR4SlB
--RN33dYhp6vRsHKT1oLpl-UJB6dqJlZ2dCsfc7vT1Vs0SYidRYXCUJNBSePI4-1LMlHKOqGASBcg
-pl589601-EtO7ch3RoaL26rNXzA-umUWYRPQPZ76wcgK2j4k5Ndub5dWK9jI6UW3RbF6ixe0Yw2j
-_Pipt4EX8R6-sb87D69JOOnZlFVB6EcCO07Q7j6DavpUNHlLmDmPgArqODh002scvW1ryMxBR2XE
-m3kGQh2IFh5Qru8duxblEYE-lmHGxXVgDtKiKgHwPTkaxcquEtZTEJxaIJIgoKj7SgMzdfbeLlJM
-RwbdvExmnRT9ivFImeIV7ACPnfBP3URd82kTG8FyiMvSpdCLL16FWOd9gjZuMstqZrmIVF8tO2WT
-COMIx-jqvQD2zS1Ul5p0szJaf-CxBjy7-cJIaAyEToR1T5bBFtQt4sEFxG7XG0cCoXShqclL70TV
-W13X5pY55YwHkCR4mRjc0o0ZKStY3OADVLFom1bC9AmMBqU4PsKNAX29LT37WE-I23tQgzid0Ix9
-JuVzlbOTvi19uLYbltrHavU3UbVhYxNNI7Y7tM02xfq3LhGqZG5EPS-WAB9bBixHQqw78cd9iqIr
-hHlZW80l1kgs1ezMqgxfwDuiFOZIu9UWQ6vSnTAvfhwJhcr77gSk5Gu957uxzleaS4gVwTYU
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java b/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java
index fd342b7..a036358 100644
--- a/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java
+++ b/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java
@@ -3,6 +3,7 @@
* org.onap.dmaap
* ================================================================================
* Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * Modification copyright (C) 2021 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -37,7 +38,7 @@ public interface CambriaConstants {
String kSetting_ZkConfigDbRoot = "config.zk.root";
String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
-String msgRtr_prop="MsgRtrApi.properties";
+ String msgRtr_prop="MsgRtrApi.properties";
String kBrokerType = "broker.type";
/**
diff --git a/src/main/resources/docker/startup.sh b/src/main/resources/docker/startup.sh
index 9fb3414..4e4e8a2 100644
--- a/src/main/resources/docker/startup.sh
+++ b/src/main/resources/docker/startup.sh
@@ -4,6 +4,7 @@
# org.onap.dmaap
# ================================================================================
# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Modification copyright (C) 2021 Nordix Foundation.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -30,7 +31,7 @@ echo "Starting using" $runner_file
if [ -z "${MR_JVM_ARGS}" ]; then
-java -jar -Xmx512m -Xms512m -Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.rmi.port=5555 -Dcom.sun.management.jmxremote.port=5555 -DSOACLOUD_SERVICE_VERSION=0.0.1 -DAJSC_HOME=$root_directory -DAJSC_CONF_HOME=$config_directory -DAJSC_SHARED_CONFIG=$config_directory -DAJSC_HTTPS_PORT=3905 -Dplatform=NON-PROD -DPid=1306 -Dlogback.configurationFile=/appl/dmaapMR1/bundleconfig/etc/logback.xml -Xmx512m -Xms512m $runner_file context=/ port=3904 sslport=3905
+java -jar -Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.rmi.port=5555 -Dcom.sun.management.jmxremote.port=5555 -DSOACLOUD_SERVICE_VERSION=0.0.1 -DAJSC_HOME=$root_directory -DAJSC_CONF_HOME=$config_directory -DAJSC_SHARED_CONFIG=$config_directory -DAJSC_HTTPS_PORT=3905 -Dplatform=NON-PROD -DPid=1306 -Dlogback.configurationFile=/appl/dmaapMR1/bundleconfig/etc/logback.xml -Xmx512m -Xms512m $runner_file context=/ port=3904 sslport=3905
else
diff --git a/src/main/resources/images/attLogo.gif b/src/main/resources/images/attLogo.gif
deleted file mode 100644
index 10f184c..0000000
--- a/src/main/resources/images/attLogo.gif
+++ /dev/null
Binary files differ
diff --git a/src/main/resources/images/att_vt_1cp_grd_rev.gif b/src/main/resources/images/att_vt_1cp_grd_rev.gif
deleted file mode 100644
index 034515c..0000000
--- a/src/main/resources/images/att_vt_1cp_grd_rev.gif
+++ /dev/null
Binary files differ
diff --git a/src/main/resources/templates/hello.html b/src/main/resources/templates/hello.html
deleted file mode 100644
index 69a65ab..0000000
--- a/src/main/resources/templates/hello.html
+++ /dev/null
@@ -1,9 +0,0 @@
-#set($tab="")
-#parse("header.html")
-
- <h1>Cambria API</h1>
- <p>This is a Cambria API server, part of the Universal Event Broker service, a general purpose,
- high-throughput pub/sub event routing system.</p>
- <p>Please see <a href="http://sa2020.it.att.com:8888/sw/cambria/intro">the Cambria project</a> information page.
-
-#parse("footer.html")
diff --git a/src/main/test/com/att/nsa/dmaap/DummyTest.java b/src/main/test/com/att/nsa/dmaap/DummyTest.java
deleted file mode 100644
index 17adcea..0000000
--- a/src/main/test/com/att/nsa/dmaap/DummyTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
- package org.onap.dmaap;
-
-import static org.junit.Assert.*;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class DummyTest {
-
- @Before
- public void setUp() throws Exception {
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
- @Test
- public void test() {
- fail("Not yet implemented");
- }
-
-}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
index 3e3fd28..f49f615 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
@@ -3,6 +3,7 @@
* org.onap.dmaap
* ================================================================================
* Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * Modification copyright (C) 2021 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -95,7 +96,7 @@ public class EmbedConfigurationReader {
final Properties props = new Properties ();
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" );
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret'");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
props.put("sasl.mechanism", "PLAIN");
fKafkaAdminClient = AdminClient.create ( props );
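
Editor's note: the only functional change in this hunk is the trailing semicolon. Kafka parses sasl.jaas.config with the JAAS grammar, which requires each login module entry to be terminated with ';'. A minimal sketch of the same AdminClient setup outside the test harness (bootstrap address and credentials are placeholders):

import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

// Sketch of a SASL_PLAINTEXT AdminClient; note the terminating ';' on the JAAS entry,
// without which the client rejects the login module configuration.
public class SaslAdminClientFactory {
    public static AdminClient create(String bootstrapServers, String user, String password) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.plain.PlainLoginModule required "
                        + "username='" + user + "' password='" + password + "';");
        return AdminClient.create(props);
    }
}
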
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java
index 9f3c05a..17c5bbb 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java
@@ -3,6 +3,7 @@
* ONAP Policy Engine
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * Modification copyright (C) 2021 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,16 +21,16 @@
package org.onap.dmaap.mr.cambria.embed;
-import kafka.server.KafkaConfig;
-import kafka.server.KafkaServerStartable;
-
import java.io.IOException;
import java.util.Properties;
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaServer;
+import org.apache.kafka.common.utils.Time;
public class KafkaLocal {
- public KafkaServerStartable kafka;
+ public KafkaServer kafka;
public ZooKeeperLocal zookeeper;
public KafkaLocal(Properties kafkaProperties, Properties zkProperties) throws IOException, InterruptedException{
@@ -38,10 +39,12 @@ public class KafkaLocal {
//start local zookeeper
System.out.println("starting local zookeeper...");
zookeeper = new ZooKeeperLocal(zkProperties);
+ zookeeper.run();
System.out.println("done");
//start local kafka broker
- kafka = new KafkaServerStartable(kafkaConfig);
+ final scala.Option<String> prefix = scala.Option.apply("kafka");
+ kafka = new KafkaServer(kafkaConfig, Time.SYSTEM, prefix, false);
System.out.println("starting local kafka broker...");
kafka.startup();
System.out.println("done");
@@ -52,6 +55,10 @@ public class KafkaLocal {
//stop kafka broker
System.out.println("stopping kafka...");
kafka.shutdown();
+ kafka.awaitShutdown();
+ System.out.println("done");
+ System.out.println("stopping zookeeper...");
+ zookeeper.stop();
System.out.println("done");
}
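
Editor's note: the test helper now constructs KafkaServer directly instead of going through the KafkaServerStartable wrapper, passing Time.SYSTEM and an Option-wrapped thread-name prefix, and it waits for broker shutdown before stopping ZooKeeper. An illustrative usage sketch; the property values and the name of the shutdown method are assumptions, since only fragments of the class appear in this hunk:

import java.util.Properties;
import org.onap.dmaap.mr.cambria.embed.KafkaLocal;

public class EmbeddedKafkaSmokeTest {
    public static void main(String[] args) throws Exception {
        Properties zkProps = new Properties();              // assumed minimal single-node config
        zkProps.put("dataDir", "/tmp/embedded-zk");
        zkProps.put("clientPort", "2181");

        Properties kafkaProps = new Properties();           // assumed minimal single-broker config
        kafkaProps.put("broker.id", "0");
        kafkaProps.put("zookeeper.connect", "localhost:2181");
        kafkaProps.put("listeners", "PLAINTEXT://localhost:9092");
        kafkaProps.put("log.dirs", "/tmp/embedded-kafka-logs");

        // Constructor (shown in the patch) starts local ZooKeeper, then the broker.
        KafkaLocal kafka = new KafkaLocal(kafkaProps, zkProps);
        try {
            // ... exercise producers/consumers against localhost:9092 ...
        } finally {
            // Hypothetical name: whichever KafkaLocal method wraps the shutdown sequence
            // shown above (kafka.shutdown(), kafka.awaitShutdown(), zookeeper.stop()).
            kafka.stopKafka();
        }
    }
}
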
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java
index 97447a8..94939c7 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java
@@ -3,6 +3,7 @@
* ONAP Policy Engine
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * Modification copyright (C) 2021 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,41 +21,45 @@
package org.onap.dmaap.mr.cambria.embed;
+import java.util.Properties;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.ZooKeeperServerMain;
-import org.apache.zookeeper.server.admin.AdminServer.AdminServerException;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Properties;
-
public class ZooKeeperLocal {
-
- ZooKeeperServerMain zooKeeperServer;
-
- public ZooKeeperLocal(Properties zkProperties) throws FileNotFoundException, IOException{
+
+ ZooKeeperServerMain testingZooKeeperMain = null;
+ ServerConfig conf;
+ Thread t1;
+
+ public ZooKeeperLocal(Properties zkProperties) {
QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig();
try {
quorumConfiguration.parseProperties(zkProperties);
} catch(Exception e) {
throw new RuntimeException(e);
}
-
- zooKeeperServer = new ZooKeeperServerMain();
- final ServerConfig configuration = new ServerConfig();
- configuration.readFrom(quorumConfiguration);
-
-
- new Thread() {
- public void run() {
- try {
- zooKeeperServer.runFromConfig(configuration);
- } catch (IOException | AdminServerException e) {
- System.out.println("ZooKeeper Failed");
- e.printStackTrace(System.err);
- }
- }
- }.start();
+ conf = new ServerConfig();
+ conf.readFrom(quorumConfiguration);
}
+
+ public void run() {
+ if (testingZooKeeperMain == null){
+ t1 = new Thread(() -> {
+ try {
+ testingZooKeeperMain = new ZooKeeperServerMain();
+ testingZooKeeperMain.runFromConfig(conf);
+ } catch (Exception e) {
+ System.out.println("Start of Local ZooKeeper Failed");
+ e.printStackTrace(System.err);
+ }
+ });
+ t1.start();
+ }}
+
+ public void stop() {
+ testingZooKeeperMain.close();
+ t1.stop();
+ }
+
}
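
Editor's note: with this rework the constructor only parses the quorum configuration; the server is started on a worker thread via run() and torn down via stop(). A minimal standalone usage sketch (the data directory, port, and wait are placeholders, not project configuration):

import java.util.Properties;
import org.onap.dmaap.mr.cambria.embed.ZooKeeperLocal;

public class ZooKeeperLocalExample {
    public static void main(String[] args) throws Exception {
        Properties zkProps = new Properties();
        zkProps.put("dataDir", "/tmp/standalone-zk");        // placeholder data directory
        zkProps.put("clientPort", "2181");                   // placeholder client port

        ZooKeeperLocal zk = new ZooKeeperLocal(zkProps);     // parses config only, nothing starts yet
        zk.run();                                            // spawns the ZooKeeperServerMain thread
        try {
            Thread.sleep(2000);                              // crude wait for the server to bind
            // ... connect a ZooKeeper client to localhost:2181 here ...
        } finally {
            zk.stop();                                       // closes the server and halts the thread
        }
    }
}
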
diff --git a/src/test/resources/MsgRtrApi.properties b/src/test/resources/MsgRtrApi.properties
index 3aef922..3c2e346 100644
--- a/src/test/resources/MsgRtrApi.properties
+++ b/src/test/resources/MsgRtrApi.properties
@@ -2,7 +2,7 @@
# ============LICENSE_START=======================================================
# org.onap.dmaap
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright � 2017 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -52,7 +52,7 @@ config.zk.servers=<zookeeper_host>
kafka.metadata.broker.list=<kafka_host>:<kafka_port>
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
-consumer.timeout.ms=100
+consumer.timeout.ms=1000
zookeeper.connection.timeout.ms=6000
zookeeper.session.timeout.ms=20000
zookeeper.sync.time.ms=2000
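
Editor's note: raising consumer.timeout.ms from 100 to 1000 gives the embedded-Kafka tests a full second to fetch messages instead of timing out almost immediately. Purely as an illustration of how such a timeout is commonly applied when polling (this is not Message Router code; broker, group, and topic names are invented):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class TimedPollExample {
    public static void main(String[] args) {
        Properties cfg = new Properties();
        cfg.put("bootstrap.servers", "localhost:9092");      // placeholder broker
        cfg.put("group.id", "timed-poll-example");
        cfg.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        cfg.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        long timeoutMs = 1000L;                              // mirrors consumer.timeout.ms above
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(cfg)) {
            consumer.subscribe(Collections.singletonList("example-topic"));  // placeholder topic
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(timeoutMs));
            System.out.println("fetched " + records.count() + " record(s) within " + timeoutMs + " ms");
        }
    }
}
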