summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/conf/FileMonitorBeans.xml42
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/conf/HelloWorldBeans.xml29
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/conf/jaxrsBeans.groovy22
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/conf/serviceBeans.xml134
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/docs/README.txt1
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/lib/README.txt1
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/props/module.props1
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/UIService.route9
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService.route10
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService2.route10
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService.route10
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService2.route9
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/errorMessage.route4
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/eventService.route15
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloCAET.route43
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloServlet.route4
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloWorld.route4
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/metricsService.route9
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/mirrorService.route15
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/topicService.route19
-rw-r--r--src/main/ajsc/dmaap_v1/dmaap/v1/routes/transactionService.route9
-rw-r--r--src/main/assemble/ajsc_module_assembly.xml87
-rw-r--r--src/main/assemble/ajsc_props_assembly.xml44
-rw-r--r--src/main/assemble/ajsc_runtime_assembly.xml65
-rw-r--r--src/main/config/DMaaPErrorMesaages.properties59
-rw-r--r--src/main/config/ajsc-chef.jksbin0 -> 5229 bytes
-rw-r--r--src/main/config/ajsc-jetty.xml138
-rw-r--r--src/main/config/ajsc-jolokia-override-web.xml68
-rw-r--r--src/main/config/ajsc-override-web.xml67
-rw-r--r--src/main/config/ajsc-request.xml71
-rw-r--r--src/main/config/ajscJetty.jksbin0 -> 3685 bytes
-rw-r--r--src/main/config/cadi.properties67
-rw-r--r--src/main/config/caet.properties25
-rw-r--r--src/main/config/hazelcast-client.properties46
-rw-r--r--src/main/config/jul-redirect.properties34
-rw-r--r--src/main/config/keyfile27
-rw-r--r--src/main/config/keyfile127
-rw-r--r--src/main/config/keyfile227
-rw-r--r--src/main/config/keyfile_old27
-rw-r--r--src/main/config/runner-web.xml119
-rw-r--r--src/main/config/template.lrm.xml147
-rw-r--r--src/main/java/com/att/nsa/dmaap/DMaaPCambriaExceptionMapper.java143
-rw-r--r--src/main/java/com/att/nsa/dmaap/DMaaPWebExceptionMapper.java202
-rw-r--r--src/main/java/com/att/nsa/dmaap/HelloWorld.java42
-rw-r--r--src/main/java/com/att/nsa/dmaap/JaxrsEchoService.java91
-rw-r--r--src/main/java/com/att/nsa/dmaap/JaxrsUserService.java59
-rw-r--r--src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesListener.java42
-rw-r--r--src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesMap.java126
-rw-r--r--src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertyService.java164
-rw-r--r--src/main/java/com/att/nsa/dmaap/mmagent/CreateMirrorMaker.java43
-rw-r--r--src/main/java/com/att/nsa/dmaap/mmagent/MirrorMaker.java70
-rw-r--r--src/main/java/com/att/nsa/dmaap/mmagent/UpdateMirrorMaker.java43
-rw-r--r--src/main/java/com/att/nsa/dmaap/mmagent/UpdateWhiteList.java44
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/AdminRestService.java293
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/ApiKeysRestService.java254
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/EventsRestService.java313
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/MMRestService.java1238
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/MetricsRestService.java152
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/TopicRestService.java688
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/TransactionRestService.java176
-rw-r--r--src/main/java/com/att/nsa/dmaap/service/UIRestServices.java198
-rw-r--r--src/main/java/com/att/nsa/dmaap/tools/ConfigTool.java818
-rw-r--r--src/main/java/com/att/nsa/dmaap/tools/ConfigToolContext.java69
-rw-r--r--src/main/java/com/att/nsa/dmaap/util/ContentLengthInterceptor.java132
-rw-r--r--src/main/java/com/att/nsa/dmaap/util/DMaaPAuthFilter.java164
-rw-r--r--src/main/java/com/att/nsa/dmaap/util/ServicePropertiesMapBean.java41
-rw-r--r--src/main/resources/docker-compose/Dockerfile22
-rw-r--r--src/main/resources/docker-compose/LICENSE202
-rw-r--r--src/main/resources/docker-compose/README.md78
-rw-r--r--src/main/resources/docker-compose/broker-list.sh26
-rw-r--r--src/main/resources/docker-compose/create-topics.sh53
-rw-r--r--src/main/resources/docker-compose/docker-compose-single-broker.yml16
-rw-r--r--src/main/resources/docker-compose/docker-compose.yml27
-rw-r--r--src/main/resources/docker-compose/download-kafka.sh26
-rw-r--r--src/main/resources/docker-compose/start-kafka-shell.sh23
-rw-r--r--src/main/resources/docker-compose/start-kafka.sh88
-rw-r--r--src/main/resources/docker/Dockerfile6
-rw-r--r--src/main/resources/docker/startup.sh29
-rw-r--r--src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context1
-rw-r--r--src/main/runtime/context/default#0.context1
-rw-r--r--src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json1
-rw-r--r--src/main/runtime/shiroRole/ajscadmin.json1
-rw-r--r--src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json1
-rw-r--r--src/main/runtime/shiroRole/contextadmin#default.json1
-rw-r--r--src/main/runtime/shiroUser/ajsc.json1
-rw-r--r--src/main/runtime/shiroUserRole/ajsc#ajscadmin.json1
-rw-r--r--src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json1
-rw-r--r--src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json1
-rw-r--r--src/main/scripts/cambria.sh49
-rw-r--r--src/main/scripts/cambriaJsonPublisher.sh41
-rw-r--r--src/main/scripts/cambriaMonitor.sh40
-rw-r--r--src/main/scripts/cambriaMonitorWithAuth.sh43
-rw-r--r--src/main/scripts/cambriaSimpleTextPubWithAuth.sh38
-rw-r--r--src/main/scripts/cambriaSimpleTextPublisher.sh35
-rw-r--r--src/main/scripts/cambriaTool.sh55
-rw-r--r--src/main/scripts/swmpkgclean.sh42
-rw-r--r--src/main/swm/common/common.env19
-rw-r--r--src/main/swm/common/deinstall.env15
-rw-r--r--src/main/swm/common/deinstall_postproc.sh26
-rw-r--r--src/main/swm/common/deinstall_preproc.sh45
-rw-r--r--src/main/swm/common/install.env37
-rw-r--r--src/main/swm/common/install_postproc.sh191
-rw-r--r--src/main/swm/common/install_preproc.sh46
-rw-r--r--src/main/swm/common/utils/findreplace.sh94
-rw-r--r--src/main/swm/deinstall/postproc/post_proc4
-rw-r--r--src/main/swm/deinstall/preproc/pre_proc4
-rw-r--r--src/main/swm/descriptor.xml386
-rw-r--r--src/main/swm/fallback/postproc/post_proc4
-rw-r--r--src/main/swm/fallback/preproc/install_preproc.sh26
-rw-r--r--src/main/swm/fallback/preproc/pre_proc4
-rw-r--r--src/main/swm/initinst/postproc/post_proc25
-rw-r--r--src/main/swm/initinst/preproc/pre_proc4
-rw-r--r--src/main/swm/install/postproc/post_proc4
-rw-r--r--src/main/swm/install/preproc/pre_proc4
-rw-r--r--src/main/swm/notes.txt8
-rw-r--r--src/main/test/com/att/nsa/dmaap/DummyTest.java45
-rw-r--r--src/test/java/com/att/nsa/dmaap/DummyTest.java45
117 files changed, 9135 insertions, 0 deletions
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/conf/FileMonitorBeans.xml b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/FileMonitorBeans.xml
new file mode 100644
index 0000000..22ae528
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/FileMonitorBeans.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
+
+ <bean
+ class="org.springframework.context.annotation.CommonAnnotationBeanPostProcessor" />
+
+ <bean id="ServicePropertiesListener" class="com.att.nsa.dmaap.filemonitor.ServicePropertiesListener" />
+
+ <bean id="ServicePropertiesMap" class="com.att.nsa.dmaap.filemonitor.ServicePropertiesMap" />
+
+ <bean id="ServicePropertyService" class="com.att.nsa.dmaap.filemonitor.ServicePropertyService">
+ <property name="loadOnStartup" value="false" />
+ <property name="fileChangedListener" ref="ServicePropertiesListener" />
+ <property name="filePropertiesMap" ref="ServicePropertiesMap" />
+ <property name="ssfFileMonitorPollingInterval" value="15" />
+ <property name="ssfFileMonitorThreadpoolSize" value="10" />
+ </bean>
+</beans>
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/conf/HelloWorldBeans.xml b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/HelloWorldBeans.xml
new file mode 100644
index 0000000..c27414f
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/HelloWorldBeans.xml
@@ -0,0 +1,29 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans.xsd">
+ <bean id="helloWorld" name="helloWorld"
+ class="com.att.nsa.dmaap.HelloWorld" />
+</beans>
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/conf/jaxrsBeans.groovy b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/jaxrsBeans.groovy
new file mode 100644
index 0000000..430e723
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/jaxrsBeans.groovy
@@ -0,0 +1,22 @@
+ beans{
+ xmlns cxf: "http://camel.apache.org/schema/cxf"
+ xmlns jaxrs: "http://cxf.apache.org/jaxrs"
+ xmlns util: "http://www.springframework.org/schema/util"
+
+ echoService(com.att.nsa.dmaap.JaxrsEchoService)
+ userService(com.att.nsa.dmaap.JaxrsUserService)
+ topicService(com.att.nsa.dmaap.service.TopicRestService)
+ eventService(com.att.nsa.dmaap.service.EventsRestService)
+ adminService(com.att.nsa.dmaap.service.AdminRestService)
+ apiKeyService(com.att.nsa.dmaap.service.ApiKeysRestService)
+ metricsService(com.att.nsa.dmaap.service.MetricsRestService)
+ transactionService(com.att.nsa.dmaap.service.TransactionRestService)
+ UIService(com.att.nsa.dmaap.service.UIRestServices)
+ mirrorService(com.att.nsa.dmaap.service.MMRestService)
+
+ util.list(id: 'jaxrsServices') {
+ ref(bean:'echoService')
+ ref(bean:'userService')
+
+ }
+} \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/conf/serviceBeans.xml b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/serviceBeans.xml
new file mode 100644
index 0000000..1a499f2
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/conf/serviceBeans.xml
@@ -0,0 +1,134 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd">
+
+ <!-- Dependency Injection with annotations -->
+ <context:component-scan
+ base-package="com.att.nsa.cambria.utils, com.att.nsa.cambria, com.att.nsa.cambria.rest,
+ com.att.nsa.cambria.service.impl,com.att.nsa.cambria.beans,com.att.nsa.cambria.security,
+ com.att.nsa.cambria.transaction,com.att.nsa.cambria.exception,com.att.nsa.dmaap,com.att.nsa.dmaap.service,com.att.nsa.dmaap.util" />
+
+ <context:property-placeholder
+ location="file:${AJSC_HOME}/bundleconfig/etc/appprops/MsgRtrApi.properties,file:${AJSC_HOME}/etc/DMaaPErrorMesaages.properties"/>
+
+ <bean id="jsonProvider" class="org.apache.cxf.jaxrs.provider.json.JSONProvider">
+ <property name="dropRootElement" value="true" />
+ <property name="supportUnwrapped" value="true" />
+ </bean>
+
+ <bean id="jacksonProvider" class="org.codehaus.jackson.jaxrs.JacksonJsonProvider" />
+
+ <bean id="cambriaExMapper" class="com.att.nsa.dmaap.DMaaPCambriaExceptionMapper" />
+
+ <bean id="webExMapper" class="com.att.nsa.dmaap.DMaaPWebExceptionMapper" />
+
+
+	<!-- Your bean definitions go here -->
+<!-- <bean id="performanceLog" name="performanceLog" class="com.att.ajsc.csi.logging.PerformanceTracking" /> -->
+<!-- <bean id="processRestletHeaders" name="processRestletHeaders" class="ajsc.restlet.ProcessRestletHeaders" /> -->
+ <bean id="servicePropsBean" name="servicePropsBean"
+ class="com.att.nsa.dmaap.util.ServicePropertiesMapBean" />
+
+ <!-- Msgrtr beans -->
+ <bean id="propertyReader" class="com.att.nsa.cambria.utils.PropertyReader" />
+ <bean
+ class="org.springframework.beans.factory.config.MethodInvokingFactoryBean">
+ <!-- Next value is the full qualified name of the static setter including
+ method name -->
+ <property name="staticMethod"
+ value="com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory.populateKafkaInternalDefaultsMap" />
+ <!-- <property name="arguments">
+ <list>
+ <ref bean="propertyReader" />
+ </list>
+ </property>-->
+ </bean>
+
+ <bean id="drumlinRequestRouter"
+ class="com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter" />
+
+ <bean id="dMaaPMetricsSet" class="com.att.nsa.cambria.beans.DMaaPMetricsSet">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPZkClient" class=" com.att.nsa.cambria.beans.DMaaPZkClient">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPZkConfigDb" class="com.att.nsa.cambria.beans.DMaaPZkConfigDb">
+ <constructor-arg ref="dMaaPZkClient" />
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+
+ <bean id="kafkaPublisher" class=" com.att.nsa.cambria.backends.kafka.KafkaPublisher">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPKafkaConsumerFactory" class=" com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPMetricsSet" />
+ <constructor-arg ref="curator" />
+ </bean>
+
+ <bean id="curator" class="com.att.nsa.cambria.utils.DMaaPCuratorFactory"
+ factory-method="getCurator">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPKafkaMetaBroker" class=" com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPZkClient" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ </bean>
+
+ <bean id="q" class=" com.att.nsa.cambria.backends.memory.MemoryQueue" />
+
+ <bean id="mmb" class=" com.att.nsa.cambria.backends.memory.MemoryMetaBroker">
+ <constructor-arg ref="q" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ <!-- <constructor-arg ref="propertyReader" />-->
+ </bean>
+
+ <bean id="dMaaPNsaApiDb" class="com.att.nsa.cambria.beans.DMaaPNsaApiDb"
+ factory-method="buildApiKeyDb">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ </bean>
+
+ <!-- <bean id="dMaaPTranDb" class="com.att.nsa.cambria.transaction.DMaaPTransactionDB"
+ factory-method="buildTransactionDb"> <constructor-arg ref="propertyReader"
+ /> <constructor-arg ref="dMaaPZkConfigDb" /> </bean> -->
+
+ <bean id="dMaaPAuthenticatorImpl" class="com.att.nsa.cambria.security.DMaaPAuthenticatorImpl">
+ <constructor-arg ref="dMaaPNsaApiDb" />
+ </bean>
+ <bean id="defLength" class="com.att.nsa.filter.DefaultLength">
+ <property name="defaultLength" value="${maxcontentlength}"></property>
+ </bean>
+</beans>
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/docs/README.txt b/src/main/ajsc/dmaap_v1/dmaap/v1/docs/README.txt
new file mode 100644
index 0000000..3707179
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/docs/README.txt
@@ -0,0 +1 @@
+Place any docs here that you want to access within the ajsc upon deployment of your service.
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/lib/README.txt b/src/main/ajsc/dmaap_v1/dmaap/v1/lib/README.txt
new file mode 100644
index 0000000..639e21b
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/lib/README.txt
@@ -0,0 +1 @@
+3rd party JARs needed by your jars (if any) for an AJSC deployment package go here... \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/props/module.props b/src/main/ajsc/dmaap_v1/dmaap/v1/props/module.props
new file mode 100644
index 0000000..17ebc08
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/props/module.props
@@ -0,0 +1 @@
+EXAMPLE.PROPERTY=EXAMPLE_VALUE \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/UIService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/UIService.route
new file mode 100644
index 0000000..d9a0fa9
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/UIService.route
@@ -0,0 +1,9 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///UI?matchOnUriPrefix=true" />
+ <to uri="cxfbean:UIService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService.route
new file mode 100644
index 0000000..fc0ab5c
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService.route
@@ -0,0 +1,10 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///admin?matchOnUriPrefix=true" />
+ <to uri="cxfbean:adminService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService2.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService2.route
new file mode 100644
index 0000000..8eb75f0
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/adminService2.route
@@ -0,0 +1,10 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///__module_ajsc_namespace_version__/admin?matchOnUriPrefix=true" />
+ <to uri="cxfbean:adminService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService.route
new file mode 100644
index 0000000..4d2f3b6
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService.route
@@ -0,0 +1,10 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///__module_ajsc_namespace_version__/apiKeys?matchOnUriPrefix=true" />
+ <to uri="cxfbean:apiKeyService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService2.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService2.route
new file mode 100644
index 0000000..e4a46d6
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/apiKeyService2.route
@@ -0,0 +1,9 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///apiKeys?matchOnUriPrefix=true" />
+ <to uri="cxfbean:apiKeyService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+ <removeHeader headerName="exception"/>
+
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/errorMessage.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/errorMessage.route
new file mode 100644
index 0000000..7c08576
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/errorMessage.route
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="servlet:/__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/errorMessageLookupService2"/>
+ <to uri="bean:errorMessageLookupService?method=getExceptionDetails"/>
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/eventService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/eventService.route
new file mode 100644
index 0000000..54613c4
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/eventService.route
@@ -0,0 +1,15 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+
+
+
+ <from uri="att-dme2-servlet:///events?matchOnUriPrefix=true" />
+ <to uri="cxfbean:eventService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+</route>
+
+ \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloCAET.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloCAET.route
new file mode 100644
index 0000000..0c147a3
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloCAET.route
@@ -0,0 +1,43 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+
+ <onException>
+ <exception>java.lang.Throwable</exception>
+
+ <setHeader headerName="AJSC_CAET_ERRORCODE">
+ <constant>CD-0001</constant>
+ </setHeader>
+ <setHeader headerName="AJSC_CAET_APPID">
+ <constant>AJSC</constant>
+ </setHeader>
+ <setHeader headerName="AJSC_CAET_MESSAGE_TEXT">
+			<constant>Unable to retrieve client details</constant>
+ </setHeader>
+ <setHeader headerName="AJSC_CAET_IS_REST_SERVICE">
+ <constant>Y</constant>
+ </setHeader>
+ <setHeader headerName="X-CSI-ClientApp">
+ <constant>AJSC-CSI</constant>
+ </setHeader>
+ <setHeader headerName="CALL_TYPE">
+ <constant>GATEWAY</constant>
+ </setHeader>
+
+ <to uri="bean:errorMessageLookupService?method=setCAETHeaders"/>
+
+ <removeHeaders pattern="AJSC_CAET*"/>
+
+ <handled>
+ <constant>true</constant>
+ </handled>
+ </onException>
+
+ <from uri="restlet:/__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/helloCAET"/>
+
+
+ <convertBodyTo type="java.lang.String"/>
+ <setBody>
+ <groovy>
+ throw new Exception("new Change")
+ </groovy>
+ </setBody>
+ </route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloServlet.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloServlet.route
new file mode 100644
index 0000000..5ede9c1
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloServlet.route
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="servlet:///__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/helloServlet?matchOnUriPrefix=true" />
+ <to uri="bean:helloWorld?method=speak"/>
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloWorld.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloWorld.route
new file mode 100644
index 0000000..bc3e178
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/helloWorld.route
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="restlet:/__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/helloWorld"/>
+ <to uri="bean:helloWorld?method=speak"/>
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/metricsService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/metricsService.route
new file mode 100644
index 0000000..704a452
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/metricsService.route
@@ -0,0 +1,9 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///metrics?matchOnUriPrefix=true" />
+ <to uri="cxfbean:metricsService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+</route> \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/mirrorService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/mirrorService.route
new file mode 100644
index 0000000..238f54d
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/mirrorService.route
@@ -0,0 +1,15 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+
+
+
+ <from uri="att-dme2-servlet:///mirrormakers?matchOnUriPrefix=true" />
+ <to uri="cxfbean:mirrorService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+</route>
+
+ \ No newline at end of file
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/topicService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/topicService.route
new file mode 100644
index 0000000..eddace7
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/topicService.route
@@ -0,0 +1,19 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true" >
+
+ <from uri="att-dme2-servlet:///topics?matchOnUriPrefix=true" />
+
+ <to uri="cxfbean:topicService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+
+<log message="Body= ** ${body}" loggingLevel="DEBUG" logName="com.att.nsa.dmaap"/>
+<log message="Body= ** ${body}" loggingLevel="DEBUG" logName="com.att.nsa.dmaap.service"/>
+
+</route>
+
+
+
diff --git a/src/main/ajsc/dmaap_v1/dmaap/v1/routes/transactionService.route b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/transactionService.route
new file mode 100644
index 0000000..8b043fc
--- /dev/null
+++ b/src/main/ajsc/dmaap_v1/dmaap/v1/routes/transactionService.route
@@ -0,0 +1,9 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///transaction?matchOnUriPrefix=true" />
+ <to uri="cxfbean:transactionService?providers=#jacksonProvider,#cambriaExMapper,#webExMapper" />
+ <setBody>
+ <simple>${in.header.exception}</simple>
+ </setBody>
+
+ <removeHeader headerName="exception"/>
+</route> \ No newline at end of file
diff --git a/src/main/assemble/ajsc_module_assembly.xml b/src/main/assemble/ajsc_module_assembly.xml
new file mode 100644
index 0000000..d894417
--- /dev/null
+++ b/src/main/assemble/ajsc_module_assembly.xml
@@ -0,0 +1,87 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>${version}</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/routes/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/routes/</outputDirectory>
+ <includes>
+ <include>*.route</include>
+ </includes>
+
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/docs/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/docs/</outputDirectory>
+ <includes>
+ <include>*.*</include>
+ <!-- <include>*.vm</include> -->
+ </includes>
+
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/lib/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+
+ </fileSet>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/extJars/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/extJars/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+
+ <!-- also try to grab outputs from the "jar" plugin's package phase -->
+ <fileSet>
+ <directory>${project.basedir}/target/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/conf/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/conf/</outputDirectory>
+ <includes>
+ <include>*.*</include>
+ </includes>
+
+ </fileSet>
+ </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_props_assembly.xml b/src/main/assemble/ajsc_props_assembly.xml
new file mode 100644
index 0000000..99c9ba6
--- /dev/null
+++ b/src/main/assemble/ajsc_props_assembly.xml
@@ -0,0 +1,44 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>${version}_properties</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/props</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/props/</outputDirectory>
+ <includes>
+ <include>*.props</include>
+ </includes>
+
+ </fileSet>
+
+ </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_runtime_assembly.xml b/src/main/assemble/ajsc_runtime_assembly.xml
new file mode 100644
index 0000000..2575d04
--- /dev/null
+++ b/src/main/assemble/ajsc_runtime_assembly.xml
@@ -0,0 +1,65 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>runtimeEnvironment</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/context/</directory>
+ <outputDirectory>runtime/context/</outputDirectory>
+ <includes>
+ <include>*.context</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/serviceProperties/</directory>
+ <outputDirectory>runtime/serviceProperties/</outputDirectory>
+ <includes>
+ <include>*.props</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroRole</directory>
+ <outputDirectory>runtime/shiroRole/</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroUser</directory>
+ <outputDirectory>runtime/shiroUser/</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroUserRole</directory>
+ <outputDirectory>runtime/shiroUserRole</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet>
+ </fileSets>
+</assembly> \ No newline at end of file
diff --git a/src/main/config/DMaaPErrorMesaages.properties b/src/main/config/DMaaPErrorMesaages.properties
new file mode 100644
index 0000000..a3d6ce7
--- /dev/null
+++ b/src/main/config/DMaaPErrorMesaages.properties
@@ -0,0 +1,59 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+###############################################################################
+##
+## DMaaP Error Messages
+###############################################################################
+
+##
+# Generic WebApplication Exceptions
+##
+resource.not.found=The requested resource was not found. Please verify the URL and try again.
+server.unavailable=Server is temporarily unavailable or busy. Try again later, or try another server in the cluster.
+http.method.not.allowed=The specified HTTP method is not allowed for the requested resource. Enter a valid HTTP method and try again.
+incorrect.request.json=Incorrect JSON object. Please correct the JSON format and try again.
+network.time.out=Connection to the DMaaP MR timed out. Please try again.
+
+##
+# AAF Errors
+##
+authentication.failure=Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.
+not.permitted.access.1=Access Denied. User does not have permission to perform
+not.permitted.access.2=operation on Topic:
+unable.to.authorize=Unable to authorize the user. Please try again later.
+
+
+##
+#Topic
+##
+get.topic.failure=Failed to retrieve list of all topics.
+get.topic.details.failure=Failed to retrieve details of topic:
+create.topic.failure=Failed to create topic:
+delete.topic.failure=Failed to delete topic:
+
+consume.msg.error=Error while reading data from topic.
+publish.msg.error=Error while publishing data to topic.
+msg_size_exceeds=Message size exceeds the default size.
+publish.msg.count=Number of messages successfully published:
+
+incorrect.json=Incorrect JSON object. Could not parse JSON. Please correct the JSON format and try again.
+topic.not.exist=No such topic exists. \ No newline at end of file
diff --git a/src/main/config/ajsc-chef.jks b/src/main/config/ajsc-chef.jks
new file mode 100644
index 0000000..69e5671
--- /dev/null
+++ b/src/main/config/ajsc-chef.jks
Binary files differ
diff --git a/src/main/config/ajsc-jetty.xml b/src/main/config/ajsc-jetty.xml
new file mode 100644
index 0000000..f5a1ef3
--- /dev/null
+++ b/src/main/config/ajsc-jetty.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+<Configure id="ajsc-server" class="org.eclipse.jetty.server.Server">
+ <!-- DO NOT REMOVE!!!! This is setting up the AJSC Context -->
+ <New id="ajscContext" class="org.eclipse.jetty.webapp.WebAppContext">
+ <Set name="contextPath"><SystemProperty name="AJSC_CONTEXT_PATH" /></Set>
+ <Set name="extractWAR">true</Set>
+ <Set name="tempDirectory"><SystemProperty name="AJSC_TEMP_DIR" /></Set>
+ <Set name="war"><SystemProperty name="AJSC_WAR_PATH" /></Set>
+ <Set name="descriptor"><SystemProperty name="AJSC_HOME" />/etc/runner-web.xml</Set>
+ <Set name="overrideDescriptor"><SystemProperty name="AJSC_HOME" />/etc/ajsc-override-web.xml</Set>
+ <Set name="throwUnavailableOnStartupException">true</Set>
+ <Set name="extraClasspath"><SystemProperty name="AJSC_HOME" />/extJars/json-20131018.jar,<SystemProperty name="AJSC_HOME" />/extJars/mail-1.4.7.jar</Set>
+ <Set name="servletHandler">
+ <New class="org.eclipse.jetty.servlet.ServletHandler">
+ <Set name="startWithUnavailable">false</Set>
+ </New>
+ </Set>
+ </New>
+
+ <Set name="handler">
+ <New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection">
+ <Set name="Handlers">
+ <Array type="org.eclipse.jetty.webapp.WebAppContext">
+ <Item>
+ <Ref refid="ajscContext" />
+ </Item>
+ </Array>
+ </Set>
+ </New>
+ </Set>
+
+ <Call name="addBean">
+ <Arg>
+ <New id="DeploymentManager" class="org.eclipse.jetty.deploy.DeploymentManager">
+ <Set name="contexts">
+ <Ref refid="Contexts" />
+ </Set>
+ <Call id="extAppHotDeployProvider" name="addAppProvider">
+ <Arg>
+ <New class="org.eclipse.jetty.deploy.providers.WebAppProvider">
+ <Set name="monitoredDirName"><SystemProperty name="AJSC_HOME" />/extApps</Set>
+ <Set name="scanInterval">10</Set>
+ <Set name="extractWars">true</Set>
+ </New>
+ </Arg>
+ </Call>
+ </New>
+ </Arg>
+ </Call>
+
+ <Call name="addConnector">
+ <Arg>
+ <New class="org.eclipse.jetty.server.ServerConnector">
+ <Arg name="server">
+ <Ref refid="ajsc-server" />
+ </Arg>
+ <Set name="port"><SystemProperty name="AJSC_HTTP_PORT" default="8080" /></Set>
+ </New>
+ </Arg>
+ </Call>
+
+
+ <!-- SSL Keystore configuration -->
+
+ <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory">
+ <Set name="KeyStorePath">/appl/dmaapMR1/bundleconfig/etc/keystore.jks</Set>
+ <Set name="KeyStorePassword">changeit</Set>
+ <Set name="KeyManagerPassword">changeit</Set>
+ </New>
+ <Call id="sslConnector" name="addConnector">
+ <Arg>
+ <New class="org.eclipse.jetty.server.ServerConnector">
+ <Arg name="server">
+ <Ref refid="ajsc-server" />
+ </Arg>
+ <Arg name="factories">
+ <Array type="org.eclipse.jetty.server.ConnectionFactory">
+ <Item>
+ <New class="org.eclipse.jetty.server.SslConnectionFactory">
+ <Arg name="next">http/1.1</Arg>
+ <Arg name="sslContextFactory">
+ <Ref refid="sslContextFactory" />
+ </Arg>
+ </New>
+ </Item>
+ <Item>
+ <New class="org.eclipse.jetty.server.HttpConnectionFactory">
+ <Arg name="config">
+ <New class="org.eclipse.jetty.server.HttpConfiguration">
+ <Call name="addCustomizer"> <Arg>
+ <New class="org.eclipse.jetty.server.SecureRequestCustomizer" />
+ </Arg>
+ </Call>
+ </New>
+ </Arg>
+ </New>
+ </Item>
+ </Array>
+ </Arg>
+ <Set name="port"><SystemProperty name="AJSC_HTTPS_PORT" default="0" /></Set>
+ <Set name="idleTimeout">30000</Set>
+ </New>
+ </Arg>
+ </Call>
+
+
+
+ <Get name="ThreadPool">
+ <Set name="minThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MIN" /></Set>
+ <Set name="maxThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MAX" /></Set>
+ <Set name="idleTimeout"><SystemProperty name="AJSC_JETTY_IDLETIME_MAX" /></Set>
+ <Set name="detailedDump">false</Set>
+ </Get>
+
+</Configure> \ No newline at end of file
diff --git a/src/main/config/ajsc-jolokia-override-web.xml b/src/main/config/ajsc-jolokia-override-web.xml
new file mode 100644
index 0000000..51b198d
--- /dev/null
+++ b/src/main/config/ajsc-jolokia-override-web.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ metadata-complete="false" version="3.0">
+
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/services/*</url-pattern>
+ </filter-mapping>
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/rest/*</url-pattern>
+ </filter-mapping>
+
+ <filter-mapping>
+ <filter-name>springSecurityFilterChain</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+ <servlet-mapping>
+ <servlet-name>ManagementServlet</servlet-name>
+ <url-pattern>/mgmt</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>RestletServlet</servlet-name>
+ <url-pattern>/rest/*</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>CamelServlet</servlet-name>
+ <url-pattern>/services/*</url-pattern>
+ </servlet-mapping>
+
+ <servlet>
+ <servlet-name>jolokia-agent</servlet-name>
+ <servlet-class>org.jolokia.http.AgentServlet</servlet-class>
+ <load-on-startup>2</load-on-startup>
+ </servlet>
+
+ <servlet-mapping>
+ <servlet-name>jolokia-agent</servlet-name>
+ <url-pattern>/jolokia/*</url-pattern>
+ </servlet-mapping>
+
+</web-app> \ No newline at end of file
diff --git a/src/main/config/ajsc-override-web.xml b/src/main/config/ajsc-override-web.xml
new file mode 100644
index 0000000..42652d4
--- /dev/null
+++ b/src/main/config/ajsc-override-web.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ metadata-complete="false" version="3.0">
+
+ <filter-mapping>
+ <filter-name>DMaaPAuthFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+ <filter-mapping>
+ <filter-name>WriteableRequestFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/dmaap/*</url-pattern>
+ </filter-mapping>
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/rest/*</url-pattern>
+ </filter-mapping>
+
+ <filter-mapping>
+ <filter-name>springSecurityFilterChain</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+ <servlet-mapping>
+ <servlet-name>ManagementServlet</servlet-name>
+ <url-pattern>/mgmt</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>RestletServlet</servlet-name>
+ <url-pattern>/rest/*</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>CamelServlet</servlet-name>
+ <url-pattern>/*</url-pattern>
+ </servlet-mapping>
+
+</web-app> \ No newline at end of file
diff --git a/src/main/config/ajsc-request.xml b/src/main/config/ajsc-request.xml
new file mode 100644
index 0000000..ed0a864
--- /dev/null
+++ b/src/main/config/ajsc-request.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<ns1:ErrorTranslationRequest
+ xsi:schemaLocation="http://csi.cingular.com/CSI/Namespaces/Types/Private/ErrorTranslationRequest.xsd ErrorTranslationRequest.xsd"
+ xmlns:ns1="http://csi.cingular.com/CSI/Namespaces/Types/Private/ErrorTranslationRequest.xsd"
+ xmlns:th="http://csi.cingular.com/CSI/Namespaces/Types/Private/Implementation/TransactionHeader.xsd"
+ xmlns:err="http://csi.cingular.com/CSI/Namespaces/Types/Public/ErrorResponse.xsd"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <ns1:TransactionHeader>
+ <th:applicationId></th:applicationId>
+ <th:MessageQualifier>
+ <th:messageQualifier></th:messageQualifier>
+ </th:MessageQualifier>
+ <th:activityCode></th:activityCode>
+ <th:sequenceNumber></th:sequenceNumber>
+ <th:extendedSequenceNumber></th:extendedSequenceNumber>
+ <th:creationDate></th:creationDate>
+ <th:transactionDate></th:transactionDate>
+ <th:timeToLive></th:timeToLive>
+ <th:reasonCode></th:reasonCode>
+ <th:systemId></th:systemId>
+ <th:operatorId></th:operatorId>
+ <th:reference></th:reference>
+ <th:replyToAddress></th:replyToAddress>
+ <th:originatorId></th:originatorId>
+ <th:atlasMessageId></th:atlasMessageId>
+ </ns1:TransactionHeader>
+ <ns1:operation></ns1:operation>
+ <ns1:ServiceEntityFault>
+ <err:reportingServiceEntity></err:reportingServiceEntity>
+ <err:faultDate></err:faultDate>
+ <err:faultSequenceNumber></err:faultSequenceNumber>
+ <err:faultLevel></err:faultLevel>
+ <err:faultCode></err:faultCode>
+ <err:faultDescription></err:faultDescription>
+ <err:ServiceProviderRawError>
+ <err:code></err:code>
+ <err:description></err:description>
+ <err:BISError>
+ <err:code></err:code>
+ <err:description></err:description>
+ <err:origination></err:origination>
+ <err:severity></err:severity>
+ </err:BISError>
+ </err:ServiceProviderRawError>
+ </ns1:ServiceEntityFault>
+ <ns1:conversationID></ns1:conversationID>
+ <ns1:partnerName></ns1:partnerName>
+ <ns1:isRESTService></ns1:isRESTService>
+</ns1:ErrorTranslationRequest> \ No newline at end of file
diff --git a/src/main/config/ajscJetty.jks b/src/main/config/ajscJetty.jks
new file mode 100644
index 0000000..8fb6328
--- /dev/null
+++ b/src/main/config/ajscJetty.jks
Binary files differ
diff --git a/src/main/config/cadi.properties b/src/main/config/cadi.properties
new file mode 100644
index 0000000..a36fcac
--- /dev/null
+++ b/src/main/config/cadi.properties
@@ -0,0 +1,67 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+#This properties file is used for defining AAF properties related to the CADI framework. This file is used for running the AAF framework
+#using the ajsc-cadi plugin. For more information on the ajsc-cadi plugin, please go to the wiki link:
+#http://wiki.web.att.com/display/ajsc/CADI-AJSC-Plugin
+#For more information on utilizing the AAF framework, please go to the wiki link:
+#AAF wiki link: http://wiki.web.att.com/display/aaf/AAF+Documentation
+
+#In order to test functionality of cadi-ajsc-plugin locally (pertaining to GLO (AT&T Global Login)), you will need an AT&T cross
+#domain cookie. Cadi "should" find your hostname for you. However, we have seen some situations where this fails. A local testing
+#modification can include modifying your hosts file so that you can use "mywebserver.att.com" for your localhost in order
+#to test/verify GLO functionality locally. If you are on a Windows machine, you will already have a machine name associated with
+#it that will utilize an AT&T domain such as "sbc.com". You may need to add your AT&T domain to this as a comma separated list depending
+#upon your particular machine domain. This property is commented out as cadi SHOULD find your machine name. With version 1.2.1 of cadi,
+#it appears to resolve Mac machine names as well now. But this can be somewhat inconsistent depending on your specific working environment.
+hostname=mywebserver.att.com
+
+#Setting csp_domain to PROD will allow for testing using your attuid and password through GLO.
+csp_domain=PROD
+csp_devl_localhost=true
+
+basic_realm=csp.att.com
+#basic_realm=aaf.att.com
+basic_warn=TRUE
+
+cadi_loglevel=DEBUG
+#cadi_keyfile=target/swm/package/nix/dist_files/appl/${artifactId}/etc/keyfile2
+cadi_keyfile=etc/keyfile
+# Configure AAF
+aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=TEST/routeOffer=BAU_SE
+
+#AJSC - MECHID
+
+aaf_id=<aaf_id>
+aaf_password=<aaf_password>
+
+
+aaf_timeout=5000
+aaf_clean_interval=1200000
+aaf_user_expires=60000
+aaf_high_count=1000000
+
+
+# Some Libs need System Property Sets (i.e. AT&T Discovery)
+# The following properties are being set by the AJSC Container and should NOT need to be set here.
+#AFT_LATITUDE=33.823589
+#AFT_LONGITUDE=-84.366982
+#AFT_ENVIRONMENT=AFTUAT \ No newline at end of file
diff --git a/src/main/config/caet.properties b/src/main/config/caet.properties
new file mode 100644
index 0000000..54305e4
--- /dev/null
+++ b/src/main/config/caet.properties
@@ -0,0 +1,25 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+#caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=3/envContext=TEST/routeOffer=TEST_CAET
+#caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=3/envContext=TEST/routeOffer=D3A_CAET
+#caet_service=dme2://DME2RESOLVE/service=com.att.csid.CAET/version=4.0/envContext=TEST/routeOffer=TEST_CAET
+caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=4.0/envContext=TEST/routeOffer=TEST_CAET \ No newline at end of file
diff --git a/src/main/config/hazelcast-client.properties b/src/main/config/hazelcast-client.properties
new file mode 100644
index 0000000..981a1ef
--- /dev/null
+++ b/src/main/config/hazelcast-client.properties
@@ -0,0 +1,46 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+#
+# Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+hazelcast.client.group.name = ajsc
+hazelcast.client.group.pass = ajscpass
+hazelcast.client.connection.timeout = 30000
+hazelcast.client.connection.attempts.limit = 3
+hazelcast.client.reconnection.timeout = 5000
+hazelcast.client.reconnection.attempts.limit= 5
+hazelcast.client.shuffle.addresses = false
+hazelcast.client.update.automatic = true
+hazelcast.client.addresses = localhost, 127.0.0.1 \ No newline at end of file
diff --git a/src/main/config/jul-redirect.properties b/src/main/config/jul-redirect.properties
new file mode 100644
index 0000000..b025248
--- /dev/null
+++ b/src/main/config/jul-redirect.properties
@@ -0,0 +1,34 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+
+# Bridge JUL->slf4j Logging Configuration File
+#
+# This file bridges the JUL logging infrastructure into
+# SLF4J so JUL logs go to logback implementation provided
+# in this project. SLF4J also captures log4j and has
+# other framework options as well providing a common
+# logging infrastructure for capturing all logs from different
+# libraries using different frameworks in one place.
+
+# Global properties
+handlers=org.slf4j.bridge.SLF4JBridgeHandler
+.level= ALL
diff --git a/src/main/config/keyfile b/src/main/config/keyfile
new file mode 100644
index 0000000..f086f84
--- /dev/null
+++ b/src/main/config/keyfile
@@ -0,0 +1,27 @@
+I2AQRRjEe4nCZvHUxYeJorPx8cli8KdeCVGl_N5kOSX5R_rI3eKNJDGzpGu3y4jLAxVerSfrnxDp
+tyTUlBEUhTIfpgIcMutY3vZZysMUz8no7tjBe2fWupW72b7d3IK-gCOlb8kla0R7ZchwS3tOZ36j
+p9h1MHcVOTUFiAzwNsgHHOgSPPAA800btWm0Fkrik4NZns_RXCnWW5Mlt9-UEeCT02Cyauf9rhuI
+2edZbR0pb9VXZohCo70qcrg7UJXFl-SwMsqNMXCMnxIy0X_CM_gHwU29whPUGBjekTWqZTauEEZi
+CHrB4uizYOLjB1rH6bOriXsxey5hGx9oCt-6Ixhunj1VEOg_WSiwmpJCtamcb6fL6GcTNOkDcPpg
+PQgaviBOOn0aL6WHzeZl2O1FjeNwOQp0DNvuUx8K4aNVDP3-ZLourwMQaLADIuux4ZRHRZnMYTGZ
+zkUWMI8rge5l7CaqMN8BIZlYJZk5I6DM4ct8uA8UVKzJtf6zS71uCSMV_nzW2-qJQ_vO-E-vJwBO
+bvee6Rq46BE6q3oLDCH3e687QryDxoB7maKoycZSTeQvWQDt3VpmLXoQjHoDFXwsXA3BnIBWa522
+MBhGZC3xD8z4JXbM3rLmpPVeTFQJBR1Rkn0TNj1baa0av3PySZFUAF9x3PkIYqC6dl-jY4HrTUlg
+41NuhCYv-JSL2To7SFkPzK5hXEQuWmuDxXJkQso2-P8_qwUWEDXs6aDvwR3CmNCyN8n6IKkJF167
+1OHZ4QvVbI1XdUUAG3_2ZSENDGlItjjlwCfMeyjxH9SOJhWNm26YDH3lQ008A67jkoiooUgTudKF
+5aE3msIz0j57icW1HOevRN2qE1KmlHPD45cXMJND8G6vl7ZLR9m96SkLuE_QslVtv_Ue2ONEvmfF
+AhrERhhlRzLNCy6vpEvnkjhp1ffF0tkABGL2CuoJFagVCHDfL8Ki7wUEb1z8kmNk7Wlb6eHM5W2c
+__BtydiXioTNjbSzBJVjJTYRk5wU8TLPm6IIO2WL4uXiYM-11w_gAW-R9YNoeAe3o0VwFRHlRsUF
+IOv7sqOQ6ue2J5kcK9XHpmn8sQqrd2npKCpKL3hveLh6EL8NzE4clMvr87QakRKRpwsDXG4Vjv_F
+SvJy5RHawjFiFSi6cdBefZNThBqSpGg_5HVjUrmcioBTGoMBeJw2XJEE-ghhvImfPd9ypqZCU_SG
+tdDWbay7SPigQASXtjy1lYR7yG9-ibv00uu2TTU-wSWm3iu1_cA67LmsfiTjOWYFAS9agBX-Zkb5
+q555ysrQwd4FPZByxosLbB-31CIDQbnce2SiZjswlTTgY8rSStYyJ39vWA-G5ogUzuUKNiJcNerY
+5ii4YWPL-5OuVSPN2VwBlr6Af2iZpuGhCxknKkOa88a4rTgGKtvWb6b7FTrJKR_LH36RNThuWF5d
+ar8bLXc0m_0WdmvHxuTHWRkFgGB05sZYarZUk8vPF32YDtrqsd4ZV_rmSfPBVfqHItXPOnLxG7Fv
+hZ0mavL5-3jnrn_dlODgqGEolVvwHelNaukeU4_ZKa84uGG2QV7epWHflcTxoD7GoURihY4ATQgt
+eNi7Q-4kHiwObG6PMhzhVlBBoQArR4zICyC9nXUrVIt_MHtDWqbLZUpGQISvV_jnOsJ3wkcWxNoi
+jZGe9H-UddfWV_sxmRgHamsNHF9Ee_bwOU6sY48MPv8yO6aHCoPhuE4eh3DCsd5cU44nc3isqrGD
+hyYJmz1BD6IH4QlQO0daMCY8mkuF1zV5OsdropHvjaN7DSuplpJy2uMLtvNUJJxRXh8vD4GF68aL
+9A1d7fNcfYQxP0Lux0LjaPUERAF6xaePbO0Qop2oiO77qRs6qsDFGdgosUfXa9JrBtwUAU5Q9O4R
+5MvAeqxJ8fux6HJMOl9DOfMPC58Uxzi3C0ERKcaORTBVQJGauzySZ6Dwk9JOlZ0CXEGbXZeYThYe
+bR2YrN3xoWjrgB4gEtqHLhNCfLvnZtMtoSw4CjynRHXlIayBUMBtrhJ4TjpdTvba9PVNuleT \ No newline at end of file
diff --git a/src/main/config/keyfile1 b/src/main/config/keyfile1
new file mode 100644
index 0000000..20d6254
--- /dev/null
+++ b/src/main/config/keyfile1
@@ -0,0 +1,27 @@
+Y9xUekNIvxqllfhTgQy3Jnw99wr32caQRQ5ZF1q3jI7jH0AWHxjeIf_5LvFNGBtxRFWtPvA69G5t
+h8Pm6dwh8Fh8jqIzlyr0gY7H4tMWhOGLG20SFE5HgsnlkGS1obAZ30EJIDmmAqP0R4EpdnRWDA_S
+BIAWYqiRlI23aqtRcIP321W9f6dHfyU_9MNMi3ZdMqg6bH5MQXpNw6ZPqs089QVa8Fhmna1dZa2o
+96Al7aN7Sxc3J_AP0zfwgC4JtvDG0Ie-F-SYX3EJ-WACcdCpmxIIwgfCE9P02vapzQ5l6tAFlr00
+A3hTFOvR6EDw_UzNR3mQflKIS5mAlxt0d7lFubRSjHxf9Oal5137cAb4I28Lyitqrp8L6rnkMdr1
+sJSR0fHTvawBoGFfdRIUzKLAbXWF7RYD807k18fhhq3SIKi0ZdHVLnwqsbjvMCEQD9dlPvfV0U95
+lGxzlYtWtmn_BplHgeZAJ-0F2__5I9fcSiRFHL5C5-30Seax25ydj4JaIVAwss77z7e0oKef1W2W
+NA0jp1Pyhm-G0UVrT-MG0d_PX3IO2sYNwLYRsgz35_UgOnxJdev7pl6VAhPrfeWui4wXwt76Fyto
+VmM6ENkHAEsYe0UzI9zIANz2IVJUD8HQJAFLDdh_bC6ME4qcUcOWrhSGlzQ8NZnXDnPqtcOA1UvH
+R6CGsR5mMlW6OYLiu1l0QNcx96-J3APd_Va6HGn6I4kAYlPXgn51M8bQmUmBBPTTzN3PRxea87pY
+eWYjFW4b4gITiS_XfhyrSPAV9aNiqqfJxvmkbfC7XZRFIZGpsSUuUaHxtGGKqMbSWr6Gl8OxuqVQ
+y7lUdvdBNevtkq1I1i0RY_fyURfPkWFrzFVlGH-trht4TwNicCVofldHZe8ihBSKmYvfwjGdWs3h
+7C4lAUg4WCKApfDKjEbj_0X8UcM78U_rtrgauH-YhxqJgHDC9LzLXzwzyLk-Ky2SFYCO7z6qXOa7
+wLW2UYesi6Bz2lk_jinBx5ZNA8UW5Oyyqgte3OcpJ3geNmAvUUGYVqEFSVwVoVw5IynE8SPitA59
+bihLwGSA35oONrsaLmYH5ftioKPD1v1ac-hUDLsljam4riEQkDB2TyVQmk9JZcDuuPgG0yPnUgZE
+yse83-CHztmm3GJp69WkrVv1_s9KxaWEVKx2UODNmZ1B4_nvsMuAPUZca9LWK6HTIvlH3RsNW0eA
+VBsNVdXs7jaJFOyyFyYg2mQMQRG6hhJIV9ay4cHLQe7kF-qOJkCl9luuOBuhn7QyHPH3VVfCUKHE
+buRqwfPSXuYtnixPlIgZQI6m2JHEZPyStYnZ6ykbSHqLuAwesDVJCbrD6DRQ3NLkhPCrhKy2gTSf
+aIf0ipkFtghQsKExYCCedNs7jDd-2jVhYeYLyqBXV-WEXwSJFjo44WaXQz9vCBvCtkZjsX325_ST
+5irVViW0iTvRQcw0wcb6vyyzSEf0nJOrNJpe5VzxKH-W2YXZD-XAF5fkfyudqQKO5NNIVa5d1zMP
+YgCxww4iQKKGHtNQ7eooxbogpGQuWmupO3iZAkF52s7Frin6PnVU25LGiietG-9xppsTiCYTs5K1
+z2UbTvOXTx1fIlRCbDZAz7T4oXAzIKAdvKiEZvxMqb9aeiUvbgCxm7rOBCNajWwIKkWzYIfH1ajq
+KQRIvj5QDFppe7Fx5ZTBRC7Eq_9jyno7PKwOGoIbZjtcuigHlH4DNm3rRiVNoejRRb_q_xa5U22A
+ce5j2fSHzM2J6cdvWD9BGiaFIOGku02h4LNk7QI6HYa62Kv-QNJ0HRtugFjdDUuWB6fLNr-60dfz
+HDONZsa8oTuJ72jZIMIZmyCcEVLxAUA4DFCT-LkLDAQCEL6tBdyy3CBcFzLeR1HFbt6TxCEm8JS0
+GA3HXvSimoL0K8f3AzYFoVZ6NY897rlJ3_aICIw-6sfiLBmm1aowgfIbnRFhx5V1FEUUSG33XqSX
+XdbF7nTRDl5woGChrDomCUXvuz1gXujDqccyclA2-v03u6mSGel3XVBAsJfhs1oJP0rSTNm_ \ No newline at end of file
diff --git a/src/main/config/keyfile2 b/src/main/config/keyfile2
new file mode 100644
index 0000000..d744844
--- /dev/null
+++ b/src/main/config/keyfile2
@@ -0,0 +1,27 @@
+rrtJzL4AdOdm6TfWi0644LRIZyXv4YYGyVNMANeDlVw_StIxrV_059ry3BsqUgSa07O-O82MzV6q
+WSftVjmHYdL3Qxz9epjMt7PkcTDHaYLqURcOKSZ2dH7WGhKVCwD-z8R7hSAyMQRUWPRErciaPyq_
+R913Xsj6dcACLHPjV1nQm2uyrdTRv10k6wF02GoaN3FjFo-0QjVWxq6Y1jjxmiXksbjdgvgPUonG
+vJxzI7ip8SgfLl4Y-fpNYW4nb_KhTuSghxAeMi0GjYIe_7YmfY7oDBtzwFMszsK_q9NBHrOF9N9k
+Rskb6iGAML--E5KO5varMWJ4n3xVNA5_y0CdezKBEOin-eNmnwS-vrsT06wmYa1uZPP3haRE0OQF
+XcoPdN0gxPwWrUBPZCaEHS5grbaJYIrFl8en5WYpB_iEVJ2f5TYYM6xzoVU3CAjQqwcuczufyTV9
+L1XUbZbYJFFHYLhxXJXeJ1rHqVrZ9AKchjgtxL6qSLV-w0Vq2l2IwMwK-5dXgbQsfNSWNjM0p9hZ
+8SrfmUT2Ixp4nNk4_CoFujdWv1iCD3Kr-jtV8TX2GvsYoW_9ExvoVl1TJFfqk2A5sOuBaCptNLhg
+DUkcW-sf_BpACwJz-FMJtHhsX5NQECNFkl-_bPA1J3FT49pzw9yxoqdUHB2wdcptLKw-VXJK17xU
+QHGC3k53nKm64yyCw2Ex6yXu8cwvNjolJBiED0qr3vHGFkchjlj7fgEpnnAXI_4UiyKguOGj1ywa
+SAa_azv1W3NewcfpdqeWrLjpl1Hme5-M3gBDoBsGoOePLR9aNDk5la9eg3-6E4jpCv9HlVQ-_MmQ
+syP-idHyV0WrUUpldmUOSWQqZR8nQOU4JJM0eZoR5Cn5b2R0xidXn556SC3lbuWFOzgJh19_4tXb
+0kRnTLW3B-Uj-68KWfB5xUOUlaIXqDGKSSMlzFagZBseQ02fNmgduZ8h3UE2nCvJWQ2radadFnug
+NGpBrxCji9jIqBHUOSKqKHepm3_fRXrur-gwVsovNqOYjcLEXia5IlCMM0M80VdmcxpoizlbjzUh
+xRb1vyhLXCUZv6ZFAbFjCCmJLcWD_OWOYhTBYiQk6Uu1mEdCFbz6Wxhtt7Zugse6DpCuurC2lPsC
+gSHbnsfHDDZokbiKaU4pzTow0LqDvb7TodatGlZI7Ts4vMj-YA_vrlFPcIXcTzYMDZwj5AV3_hZT
+K8W2xTpyDSo3Jo6UB_k_ka6vw82WyKLBW0yvV20LDhokd5aChdtKOU1B05p4iMyhEpiAVeIyaDcg
+1iaW2kIRfK8UXo-ptYITSomYrS8_1QqS2LhhltXc73Rzwc0-Ati9qyMS-wtRvYOQra9Hff_y8Xcw
+Y-w2r3g4MISoXNrrVhZviUVhctpTrhYMinW4-AQ9wcFGbu58C6wH7NhbNztysEKF8sCMA2Jq5G8A
+XrjDY-KiM1k2C2vQxvo4GBKfKfZb4HqIpmc7lDuYH-T7Wo-rnAClzqB_VRc64Rn4GKJih6r7jPjs
+-HZUReQ7M7oBXe-X925FpGGYpabpJGLpk4h9WbgHy0iXiZfuDW4KRhBNZ9ZVt7fOa3AwZxHuZfoJ
+ydlVLpuHZ4ENzdNu5q1ycfvr-h3THJd665IerG1Del-By1vrZfqsVdyzEvOqQ30R7NDfBXnk2xJp
+oc_ituvuM_N0L27PH3-YFxXynjUZV2EH8OD3w7WB2Kq8Xi7-cbjqCOKwrjLBoDHBp2UGY47e1yb4
+B5MFHGkWdYZ_4e8mhSsqY5mRWAxS7eOe9NOw_xdrsVBarGgLXTq_WTueVlhEYCC-hHbCr838S6QX
+Q8xJiRpIJGDHY2cvV3Rob9wR7wk4C-cErMRFQHLPQTmpcMDTZ4JknJ_79POiw6DbZ1xloihlM-P4
+cBiHxyb7IeiVsonVutgVo9ViLfYAvJ3gYs9L5Rc5XhNRiNpYVZq__O7VBW7riUF2GlHLURUZPjgq
+8CXroMsnKvGfB-OhcOVxuDO8P5odl28ogwtVf0TGOcaUcPlFySJcOp4DDx-O2erWcBXZ1fHX \ No newline at end of file
diff --git a/src/main/config/keyfile_old b/src/main/config/keyfile_old
new file mode 100644
index 0000000..6a1657e
--- /dev/null
+++ b/src/main/config/keyfile_old
@@ -0,0 +1,27 @@
+ctRt8XTd7N57kcm0npZOWSDF5I69w9K97cQS_ep0AgxgHmYB0WtYblsrMGuHfyS1o4697zLiIeoS
+Nn5kE1kedl4c4HevfuwfoJpWyiugYusNOqbTGQJ1MHOwqiBEJnjXepZEoz1btaW_hDO7uz-BoD4t
+SxwNRwVQpcg0_CmBX-yIW2YCIECoxZH9_X_8fcXYHP2VgFxxBpvjgycNQlyN15_VSuLwn3Wj0W8_
+8chRxGURyhp8iEBSb4tIdN5jXkhCma7AP7wreMufFQqXjdfWqIisJPfIpS3znl5IiTOZP22XhHay
+gq2KFwABVqjM71m5czEz1ojGkbFEAGImrY-VFHuug2u4ss4VW7TGeJst0z7I5vrn5M6i9Eb6xiGh
+jNUebRCV3cYGrtD9SlvjJBVVeP_3OrkxlD4oktx-JTRJzYtXADB5if2gtpYxy84kqrz7ltr5rXUH
+zSG7ujKCXOOE_Wk6vQPSjYPnum6R_mxOorCNCvtf6ne85Xd81DZlJM-CleVNdOU7g1xie-gBZPAX
+bOvWf6p_pVNmH76v-m4XLAAUqEzt-9PvNmirODiDiY5bNz6l-1ejw8IyQYb37e_3sN_LjF7A9HgB
+Dia7kNjsfB7_2vB7R4qjwNLsmTMnQCDANnNpl9VpotZ4blPhhOWhB1Tg3lxc-z-VRV7GBbl_2eQd
+3eYUT1Z5Li184W4-pft_TCaDJ1NyaJd1CQxQEuIORdq5B6Q2L9SMmmOOh82Czu5_Ro80IGikHXHp
+Lqf2fIaceY_IBAeGp2iPjtXdkghV24vIT49oRfqf6sBKAPy-88xILnMWM6M5bMCETKn7UvM1kV5y
+ZQYlsi-36n73ETZyiFs1PLqe8D6dRURrcBG_B9i1MafNiWa-elG6E0X0pSK9CadchSA0KRMaKtfE
+6-iyUqE-bx-0ELTbV2y7gLdu5MVtjRmQB5ozoaBq8ik4-jAWAsKpTv4DfWoMp9DkRENlKeauayuT
+j_VAGhqy07pIntQKtbK9EP0tndSKtF3WLwHel1I5C3lthhkxxfzpxURBxO1ZJMFJZ6rLu1Ku03zw
+LJ7nFFR_YfJ7tnGZE4PEt7MOZNiNoD3__9PthO5HmZdk1gPMrKlojU1hyR3IlbVShUst6rA3MkWk
+MD-zlw9mhNgaV3xvPJ945pYPe4C6qIwxXoiXGHyhv_0MpcvuMW-pUuAZXfkuiqNwQnpUTLBD0YJw
+uwMbE7sN40e6-BSxEiMOab7s2gShbaK9JjCMQUH_vAuQSZjU4sn53jsS7U4DHntzgxVYttIwGZaU
+b-1R7jYphNJnCI8rPB_xjJ0OMssNKT7lYRgG_ZuKvifYvJWt-NwD0z2qoePcRGExXuioRDNR4SlB
+-RN33dYhp6vRsHKT1oLpl-UJB6dqJlZ2dCsfc7vT1Vs0SYidRYXCUJNBSePI4-1LMlHKOqGASBcg
+pl589601-EtO7ch3RoaL26rNXzA-umUWYRPQPZ76wcgK2j4k5Ndub5dWK9jI6UW3RbF6ixe0Yw2j
+_Pipt4EX8R6-sb87D69JOOnZlFVB6EcCO07Q7j6DavpUNHlLmDmPgArqODh002scvW1ryMxBR2XE
+m3kGQh2IFh5Qru8duxblEYE-lmHGxXVgDtKiKgHwPTkaxcquEtZTEJxaIJIgoKj7SgMzdfbeLlJM
+RwbdvExmnRT9ivFImeIV7ACPnfBP3URd82kTG8FyiMvSpdCLL16FWOd9gjZuMstqZrmIVF8tO2WT
+COMIx-jqvQD2zS1Ul5p0szJaf-CxBjy7-cJIaAyEToR1T5bBFtQt4sEFxG7XG0cCoXShqclL70TV
+W13X5pY55YwHkCR4mRjc0o0ZKStY3OADVLFom1bC9AmMBqU4PsKNAX29LT37WE-I23tQgzid0Ix9
+JuVzlbOTvi19uLYbltrHavU3UbVhYxNNI7Y7tM02xfq3LhGqZG5EPS-WAB9bBixHQqw78cd9iqIr
+hHlZW80l1kgs1ezMqgxfwDuiFOZIu9UWQ6vSnTAvfhwJhcr77gSk5Gu957uxzleaS4gVwTYU
diff --git a/src/main/config/runner-web.xml b/src/main/config/runner-web.xml
new file mode 100644
index 0000000..a94cbe6
--- /dev/null
+++ b/src/main/config/runner-web.xml
@@ -0,0 +1,119 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ metadata-complete="false" version="3.0">
+
+ <context-param>
+ <param-name>contextConfigLocation</param-name>
+ <param-value>/WEB-INF/spring-servlet.xml,
+ classpath:applicationContext.xml
+ </param-value>
+ </context-param>
+
+ <context-param>
+ <param-name>spring.profiles.default</param-name>
+ <param-value>nooauth</param-value>
+ </context-param>
+
+ <listener>
+ <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+ </listener>
+
+ <servlet>
+ <servlet-name>ManagementServlet</servlet-name>
+ <servlet-class>ajsc.ManagementServlet</servlet-class>
+ </servlet>
+
+ <filter>
+ <filter-name>WriteableRequestFilter</filter-name>
+ <filter-class>com.att.ajsc.csi.writeablerequestfilter.WriteableRequestFilter</filter-class>
+ </filter>
+
+ <filter>
+ <filter-name>InterceptorFilter</filter-name>
+ <filter-class>ajsc.filters.InterceptorFilter</filter-class>
+ <init-param>
+ <param-name>preProcessor_interceptor_config_file</param-name>
+ <param-value>/etc/PreProcessorInterceptors.properties</param-value>
+ </init-param>
+ <init-param>
+ <param-name>postProcessor_interceptor_config_file</param-name>
+ <param-value>/etc/PostProcessorInterceptors.properties</param-value>
+ </init-param>
+
+ </filter>
+
+<!-- Content length filter for Msgrtr -->
+ <filter>
+ <display-name>DMaaPAuthFilter</display-name>
+ <filter-name>DMaaPAuthFilter</filter-name>
+ <filter-class>com.att.nsa.dmaap.util.DMaaPAuthFilter</filter-class>
+ <init-param>
+ <param-name>cadi_prop_files</param-name>
+<!-- param-name was changed from prop_files to cadi_prop_files in 1.1.1, so please use "cadi_prop_files" for param-name moving forward -->
+<!-- old value <param-name>prop_files</param-name> -->
+ <!-- Please, note: configuration files were located under the conf/ directory for 4.4.0 and lower versions
+ of the ajsc. Beginning with the 4.5.1 Release version of the ajsc, these configuration files have ALL
+ been moved to the etc/ directory to better utilize some SWM packaging functionality. -->
+ <!-- <param-value>conf/cadi.properties</param-value> -->
+ <!-- <param-value>etc/cadi.properties</param-value> -->
+ <!-- <param-value>etc/cadi.properties </param-value>-->
+ <param-value>/appl/dmaapMR1/etc/cadi.properties </param-value>
+ </init-param>
+ </filter>
+
+ <!-- End Content length filter for Msgrtr -->
+ <servlet>
+ <servlet-name>RestletServlet</servlet-name>
+ <servlet-class>ajsc.restlet.RestletSpringServlet</servlet-class>
+ <init-param>
+ <param-name>org.restlet.component</param-name>
+ <param-value>restletComponent</param-value>
+ </init-param>
+ </servlet>
+
+ <servlet>
+ <servlet-name>CamelServlet</servlet-name>
+ <servlet-class>ajsc.servlet.AjscCamelServlet</servlet-class>
+ </servlet>
+
+
+ <filter>
+ <filter-name>springSecurityFilterChain</filter-name>
+ <filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
+ </filter>
+
+ <servlet>
+ <servlet-name>spring</servlet-name>
+ <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
+ <load-on-startup>1</load-on-startup>
+ </servlet>
+
+ <servlet-mapping>
+ <servlet-name>spring</servlet-name>
+ <url-pattern>/</url-pattern>
+ </servlet-mapping>
+
+</web-app>
diff --git a/src/main/config/template.lrm.xml b/src/main/config/template.lrm.xml
new file mode 100644
index 0000000..c727b6f
--- /dev/null
+++ b/src/main/config/template.lrm.xml
@@ -0,0 +1,147 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<ns2:ManagedResourceList xmlns="http://scld.att.com/lrm/commontypes"
+ xmlns:ns2="http://scld.att.com/lrm/util" xmlns:ns3="http://scld.att.com/lrm/types">
+ <ns2:ManagedResource>
+ <ResourceDescriptor>
+ <ResourceName>__SOA_CLOUD_NAMESPACE__.${artifactId}</ResourceName>
+ <ResourceVersion>
+ <Major>__MAJOR_VERSION__</Major>
+ <Minor>__MINOR_VERSION__</Minor>
+ <Patch>__PATCH_VERSION__</Patch>
+ </ResourceVersion>
+ <RouteOffer>__AFT_SERVICE_ENV__</RouteOffer>
+ </ResourceDescriptor>
+ <ResourceType>Java</ResourceType>
+ <ResourceContainerType>Ajsc</ResourceContainerType>
+ <ResourceContainerTypeVendor>ATT</ResourceContainerTypeVendor>
+ <ResourcePath>__INSTALL_ROOT__${distFilesRoot}</ResourcePath>
+ <ResourceProps>
+ <Tag>process.path</Tag>
+ <Value>/usr/bin:/usr/sbin:${PATH}</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>process.workdir</Tag>
+ <Value>__INSTALL_ROOT__${distFilesRoot}</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>process.libpath</Tag>
+ <Value>${LD_LIBRARY_PATH}</Value>
+ </ResourceProps>
+
+ <!-- The following process.args are for your context, port, and sslport.
+ NOTE: The "context" argument will set your context. context=/ will run your service under a "root" context.
+ Port selection is NOT necessary and should NOT be hardcoded if you are choosing to utilize ephemeral ports.
+ If NO port values are given, the AJSC will default to running on ephemeral ports (for both your http port
+ AND your https port), and your port will be selected for you. -->
+
+ <!-- Add JMX port -->
+
+ <ResourceProps>
+ <Tag>jmx.port</Tag>
+ <Value>__JMX_PORT_MRS__</Value>
+ </ResourceProps>
+
+
+ <!-- End -->
+
+ <ResourceProps>
+ <Tag>process.args</Tag>
+ <Value>context=__AJSC_CONTEXT__ port=__AJSC_SVC_PORT__ sslport=__AJSC_SSL_PORT__</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>jvm.version</Tag>
+ <Value>__JAVA_VERSION__</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>jvm.classpath</Tag>
+ <Value>:.:${CLASSPATH}:__AJSC_SHARED_CONFIG__/etc:lib/*:</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>jvm.args.pre</Tag>
+ <Value>__PRE_JVM_ARGS__ -XX:MaxPermSize=__MAX_PERM_SIZE__
+ -XX:PermSize=__PERM_SIZE__
+ __INTROSCOPE_VARS__
+ -DSOACLOUD_SERVICE_VERSION=__AFTSWM_ACTION_NEW_VERSION__
+ -DAJSC_HOME=__INSTALL_ROOT__${distFilesRoot}
+ -DAJSC_CONF_HOME=__AJSC_CONF_HOME__
+ -DAJSC_SHARED_CONFIG=__AJSC_SHARED_CONFIG__
+ __POST_JVM_ARGS__
+ __SCLD_OPTIONAL_PLATFORM_FLAG__
+ </Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>jvm.heap.min</Tag>
+ <Value>__MIN_HEAP_SIZE__</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>jvm.heap.max</Tag>
+ <Value>__MAX_HEAP_SIZE__</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>start.class</Tag>
+ <Value>com.att.ajsc.runner.Runner</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>stdout.redirect</Tag>
+ <Value>log/stdout.log</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>stderr.redirect</Tag>
+ <Value>log/stdout.log</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>validatePID.waitime.seconds</Tag>
+ <Value>__LRM_VALIDATEPID_WAITTIME_SECONDS__</Value>
+ </ResourceProps>
+ <ResourceProps>
+ <Tag>mbean.name</Tag>
+ <!-- <Value>JmxInterface:type=Ajsc,JmxInterface:type=DME2</Value> -->
+ <Value>JmxInterface:type=DME2</Value>
+ </ResourceProps>
+ <ResourceOSID>${installOwnerUser}</ResourceOSID>
+ <ResourceStartType>__LRM_RESOURCE_START_TYPE__</ResourceStartType>
+ <ResourceStartPriority>__LRM_START_PRIORITY__</ResourceStartPriority>
+ <ResourceStartTimeout>__LRM_START_TIMEOUT__</ResourceStartTimeout>
+ <ResourceMinCount>__RESOURCE_MIN_COUNT__</ResourceMinCount>
+ <ResourceMaxCount>__RESOURCE_MAX_COUNT__</ResourceMaxCount>
+ <ResourceMaxRestart>__LRM_RESOURCE_MAX_RESTART__</ResourceMaxRestart>
+ <ResourceHeartBeat>__LRM_RESOURCE_HEARTBEAT__</ResourceHeartBeat>
+ <ResourceHeartBeatFailedLimit>__LRM_RESOURCE_HEARTBEAT_FAILED_LIMIT__</ResourceHeartBeatFailedLimit>
+ <ResourceHeartBeatTimeout>__LRM_RESOURCE_HEARTBEAT_TIMEOUT__</ResourceHeartBeatTimeout>
+ <ResourceShutdownWaitTimeInSecs>__RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS__</ResourceShutdownWaitTimeInSecs>
+ <ResourceRegistration>__LRM_RESOURCE_REGISTRATION__</ResourceRegistration>
+ <GroupName>${installOwnerGroup}</GroupName>
+ <ResourceErrorNotify>
+ <NotifyListEntry>
+ <Loglevel>WARNING</Loglevel>
+ <EmailList>__CLDLRM_WARNING_NOTIFY__</EmailList>
+ </NotifyListEntry>
+ <NotifyListEntry>
+ <Loglevel>SEVERE</Loglevel>
+ <EmailList>__CLDLRM_SEVERE_NOTIFY__</EmailList>
+ </NotifyListEntry>
+ </ResourceErrorNotify>
+ </ns2:ManagedResource>
+</ns2:ManagedResourceList>
diff --git a/src/main/java/com/att/nsa/dmaap/DMaaPCambriaExceptionMapper.java b/src/main/java/com/att/nsa/dmaap/DMaaPCambriaExceptionMapper.java
new file mode 100644
index 0000000..53c3bed
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/DMaaPCambriaExceptionMapper.java
@@ -0,0 +1,143 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+
+import javax.inject.Singleton;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+
+/**
+ * Exception Mapper class to handle
+ * CambriaApiException
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
+
+/**
+ * Error response obj
+ */
+ private ErrorResponse errRes;
+
+/**
+ * Logger obj
+ */
+
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
+
+
+ /**
+ * Error msg obj
+ */
+ @Autowired
+ private DMaaPErrorMessages msgs;
+
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest req;
+
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse res;
+
+ /**
+ * Contructor for DMaaPCambriaExceptionMapper
+ */
+ public DMaaPCambriaExceptionMapper() {
+ super();
+ LOGGER.info("Cambria Exception Mapper Created..");
+ }
+
+ /**
+ * The toResponse method is called when
+ * an exception of type CambriaApiException
+ * is thrown.This method will send a custom error
+ * response to the client.
+ */
+ @Override
+ public Response toResponse(CambriaApiException ex) {
+
+ LOGGER.info("Reached Cambria Exception Mapper..");
+
+ /**
+ * Cambria Generic Exception
+ */
+ if(ex instanceof CambriaApiException)
+ {
+
+ errRes = ex.getErrRes();
+ if(errRes!=null) {
+
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+ else
+ {
+
+ Response response = Response.status(ex.getStatus()).header("exception",
+ ex.getMessage()).build();
+
+ return response;
+ }
+
+
+ }
+ else
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
+
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+
+ }
+
+
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/DMaaPWebExceptionMapper.java b/src/main/java/com/att/nsa/dmaap/DMaaPWebExceptionMapper.java
new file mode 100644
index 0000000..7a9d0ba
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/DMaaPWebExceptionMapper.java
@@ -0,0 +1,202 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+
+import javax.inject.Singleton;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.InternalServerErrorException;
+import javax.ws.rs.NotAllowedException;
+import javax.ws.rs.NotAuthorizedException;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.ServiceUnavailableException;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+
+/**
+ * Exception Mapper class to handle
+ * Web Exceptions
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
+
+ /**
+ * Logger obj
+ */
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
+ /**
+ * Error response obj
+ */
+ private ErrorResponse errRes;
+ /**
+ * Error msg obj
+ */
+ @Autowired
+ private DMaaPErrorMessages msgs;
+
+ /**
+ * Contructor for DMaaPWebExceptionMapper
+ */
+ public DMaaPWebExceptionMapper() {
+ super();
+ LOGGER.info("WebException Mapper Created..");
+ }
+
+ /**
+ * The toResponse method is called when
+ * an exception of type WebApplicationException
+ * is thrown.This method will send a custom error
+ * response to the client
+ */
+ @Override
+ public Response toResponse(WebApplicationException ex) {
+
+ LOGGER.info("Reached WebException Mapper");
+
+ /**
+ * Resource Not Found
+ */
+ if(ex instanceof NotFoundException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.
+ getResponseCode(),msgs.getNotFound());
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+
+ }
+ /**
+ * Internal Server Error
+ */
+ if(ex instanceof InternalServerErrorException)
+ {
+
+ int errCode = HttpStatus.SC_INTERNAL_SERVER_ERROR;
+ int dmaapErrCode = DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode();
+ String errMsg = msgs.getServerUnav();
+
+
+ if(ex.getCause().toString().contains("Json")) {
+ errCode = HttpStatus.SC_BAD_REQUEST;
+ dmaapErrCode = DMaaPResponseCode.INCORRECT_JSON.getResponseCode();
+ errMsg = ex.getCause().getMessage().substring(0, ex.getCause().getMessage().indexOf("[Source")-3);
+ }
+ else if (ex.getCause().toString().contains("UnrecognizedPropertyException")) {
+ errCode = HttpStatus.SC_BAD_REQUEST;
+ dmaapErrCode = DMaaPResponseCode.INCORRECT_JSON.getResponseCode();
+ errMsg = ex.getCause().getMessage().substring(0, ex.getCause().getMessage().indexOf("[Source")-3);
+ }
+ errRes = new ErrorResponse(errCode,dmaapErrCode,errMsg);
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+
+ }
+ /**
+ * UnAuthorized
+ */
+ if(ex instanceof NotAuthorizedException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.
+ getResponseCode(),msgs.getAuthFailure());
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+ /**
+ * Malformed request
+ */
+ if(ex instanceof BadRequestException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.
+ getResponseCode(),msgs.getBadRequest());
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+ /**
+ * HTTP Method not allowed
+ */
+ if(ex instanceof NotAllowedException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.
+ getResponseCode(),msgs.getMethodNotAllowed());
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+
+ /**
+ * Server unavailable
+ */
+ if(ex instanceof ServiceUnavailableException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.
+ getResponseCode(),msgs.getServerUnav());
+
+ LOGGER.info(errRes.toString());
+ Response response = Response.status(errRes.getHttpStatusCode()).header("exception",
+ errRes.getErrMapperStr()).build();
+
+ return response;
+ }
+
+
+
+ return Response.serverError().build();
+ }
+
+
+
+
+}
+
diff --git a/src/main/java/com/att/nsa/dmaap/HelloWorld.java b/src/main/java/com/att/nsa/dmaap/HelloWorld.java
new file mode 100644
index 0000000..7dc2e0c
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/HelloWorld.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+import org.apache.camel.Exchange;
+
+/**
+ * Hello World Sample Camel Service
+ * @author author
+ *
+ */
+public class HelloWorld {
+ public HelloWorld () {
+ }
+ /**
+ * speak method
+ * @param e exchange
+ */
+ public final void speak(Exchange e) {
+ e.setOut(e.getIn());
+ e.getOut().setBody("Hello World!");
+ }
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/JaxrsEchoService.java b/src/main/java/com/att/nsa/dmaap/JaxrsEchoService.java
new file mode 100644
index 0000000..9fcef98
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/JaxrsEchoService.java
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+
+import com.att.ajsc.beans.PropertiesMapBean;
+import com.att.nsa.dmaap.filemonitor.ServicePropertiesMap;
+
+/**
+ * Example JAX-RS Service
+ * @author author
+ *
+ */
+@Path("/jaxrs-services")
+public class JaxrsEchoService {
+
+ /**
+ * Logger obj
+ */
+ /*private static final Logger LOGGER = Logger
+ .getLogger(JaxrsEchoService.class);*/
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(JaxrsEchoService.class);
+
+ /**
+ * Method ping
+ * @param input input
+ * @return str
+ */
+ @GET
+ @Path("/echo/{input}")
+ @Produces("text/plain")
+ public String ping(@PathParam("input") String input) {
+ return "Hello, " + input + ".";
+ }
+
+ /**
+ * Method to fetch property
+ * @param fileName file
+ * @param input input
+ * @return prop
+ */
+ @GET
+ @Path("/property/{fileName}/{input:.*}")
+ @Produces("text/plain")
+ public String getProperty(@PathParam("fileName") String fileName, @PathParam("input") String input) {
+ String val=null;
+ try {
+ val = ServicePropertiesMap.getProperty(fileName, input);
+ if(val == null || val.isEmpty() || val.length() < 1){
+ val = PropertiesMapBean.getProperty(fileName, input);
+ }
+ }
+ catch(Exception ex) {
+ LOGGER.info("*** Error retrieving property "+input+": "+ex);
+
+ }
+ if (val ==null) {
+ return "Property is not available";
+ }
+ return "Property value is, " + val +".";
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/JaxrsUserService.java b/src/main/java/com/att/nsa/dmaap/JaxrsUserService.java
new file mode 100644
index 0000000..2724a51
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/JaxrsUserService.java
@@ -0,0 +1,59 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import java.util.Map;
+import java.util.HashMap;
+
+/**
+ * Example JAX-RS User Service
+ * @author author
+ *
+ */
+@Path("/user")
+public class JaxrsUserService {
+
+ private static final Map<String,String> userIdToNameMap;
+ static {
+ userIdToNameMap = new HashMap<String,String>();
+ userIdToNameMap.put("user1","User One");
+ userIdToNameMap.put("user2","User Two");
+ }
+
+ /**
+ * Method to fetch user details
+ * @param userId user
+ * @return userDetails
+ */
+ @GET
+ @Path("/{userId}")
+ @Produces("text/plain")
+ public String lookupUser(@PathParam("userId") String userId) {
+ String name = userIdToNameMap.get(userId);
+ return name != null ? name : "unknown id";
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesListener.java b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesListener.java
new file mode 100644
index 0000000..8333332
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesListener.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.filemonitor;
+
+import java.io.File;
+
+//import com.att.ssf.filemonitor.FileChangedListener;
/**
 * Class ServicePropertiesListener
 *
 * File-change callback that reloads a changed property file into
 * {@link ServicePropertiesMap}. It previously implemented the SSF
 * FileChangedListener interface, which is commented out because that
 * dependency is not available in this build — TODO confirm whether the
 * SSF file-monitor wiring is meant to return.
 * @author author
 *
 */
public class ServicePropertiesListener /*implements FileChangedListener*/ {

	/**
	 * Update method
	 *
	 * Invoked when {@code file} changes; delegates the reload to
	 * {@link ServicePropertiesMap#refresh(java.io.File)}.
	 *
	 * @param file the property file that changed
	 * @throws Exception propagated from the map refresh (broad contract
	 *         inherited from the commented-out listener interface)
	 */
	//@Override
	public void update(File file) throws Exception
	{
		ServicePropertiesMap.refresh(file);
	}
}
diff --git a/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesMap.java b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesMap.java
new file mode 100644
index 0000000..731428d
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertiesMap.java
@@ -0,0 +1,126 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.filemonitor;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * ServicePropertiesMap class
+ * @author author
+ *
+ */
+@SuppressWarnings("squid:S1118")
+public class ServicePropertiesMap
+{
+ private static HashMap<String, HashMap<String, String>> mapOfMaps =
+ new HashMap<String, HashMap<String, String>>();
+// static final Logger logger = LoggerFactory.getLogger(ServicePropertiesMap.class);
+
+ private static final EELFLogger logger = EELFManager.getInstance().getLogger(ServicePropertiesMap.class);
+ /**
+ * refresh method
+ * @param file file
+ * @throws Exception ex
+ */
+ public static void refresh(File file) throws Exception
+ {
+ try
+ {
+ logger.info("Loading properties - " + (file != null?file.getName():""));
+
+ //Store .json & .properties files into map of maps
+ String filePath = file.getPath();
+
+ if(filePath.lastIndexOf(".json")>0){
+
+ ObjectMapper om = new ObjectMapper();
+ TypeReference<HashMap<String, String>> typeRef =
+ new TypeReference<HashMap<String, String>>() {};
+ HashMap<String, String> propMap = om.readValue(file, typeRef);
+ HashMap<String, String> lcasePropMap = new HashMap<String, String>();
+ for (String key : propMap.keySet() )
+ {
+ String lcaseKey = ifNullThenEmpty(key);
+ lcasePropMap.put(lcaseKey, propMap.get(key));
+ }
+
+ mapOfMaps.put(file.getName(), lcasePropMap);
+
+
+ }else if(filePath.lastIndexOf(".properties")>0){
+ Properties prop = new Properties();
+ FileInputStream fis = new FileInputStream(file);
+ prop.load(fis);
+
+ @SuppressWarnings("unchecked")
+ HashMap<String, String> propMap = new HashMap<String, String>((Map)prop);
+
+ mapOfMaps.put(file.getName(), propMap);
+ }
+
+ logger.info("File - " + file.getName() + " is loaded into the map and the "
+ + "corresponding system properties have been refreshed");
+ }
+ catch (Exception e)
+ {
+ logger.error("File " + (file != null?file.getName():"") + " cannot be loaded into the map ", e);
+ throw new Exception("Error reading map file " + (file != null?file.getName():""), e);
+ }
+ }
+ /**
+ * Get property
+ * @param fileName fileName
+ * @param propertyKey propertyKey
+ * @return str
+ */
+ public static String getProperty(String fileName, String propertyKey)
+ {
+ HashMap<String, String> propMap = mapOfMaps.get(fileName);
+ return propMap!=null?propMap.get(ifNullThenEmpty(propertyKey)):"";
+ }
+ /**
+ * get properties
+ * @param fileName fileName
+ * @return mapProp
+ */
+ public static HashMap<String, String> getProperties(String fileName){
+ return mapOfMaps.get(fileName);
+ }
+
+ private static String ifNullThenEmpty(String key) {
+ if (key == null) {
+ return "";
+ } else {
+ return key;
+ }
+ }
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertyService.java b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertyService.java
new file mode 100644
index 0000000..e4f4e03
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/filemonitor/ServicePropertyService.java
@@ -0,0 +1,164 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.filemonitor;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+import javax.annotation.PostConstruct;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+
+//import com.att.ssf.filemonitor.FileChangedListener;
+//import com.att.ssf.filemonitor.FileMonitor;
+
+/**
+ * ServicePropertyService class
+ * @author author
+ *
+ */
+public class ServicePropertyService {
+ private boolean loadOnStartup;
+ private ServicePropertiesListener fileChangedListener;
+ private ServicePropertiesMap filePropertiesMap;
+ private String ssfFileMonitorPollingInterval;
+ private String ssfFileMonitorThreadpoolSize;
+ private List<File> fileList;
+ private static final String FILE_CHANGE_LISTENER_LOC = System
+ .getProperty("AJSC_CONF_HOME") + "/etc";
+ private static final String USER_CONFIG_FILE = "service-file-monitor.properties";
+
+ private static final EELFLogger logger = EELFManager.getInstance().getLogger(ServicePropertyService.class);
+
+ // do not remove the postConstruct annotation, init method will not be
+ // called after constructor
+ /**
+ * Init method
+ * @throws Exception ex
+ */
+ @PostConstruct
+ public void init() throws Exception {
+
+ try {
+ getFileList(FILE_CHANGE_LISTENER_LOC);
+
+// for (File file : fileList) {
+// FileChangedListener fileChangedListener = this.fileChangedListener;
+// Object filePropertiesMap = this.filePropertiesMap;
+// Method m = filePropertiesMap.getClass().getMethod(
+// "refresh", File.class);
+// m.invoke(filePropertiesMap, file);
+// FileMonitor fm = FileMonitor.getInstance();
+// fm.addFileChangedListener(file, fileChangedListener,
+// loadOnStartup);
+//
+// }
+ } catch (Exception ex) {
+ logger.error("Error creating property map ", ex);
+ }
+
+ }
+
+ private void getFileList(String dirName) throws IOException {
+ File directory = new File(dirName);
+ FileInputStream fis = null;
+
+ if (fileList == null)
+ fileList = new ArrayList<File>();
+
+ // get all the files that are ".json" or ".properties", from a directory
+ // & it's sub-directories
+ File[] fList = directory.listFiles();
+
+ for (File file : fList) {
+ // read service property files from the configuration file
+ if (file.isFile() && file.getPath().endsWith(USER_CONFIG_FILE)) {
+ try {
+ fis = new FileInputStream(file);
+ Properties prop = new Properties();
+ prop.load(fis);
+
+ for (String filePath : prop.stringPropertyNames()) {
+ fileList.add(new File(prop.getProperty(filePath)));
+ }
+ } catch (Exception ioe) {
+ logger.error("Error reading the file stream ", ioe);
+ } finally {
+ fis.close();
+ }
+ } else if (file.isDirectory()) {
+ getFileList(file.getPath());
+ }
+ }
+
+ }
+
+ public void setLoadOnStartup(boolean loadOnStartup) {
+ this.loadOnStartup = loadOnStartup;
+ }
+
+ public void setSsfFileMonitorPollingInterval(
+ String ssfFileMonitorPollingInterval) {
+ this.ssfFileMonitorPollingInterval = ssfFileMonitorPollingInterval;
+ }
+
+ public void setSsfFileMonitorThreadpoolSize(
+ String ssfFileMonitorThreadpoolSize) {
+ this.ssfFileMonitorThreadpoolSize = ssfFileMonitorThreadpoolSize;
+ }
+
+ public boolean isLoadOnStartup() {
+ return loadOnStartup;
+ }
+
+ public String getSsfFileMonitorPollingInterval() {
+ return ssfFileMonitorPollingInterval;
+ }
+
+ public String getSsfFileMonitorThreadpoolSize() {
+ return ssfFileMonitorThreadpoolSize;
+ }
+
+ public ServicePropertiesListener getFileChangedListener() {
+ return fileChangedListener;
+ }
+
+ public void setFileChangedListener(
+ ServicePropertiesListener fileChangedListener) {
+ this.fileChangedListener = fileChangedListener;
+ }
+
+ public ServicePropertiesMap getFilePropertiesMap() {
+ return filePropertiesMap;
+ }
+
+ public void setFilePropertiesMap(ServicePropertiesMap filePropertiesMap) {
+ this.filePropertiesMap = filePropertiesMap;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/mmagent/CreateMirrorMaker.java b/src/main/java/com/att/nsa/dmaap/mmagent/CreateMirrorMaker.java
new file mode 100644
index 0000000..92aca38
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/mmagent/CreateMirrorMaker.java
@@ -0,0 +1,43 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.mmagent;
+
+public class CreateMirrorMaker {
+ String messageID;
+ MirrorMaker createMirrorMaker;
+
+ public MirrorMaker getCreateMirrorMaker() {
+ return createMirrorMaker;
+ }
+
+ public void setCreateMirrorMaker(MirrorMaker createMirrorMaker) {
+ this.createMirrorMaker = createMirrorMaker;
+ }
+
+ public String getMessageID() {
+ return messageID;
+ }
+
+ public void setMessageID(String messageID) {
+ this.messageID = messageID;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/mmagent/MirrorMaker.java b/src/main/java/com/att/nsa/dmaap/mmagent/MirrorMaker.java
new file mode 100644
index 0000000..f9e6d89
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/mmagent/MirrorMaker.java
@@ -0,0 +1,70 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.mmagent;
+
/**
 * Plain data holder describing a MirrorMaker instance: its name,
 * consumer and producer configuration strings, topic whitelist,
 * and current status.
 */
public class MirrorMaker {
	// NOTE(review): fields are public as well as bean-accessible; callers may
	// rely on direct field access, so visibility is kept unchanged.
	public String name;
	public String consumer;
	public String producer;
	public String whitelist;
	public String status;

	/** @return current status string */
	public String getStatus() {
		return this.status;
	}

	/** @param newStatus current status string */
	public void setStatus(String newStatus) {
		this.status = newStatus;
	}

	/** @return mirror maker name */
	public String getName() {
		return this.name;
	}

	/** @param newName mirror maker name */
	public void setName(String newName) {
		this.name = newName;
	}

	/** @return consumer configuration string */
	public String getConsumer() {
		return this.consumer;
	}

	/** @param newConsumer consumer configuration string */
	public void setConsumer(String newConsumer) {
		this.consumer = newConsumer;
	}

	/** @return producer configuration string */
	public String getProducer() {
		return this.producer;
	}

	/** @param newProducer producer configuration string */
	public void setProducer(String newProducer) {
		this.producer = newProducer;
	}

	/** @return topic whitelist expression */
	public String getWhitelist() {
		return this.whitelist;
	}

	/** @param newWhitelist topic whitelist expression */
	public void setWhitelist(String newWhitelist) {
		this.whitelist = newWhitelist;
	}
}
diff --git a/src/main/java/com/att/nsa/dmaap/mmagent/UpdateMirrorMaker.java b/src/main/java/com/att/nsa/dmaap/mmagent/UpdateMirrorMaker.java
new file mode 100644
index 0000000..4d291f3
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/mmagent/UpdateMirrorMaker.java
@@ -0,0 +1,43 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.mmagent;
+
+public class UpdateMirrorMaker {
+ String messageID;
+ MirrorMaker updateMirrorMaker;
+
+ public MirrorMaker getUpdateMirrorMaker() {
+ return updateMirrorMaker;
+ }
+
+ public void setUpdateMirrorMaker(MirrorMaker updateMirrorMaker) {
+ this.updateMirrorMaker = updateMirrorMaker;
+ }
+
+ public String getMessageID() {
+ return messageID;
+ }
+
+ public void setMessageID(String messageID) {
+ this.messageID = messageID;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/mmagent/UpdateWhiteList.java b/src/main/java/com/att/nsa/dmaap/mmagent/UpdateWhiteList.java
new file mode 100644
index 0000000..616dc85
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/mmagent/UpdateWhiteList.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.mmagent;
+
+public class UpdateWhiteList {
+
+ String messageID;
+ MirrorMaker updateWhiteList;
+
+ public MirrorMaker getUpdateWhiteList() {
+ return updateWhiteList;
+ }
+
+ public void setUpdateWhiteList(MirrorMaker updateWhiteList) {
+ this.updateWhiteList = updateWhiteList;
+ }
+
+ public String getMessageID() {
+ return messageID;
+ }
+
+ public void setMessageID(String messageID) {
+ this.messageID = messageID;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/service/AdminRestService.java b/src/main/java/com/att/nsa/dmaap/service/AdminRestService.java
new file mode 100644
index 0000000..5201dc8
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/AdminRestService.java
@@ -0,0 +1,293 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+import java.util.Enumeration;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+//import org.apache.log4j.Logger;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.service.AdminService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * Rest Service class
+ * for Admin Services
+ * @author author
+ *
+ */
+@Component
+@Path("/")
+public class AdminRestService {
+
+ /**
+ * Logger obj
+ */
+ //private static final Logger LOGGER = Logger
+ // .getLogger(AdminRestService.class);
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AdminRestService.class);
+ /**
+ * Config Reader
+ */
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest request;
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse response;
+ /**
+ * AdminService obj
+ */
+ @Autowired
+ private AdminService adminService;
+
+ /**
+ * Fetches a list of all the registered consumers along with their created
+ * time and last accessed details
+ *
+ * @return consumer list in json string format
+ * @throws CambriaApiException
+ * @throws AccessDeniedException
+ * @throws IOException
+ * */
+ @GET
+ @Path("/consumerCache")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void getConsumerCache() throws CambriaApiException, AccessDeniedException {
+ LOGGER.info("Fetching list of registered consumers.");
+ try {
+ adminService.showConsumerCache(getDMaaPContext());
+ LOGGER.info("Fetching Consumer Cache Successfully");
+ } catch (IOException e) {
+ LOGGER.error("Error while Fetching list of registered consumers : "
+ + e.getMessage(), e);
+
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GET_CONSUMER_CACHE.getResponseCode(),
+ "Error while Fetching list of registered consumers " + e.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+ /**
+ * Clears consumer cache
+ * @throws CambriaApiException ex
+ * @throws AccessDeniedException
+ *
+ * @throws IOException ex
+ * @throws JSONException ex
+ * */
+ @POST
+ @Path("/dropConsumerCache")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void dropConsumerCache() throws CambriaApiException, AccessDeniedException {
+ LOGGER.info("Dropping consumer cache");
+ try {
+ adminService.dropConsumerCache(getDMaaPContext());
+ LOGGER.info("Dropping Consumer Cache successfully");
+ } catch ( AccessDeniedException excp) {
+ LOGGER.error("Error while dropConsumerCache : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.GET_BLACKLIST.getResponseCode(),
+ "Error while Fetching list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (JSONException | IOException e) {
+ LOGGER.error(
+ "Error while Dropping consumer cache : " + e.getMessage(),
+ e);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.DROP_CONSUMER_CACHE.getResponseCode(),
+ "Error while Dropping consumer cache " + e.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+ /**
+ * Get list of blacklisted ips
+ * @throws CambriaApiException excp
+ */
+ @GET
+ @Path("/blacklist")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void getBlacklist() throws CambriaApiException {
+ LOGGER.info("Fetching list of blacklist ips.");
+ try {
+ Enumeration headerNames = getDMaaPContext().getRequest().getHeaderNames();
+ while (headerNames.hasMoreElements()) {
+ String key = (String) headerNames.nextElement();
+ String value = request.getHeader(key);
+
+ }
+
+ adminService.getBlacklist(getDMaaPContext());
+ LOGGER.info("Fetching list of blacklist ips Successfully");
+ }catch ( AccessDeniedException excp) {
+ LOGGER.error("Error while Fetching list of blacklist ips : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.GET_BLACKLIST.getResponseCode(),
+ "Error while Fetching list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch ( IOException excp) {
+ LOGGER.error("Error while Fetching list of blacklist ips : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GET_BLACKLIST.getResponseCode(),
+ "Error while Fetching list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+
+ }
+
+ /**
+ * Add ip to list of blacklist ips
+ * @param ip ip
+ * @throws CambriaApiException excp
+ */
+ @POST
+ @Path("/blacklist/{ip}")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void addToBlacklist (@PathParam("ip") String ip ) throws CambriaApiException
+ {
+ LOGGER.info("Adding ip to list of blacklist ips.");
+ try {
+ adminService.addToBlacklist(getDMaaPContext(), ip);
+ LOGGER.info("Fetching list of blacklist ips Successfully");
+ } catch ( AccessDeniedException excp) {
+ LOGGER.error("Error while blacklist : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.GET_BLACKLIST.getResponseCode(),
+ "Error while Fetching list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (IOException | ConfigDbException excp) {
+ LOGGER.error("Error while adding ip to list of blacklist ips : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.ADD_BLACKLIST.getResponseCode(),
+ "Error while adding ip to list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+
+ }
+ /**
+ * Remove ip from blacklist
+ * @param ip ip
+ * @throws CambriaApiException excp
+ * @throws AccessDeniedException excp
+ * @throws ConfigDbException excp
+ */
+ @DELETE
+ @Path("/blacklist/{ip}")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void removeFromBlacklist(@PathParam("ip") String ip) throws CambriaApiException, AccessDeniedException, ConfigDbException {
+ LOGGER.info("Fetching list of blacklist ips.");
+ try {
+ adminService.removeFromBlacklist(getDMaaPContext(), ip);
+ LOGGER.info("Fetching list of blacklist ips Successfully");
+ }catch ( AccessDeniedException excp) {
+ LOGGER.error("Error while blacklist : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.GET_BLACKLIST.getResponseCode(),
+ "Error while removeFromBlacklist list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (IOException | ConfigDbException excp) {
+ LOGGER.error("Error while removing ip from list of blacklist ips : "
+ + excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.REMOVE_BLACKLIST.getResponseCode(),
+ "Error while removing ip from list of blacklist ips " + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ * Create a dmaap context
+ * @return DMaaPContext
+ */
+ private DMaaPContext getDMaaPContext() {
+ DMaaPContext dmaaPContext = new DMaaPContext();
+ dmaaPContext.setConfigReader(configReader);
+ dmaaPContext.setRequest(request);
+ dmaaPContext.setResponse(response);
+ return dmaaPContext;
+ }
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/service/ApiKeysRestService.java b/src/main/java/com/att/nsa/dmaap/service/ApiKeysRestService.java
new file mode 100644
index 0000000..9f04a1f
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/ApiKeysRestService.java
@@ -0,0 +1,254 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.ApiKeyBean;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.service.ApiKeysService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * This class is a CXF REST service
+ * which acts as gateway for Cambria Api
+ * Keys.
+ * @author author
+ *
+ */
+@Component
+@Path("/")
+public class ApiKeysRestService {
+
+ /**
+ * Logger obj
+ */
+ //private Logger log = Logger.getLogger(ApiKeysRestService.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysRestService.class);
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest request;
+
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse response;
+
+ /**
+ * Config Reader
+ */
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ /**
+ * ApiKeysService obj
+ */
+ @Autowired
+ private ApiKeysService apiKeyService;
+
+ /**
+ * Returns a list of all the existing Api keys
+ * @throws CambriaApiException
+ *
+ * @throws IOException
+ * */
+ @GET
+ public void getAllApiKeys() throws CambriaApiException {
+
+ log.info("Inside ApiKeysRestService.getAllApiKeys");
+
+ try {
+ apiKeyService.getAllApiKeys(getDmaapContext());
+ log.info("Fetching all API keys is Successful");
+ } catch (ConfigDbException | IOException e) {
+ log.error("Error while retrieving API keys: " + e);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GENERIC_INTERNAL_ERROR.getResponseCode(),
+ "Error while retrieving API keys: "+ e.getMessage());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+ }
+
+ /**
+ * Returns details of a particular api key whose <code>name</code> is passed
+ * as a parameter
+ *
+ * @param apiKeyName
+ * - name of the api key
+ * @throws CambriaApiException
+ * @throws IOException
+ * */
+ @GET
+ @Path("/{apiKey}")
+ public void getApiKey(@PathParam("apiKey") String apiKeyName) throws CambriaApiException {
+ log.info("Fetching details of api key: " + apiKeyName);
+
+ try {
+ apiKeyService.getApiKey(getDmaapContext(), apiKeyName);
+ log.info("Fetching specific API key is Successful");
+ } catch (ConfigDbException | IOException e) {
+ log.error("Error while retrieving API key details: " + e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GENERIC_INTERNAL_ERROR.getResponseCode(),
+ "Error while retrieving API key details: "+ e.getMessage());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+
+
+ /**
+ * Creates api key using the <code>email</code> and <code>description</code>
+ *
+ * @param nsaApiKey
+ * @throws CambriaApiException
+ * @throws JSONException
+ * */
+ @POST
+ @Path("/create")
+ @Consumes(MediaType.APPLICATION_JSON)
+ public void createApiKey(ApiKeyBean nsaApiKey) throws CambriaApiException, JSONException {
+ log.info("Creating Api Key.");
+
+ try {
+ apiKeyService.createApiKey(getDmaapContext(), nsaApiKey);
+ log.info("Creating API key is Successful");
+ } catch (KeyExistsException | ConfigDbException | IOException e) {
+ log.error("Error while Creating API key : " + e.getMessage(), e);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GENERIC_INTERNAL_ERROR.getResponseCode(),
+ "Error while Creating API key : "+ e.getMessage());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+ }
+
+ /**
+ * Updates an existing apiKey using the key name passed a parameter and the
+ * details passed.
+ *
+ * @param apiKeyName
+ * - name of the api key to be updated
+ * @param nsaApiKey
+ * @throws CambriaApiException
+ * @throws JSONException
+ * @throws IOException
+ * @throws AccessDeniedException
+ * */
+ @PUT
+ @Path("/{apiKey}")
+ public void updateApiKey(@PathParam("apiKey") String apiKeyName,
+ ApiKeyBean nsaApiKey) throws CambriaApiException, JSONException {
+ log.info("Updating Api Key.");
+
+ try {
+
+ apiKeyService
+ .updateApiKey(getDmaapContext(), apiKeyName, nsaApiKey);
+ log.error("API key updated sucessfully");
+ } catch (ConfigDbException | IOException | AccessDeniedException e) {
+ log.error("Error while Updating API key : " + apiKeyName, e);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GENERIC_INTERNAL_ERROR.getResponseCode(),
+ "Error while Updating API key : "+ e.getMessage());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ * Deletes an existing apiKey using the key name passed as a parameter.
+ *
+ * @param apiKeyName
+ * - name of the api key to be updated
+ * @throws CambriaApiException
+ * @throws IOException
+ * @throws AccessDeniedException
+ * */
+ @DELETE
+ @Path("/{apiKey}")
+ public void deleteApiKey(@PathParam("apiKey") String apiKeyName) throws CambriaApiException {
+ log.info("Deleting Api Key: " + apiKeyName);
+ try {
+ apiKeyService.deleteApiKey(getDmaapContext(), apiKeyName);
+ log.info("Api Key deleted successfully: " + apiKeyName);
+ } catch (ConfigDbException | IOException | AccessDeniedException e) {
+ log.error("Error while deleting API key : " + apiKeyName, e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.GENERIC_INTERNAL_ERROR.getResponseCode(),
+ "Error while deleting API key : "+ e.getMessage());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ * Create a dmaap context
+ * @return DMaaPContext
+ */
+ private DMaaPContext getDmaapContext() {
+ DMaaPContext dmaapContext = new DMaaPContext();
+ dmaapContext.setConfigReader(configReader);
+ dmaapContext.setRequest(request);
+ dmaapContext.setResponse(response);
+ return dmaapContext;
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/service/EventsRestService.java b/src/main/java/com/att/nsa/dmaap/service/EventsRestService.java
new file mode 100644
index 0000000..cda431c
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/EventsRestService.java
@@ -0,0 +1,313 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.service.EventsService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.cambria.utils.Utils;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
+/**
+ * This class is a CXF REST service which acts
+ * as gateway for MR Event Service.
+ * @author author
+ *
+ */
+@Component
+@Path("/")
+public class EventsRestService {
+
+ /**
+ * Logger obj
+ */
+ //private Logger log = Logger.getLogger(EventsRestService.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(EventsRestService.class);
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest request;
+
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse response;
+
+
+ /**
+ * Config Reader
+ */
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ @Autowired
+ private EventsService eventsService;
+
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ /**
+ * This method is used to consume messages.Taking three parameter
+ * topic,consumerGroup and consumerId .Consumer decide to which topic they
+ * want to consume messages.In on consumer Group there might be many
+ * consumer may be present.
+ *
+ * @param topic
+ * specify- the topic name
+ * @param consumergroup
+ * - specify the consumer group
+ * @param consumerid
+ * -specify the consumer id
+ *
+ * handles CambriaApiException | ConfigDbException |
+ * TopicExistsException | AccessDeniedException |
+ * UnavailableException | IOException in try catch block
+ * @throws CambriaApiException
+ *
+ */
+ @GET
+ @Path("/{topic}/{consumergroup}/{consumerid}")
+ public void getEvents(@PathParam("topic") String topic, @PathParam("consumergroup")
+ String consumergroup,
+ @PathParam("consumerid") String consumerid) throws CambriaApiException {
+ // log.info("Consuming message from topic " + topic );
+ DMaaPContext dMaaPContext = getDmaapContext();
+ dMaaPContext.setConsumerRequestTime(Utils.getFormattedDate(new Date()));
+
+ try {
+
+ eventsService.getEvents(dMaaPContext, topic, consumergroup, consumerid);
+ }
+ catch (TopicExistsException e) {
+ log.error("Error while reading data from topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+ DMaaPResponseCode.CONSUME_MSG_ERROR.getResponseCode(), errorMessages.getConsumeMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic, null, null,
+ consumerid,
+ request.getRemoteHost());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ catch (DMaaPAccessDeniedException | AccessDeniedException e) {
+ log.error("Error while reading data from topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.CONSUME_MSG_ERROR.getResponseCode(), errorMessages.getConsumeMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic, null, null,
+ consumerid,
+ request.getRemoteHost());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+
+ catch (ConfigDbException | UnavailableException | IOException e) {
+ log.error("Error while reading data from topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.CONSUME_MSG_ERROR.getResponseCode(), errorMessages.getConsumeMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic, null, null,
+ consumerid,
+ request.getRemoteHost());
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ * This method is used to publish messages.Taking two parameter topic and
+ * partition.Publisher decide to which topic they want to publish message
+ * and kafka decide to which partition of topic message will send,
+ *
+ * @param topic
+ * @param msg
+ * @param partitionKey
+ *
+ * handles CambriaApiException | ConfigDbException |
+ * TopicExistsException | AccessDeniedException | IOException in
+ * try catch block
+ * @throws CambriaApiException
+ */
+
+ @POST
+ @Produces("application/json")
+ @Path("/{topic}")
+ public void pushEvents(@PathParam("topic") String topic, InputStream msg,
+ @QueryParam("partitionKey") String partitionKey) throws CambriaApiException {
+ log.info("Publishing message to topic " + topic);
+
+ try {
+ eventsService.pushEvents(getDmaapContext(), topic, msg, partitionKey, null);
+ }
+ catch ( TopicExistsException e) {
+ log.error("Error while publishing to topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), errorMessages.getPublishMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ Utils.getUserApiKey(request), request.getRemoteHost(), null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ catch ( DMaaPAccessDeniedException | AccessDeniedException e) {
+ log.error("Error while publishing to topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), errorMessages.getPublishMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ Utils.getUserApiKey(request), request.getRemoteHost(), null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+
+ catch (ConfigDbException | IOException | missingReqdSetting e) {
+ log.error("Error while publishing to topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), errorMessages.getPublishMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ Utils.getUserApiKey(request), request.getRemoteHost(), null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+ /**
+ * This method is used to publish messages by passing an optional header
+ * called 'transactionId'. If the 'transactionId' is not provided in the
+ * input then a new transaction object will be created. Else the existing
+ * transaction object will be updated with the counter details.
+ *
+ * @param topic
+ * @param partitionKey
+ *
+ * handles CambriaApiException | ConfigDbException |
+ * TopicExistsException | AccessDeniedException | IOException in
+ * try catch block
+ * @throws CambriaApiException
+ */
+ @POST
+ @Produces("application/json")
+ @Path("/transaction/{topic}")
+ public void pushEventsWithTransaction(@PathParam("topic") String topic,
+ @QueryParam("partitionKey") String partitionKey) throws CambriaApiException {
+ // log.info("Publishing message with transaction id for topic " + topic
+ // );
+
+ try {
+ eventsService.pushEvents(getDmaapContext(), topic, request.getInputStream(),
+ partitionKey,
+ Utils.getFormattedDate(new Date()));
+ }
+
+ catch ( TopicExistsException e) {
+ log.error("Error while publishing to topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), errorMessages.getPublishMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ Utils.getUserApiKey(request), request.getRemoteHost(), null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ catch ( DMaaPAccessDeniedException| AccessDeniedException e) {
+ log.error("Error while publishing to topic [" + topic + "].", e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), errorMessages.getPublishMsgError()
+ + e.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ Utils.getUserApiKey(request), request.getRemoteHost(), null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+ catch (ConfigDbException | IOException | missingReqdSetting e) {
+ log.error("Error while publishing to topic : " + topic, e);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.PUBLISH_MSG_ERROR.getResponseCode(), "Transaction-"
+ + errorMessages.getPublishMsgError() + e.getMessage(), null,
+ Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(request),
+ request.getRemoteHost(),
+ null, null);
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ * This method is used for taking Configuration Object,HttpServletRequest
+ * Object,HttpServletRequest HttpServletResponse Object,HttpServletSession
+ * Object.
+ *
+ * @return DMaaPContext object from where user can get Configuration
+ * Object,HttpServlet Object
+ *
+ */
+ private DMaaPContext getDmaapContext() {
+
+ DMaaPContext dmaapContext = new DMaaPContext();
+ dmaapContext.setRequest(request);
+ dmaapContext.setResponse(response);
+ dmaapContext.setConfigReader(configReader);
+
+ return dmaapContext;
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/service/MMRestService.java b/src/main/java/com/att/nsa/dmaap/service/MMRestService.java
new file mode 100644
index 0000000..0fa396f
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/MMRestService.java
@@ -0,0 +1,1238 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import org.json.JSONObject;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.cambria.utils.Utils;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.dmaap.mmagent.*;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.google.gson.Gson;
+import com.google.gson.JsonSyntaxException;
+
+import edu.emory.mathcs.backport.java.util.Arrays;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import com.att.nsa.cambria.service.MMService;
+
+/**
+ * Rest Service class for Mirror Maker proxy Rest Services
+ *
+ * @author <a href="mailto:"></a>
+ *
+ * @since May 25, 2016
+ */
+
+@Component
+public class MMRestService {
+
	//private static final Logger LOGGER = Logger.getLogger(MMRestService.class);
	// EELF logger for this service.
	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(MMRestService.class);
	// Error texts returned to callers when permission / validation checks fail.
	private static final String NO_ADMIN_PERMISSION = "No Mirror Maker Admin permission.";
	private static final String NO_USER_PERMISSION = "No Mirror Maker User permission.";
	private static final String NO_USER_CREATE_PERMISSION = "No Mirror Maker User Create permission.";
	private static final String NAME_DOES_NOT_MEET_REQUIREMENT = "Mirror Maker name can only contain alpha numeric";
	private static final String INVALID_IPPORT = "This is not a valid IP:Port";

	// NOTE(review): these four fields are not referenced by any method visible
	// in this portion of the file — presumably used by consume/publish
	// handlers further down; confirm before removing.
	private String topic;
	private int timeout;
	private String consumergroup;
	private String consumerid;

	// Configuration reader used to build the per-request DMaaP context.
	@Autowired
	@Qualifier("configurationReader")
	private ConfigurationReader configReader;

	// Current servlet request/response, injected per call by CXF.
	@Context
	private HttpServletRequest request;

	@Context
	private HttpServletResponse response;

	// Mirror Maker proxy service.
	@Autowired
	private MMService mirrorService;

	// Externalized error-message texts.
	@Autowired
	private DMaaPErrorMessages errorMessages;
+
+ /**
+ * This method is used for taking Configuration Object,HttpServletRequest
+ * Object,HttpServletRequest HttpServletResponse Object,HttpServletSession
+ * Object.
+ *
+ * @return DMaaPContext object from where user can get Configuration
+ * Object,HttpServlet Object
+ *
+ */
+ private DMaaPContext getDmaapContext() {
+ DMaaPContext dmaapContext = new DMaaPContext();
+ dmaapContext.setRequest(request);
+ dmaapContext.setResponse(response);
+ dmaapContext.setConfigReader(configReader);
+ dmaapContext.setConsumerRequestTime(Utils.getFormattedDate(new Date()));
+
+ return dmaapContext;
+ }
+
+ @POST
+ @Produces("application/json")
+ @Path("/create")
+ public void callCreateMirrorMaker(InputStream msg) {
+
+ DMaaPContext ctx = getDmaapContext();
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeradmin.aaf"))) {
+
+ loadProperty();
+ String input = null;
+ String randomStr = getRandomNum();
+
+ InputStream inStream = null;
+ Gson gson = new Gson();
+ CreateMirrorMaker createMirrorMaker = new CreateMirrorMaker();
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ // Check if the request has CreateMirrorMaker
+ try {
+ createMirrorMaker = gson.fromJson(input, CreateMirrorMaker.class);
+
+ } catch (JsonSyntaxException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+ String name = createMirrorMaker.getCreateMirrorMaker().getName();
+ // send error message if it is not a CreateMirrorMaker request.
+ if (createMirrorMaker.getCreateMirrorMaker() == null) {
+ sendErrResponse(ctx, "This is not a CreateMirrorMaker request. Please try again.");
+ }
+
+ // MirrorMaker whitelist and status should not be passed
+ else if (createMirrorMaker.getCreateMirrorMaker().getWhitelist() != null
+ || createMirrorMaker.getCreateMirrorMaker().getStatus() != null) {
+ sendErrResponse(ctx, "This is not a CreateMirrorMaker request. Please try again.");
+ }
+
+ // if empty, blank name is entered
+ else if (StringUtils.isBlank(name)) {
+ sendErrResponse(ctx, "Name can not be empty or blank.");
+ }
+
+ // Check if the name contains only Alpha Numeric
+ else if (!isAlphaNumeric(name)) {
+ sendErrResponse(ctx, NAME_DOES_NOT_MEET_REQUIREMENT);
+
+ }
+
+ // Validate the IP and Port
+ else if (!StringUtils.isBlank(createMirrorMaker.getCreateMirrorMaker().getConsumer())
+ && !StringUtils.isBlank(createMirrorMaker.getCreateMirrorMaker().getProducer())
+ && !validateIPPort(createMirrorMaker.getCreateMirrorMaker().getConsumer())
+ || !validateIPPort(createMirrorMaker.getCreateMirrorMaker().getProducer())) {
+ sendErrResponse(ctx, INVALID_IPPORT);
+
+ }
+ // Set a random number as messageID, convert Json Object to
+ // InputStream and finally call publisher and subscriber
+ else if (isAlphaNumeric(name) && validateIPPort(createMirrorMaker.getCreateMirrorMaker().getConsumer())
+ && validateIPPort(createMirrorMaker.getCreateMirrorMaker().getProducer())) {
+
+ createMirrorMaker.setMessageID(randomStr);
+ inStream = IOUtils.toInputStream(gson.toJson(createMirrorMaker), "UTF-8");
+ callPubSub(randomStr, ctx, inStream);
+ }
+
+ } catch (IOException e) {
+
+ e.printStackTrace();
+ }
+ }
+ // Send error response if user does not provide Authorization
+ else {
+ sendErrResponse(ctx, NO_ADMIN_PERMISSION);
+ }
+ }
+
+ @POST
+ @Produces("application/json")
+ @Path("/listall")
+ public void callListAllMirrorMaker(InputStream msg) throws CambriaApiException {
+ DMaaPContext ctx = getDmaapContext();
+
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeradmin.aaf"))) {
+
+ loadProperty();
+
+ String input = null;
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ String randomStr = getRandomNum();
+ JSONObject jsonOb = null;
+
+ try {
+ jsonOb = new JSONObject(input);
+
+ } catch (JSONException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+
+ // Check if request has listAllMirrorMaker and
+ // listAllMirrorMaker is empty
+ if (jsonOb.has("listAllMirrorMaker") && jsonOb.getJSONObject("listAllMirrorMaker").length() == 0) {
+
+ jsonOb.put("messageID", randomStr);
+ InputStream inStream = null;
+
+ try {
+ inStream = IOUtils.toInputStream(jsonOb.toString(), "UTF-8");
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+
+ callPubSub(randomStr, ctx, inStream);
+
+ } else {
+
+ sendErrResponse(ctx, "This is not a ListAllMirrorMaker request. Please try again.");
+ }
+
+ } catch (IOException ioe) {
+
+ ioe.printStackTrace();
+ }
+
+ } else {
+
+ sendErrResponse(getDmaapContext(), NO_ADMIN_PERMISSION);
+ }
+ }
+
+ @POST
+ @Produces("application/json")
+ @Path("/update")
+ public void callUpdateMirrorMaker(InputStream msg) throws CambriaApiException {
+
+ DMaaPContext ctx = getDmaapContext();
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeradmin.aaf"))) {
+
+ loadProperty();
+ String input = null;
+ String randomStr = getRandomNum();
+
+ InputStream inStream = null;
+ Gson gson = new Gson();
+ UpdateMirrorMaker updateMirrorMaker = new UpdateMirrorMaker();
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ // Check if the request has UpdateMirrorMaker
+ try {
+ updateMirrorMaker = gson.fromJson(input, UpdateMirrorMaker.class);
+
+ } catch (JsonSyntaxException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+ String name = updateMirrorMaker.getUpdateMirrorMaker().getName();
+
+ // send error message if it is not a UpdateMirrorMaker request.
+ if (updateMirrorMaker.getUpdateMirrorMaker() == null) {
+ sendErrResponse(ctx, "This is not a UpdateMirrorMaker request. Please try again.");
+ }
+
+ // MirrorMaker whitelist and status should not be passed
+ else if (updateMirrorMaker.getUpdateMirrorMaker().getWhitelist() != null
+ || updateMirrorMaker.getUpdateMirrorMaker().getStatus() != null) {
+ sendErrResponse(ctx, "This is not a UpdateMirrorMaker request. Please try again.");
+ }
+
+ // if empty, blank name is entered
+ else if (StringUtils.isBlank(name)) {
+ sendErrResponse(ctx, "Name can not be empty or blank.");
+ }
+
+ // Check if the name contains only Alpha Numeric
+ else if (!isAlphaNumeric(name)) {
+ sendErrResponse(ctx, NAME_DOES_NOT_MEET_REQUIREMENT);
+
+ }
+
+ // Validate the IP and Port
+ else if (!StringUtils.isBlank(updateMirrorMaker.getUpdateMirrorMaker().getConsumer())
+ && !StringUtils.isBlank(updateMirrorMaker.getUpdateMirrorMaker().getProducer())
+ && !validateIPPort(updateMirrorMaker.getUpdateMirrorMaker().getConsumer())
+ || !validateIPPort(updateMirrorMaker.getUpdateMirrorMaker().getProducer())) {
+ sendErrResponse(ctx, INVALID_IPPORT);
+
+ }
+ // Set a random number as messageID, convert Json Object to
+ // InputStream and finally call publisher and subscriber
+ else if (isAlphaNumeric(name) && validateIPPort(updateMirrorMaker.getUpdateMirrorMaker().getConsumer())
+ && validateIPPort(updateMirrorMaker.getUpdateMirrorMaker().getProducer())) {
+
+ updateMirrorMaker.setMessageID(randomStr);
+ inStream = IOUtils.toInputStream(gson.toJson(updateMirrorMaker), "UTF-8");
+ callPubSub(randomStr, ctx, inStream);
+ }
+
+ } catch (IOException e) {
+
+ e.printStackTrace();
+ }
+ }
+ // Send error response if user does not provide Authorization
+ else {
+ sendErrResponse(ctx, NO_ADMIN_PERMISSION);
+ }
+ }
+
+ @POST
+ @Produces("application/json")
+ @Path("/delete")
+ public void callDeleteMirrorMaker(InputStream msg) throws CambriaApiException {
+ DMaaPContext ctx = getDmaapContext();
+
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeradmin.aaf"))) {
+
+ loadProperty();
+
+ String input = null;
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ String randomStr = getRandomNum();
+ JSONObject jsonOb = null;
+
+ try {
+ jsonOb = new JSONObject(input);
+
+ } catch (JSONException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+
+ // Check if request has DeleteMirrorMaker and
+ // DeleteMirrorMaker has MirrorMaker object with name variable
+ // and check if the name contain only alpha numeric
+ if (jsonOb.has("deleteMirrorMaker") && jsonOb.getJSONObject("deleteMirrorMaker").length() == 1
+ && jsonOb.getJSONObject("deleteMirrorMaker").has("name")
+ && !StringUtils.isBlank(jsonOb.getJSONObject("deleteMirrorMaker").getString("name"))
+ && isAlphaNumeric(jsonOb.getJSONObject("deleteMirrorMaker").getString("name"))) {
+
+ jsonOb.put("messageID", randomStr);
+ InputStream inStream = null;
+
+ try {
+ inStream = IOUtils.toInputStream(jsonOb.toString(), "UTF-8");
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+
+ callPubSub(randomStr, ctx, inStream);
+
+ } else {
+
+ sendErrResponse(ctx, "This is not a DeleteMirrorMaker request. Please try again.");
+ }
+
+ } catch (IOException ioe) {
+
+ ioe.printStackTrace();
+ }
+
+ } else {
+
+ sendErrResponse(getDmaapContext(), NO_ADMIN_PERMISSION);
+ }
+ }
+
+ private boolean isListMirrorMaker(String msg, String messageID) {
+ String topicmsg = msg;
+ topicmsg = removeExtraChar(topicmsg);
+
+ JSONObject jObj = new JSONObject();
+ JSONArray jArray = null;
+ boolean exist = false;
+
+ if (!StringUtils.isBlank(topicmsg) && topicmsg.length() > 2) {
+ jArray = new JSONArray(topicmsg);
+
+ for (int i = 0; i < jArray.length(); i++) {
+ jObj = jArray.getJSONObject(i);
+
+ JSONObject obj = new JSONObject();
+ if (jObj.has("message")) {
+ obj = jObj.getJSONObject("message");
+ }
+ if (obj.has("messageID") && obj.get("messageID").equals(messageID) && obj.has("listMirrorMaker")) {
+ exist = true;
+ break;
+ }
+ }
+ }
+ return exist;
+ }
+
+ private void loadProperty() {
+
+ this.timeout = Integer.parseInt(
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormaker.timeout").trim());
+ this.topic = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormaker.topic").trim();
+ this.consumergroup = AJSCPropertiesMap
+ .getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormaker.consumergroup").trim();
+ this.consumerid = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormaker.consumerid")
+ .trim();
+ }
+
+ private String removeExtraChar(String message) {
+ String str = message;
+ str = checkJsonFormate(str);
+
+ if (str != null && str.length() > 0) {
+ str = str.replace("\\", "");
+ str = str.replace("\"{", "{");
+ str = str.replace("}\"", "}");
+ }
+ return str;
+ }
+
+ private String getRandomNum() {
+ long random = Math.round(Math.random() * 89999) + 10000;
+ String strLong = Long.toString(random);
+ return strLong;
+ }
+
+ private boolean isAlphaNumeric(String name) {
+ String pattern = "^[a-zA-Z0-9]*$";
+ if (name.matches(pattern)) {
+ return true;
+ }
+ return false;
+ }
+
+ // This method validate IPv4
+ private boolean validateIPPort(String ipPort) {
+ String pattern = "^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\.([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\."
+ + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\.([01]?\\d\\d?|2[0-4]\\d|25[0-5]):"
+ + "([1-9][0-9]{0,3}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$";
+ if (ipPort.matches(pattern)) {
+ return true;
+ }
+ return false;
+ }
+
+ private String checkJsonFormate(String jsonStr) {
+
+ String json = jsonStr;
+ if (jsonStr != null && jsonStr.length() > 0 && jsonStr.startsWith("[") && !jsonStr.endsWith("]")) {
+ json = json + "]";
+ }
+ return json;
+ }
+
+ private boolean checkMirrorMakerPermission(DMaaPContext ctx, String permission) {
+
+ boolean hasPermission = false;
+
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+
+ if (aaf.aafAuthentication(ctx.getRequest(), permission)) {
+ hasPermission = true;
+ }
+ return hasPermission;
+ }
+
+ private void callPubSub(String randomstr, DMaaPContext ctx, InputStream inStream) {
+ try {
+ mirrorService.pushEvents(ctx, topic, inStream, null, null);
+ long startTime = System.currentTimeMillis();
+ String msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+
+ while (!isListMirrorMaker(msgFrmSubscribe, randomstr)
+ && (System.currentTimeMillis() - startTime) < timeout) {
+ msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+ }
+
+ JSONObject jsonObj = new JSONObject();
+ JSONObject finalJsonObj = new JSONObject();
+ JSONArray jsonArray = null;
+
+ if (msgFrmSubscribe != null && msgFrmSubscribe.length() > 0
+ && isListMirrorMaker(msgFrmSubscribe, randomstr)) {
+ msgFrmSubscribe = removeExtraChar(msgFrmSubscribe);
+ jsonArray = new JSONArray(msgFrmSubscribe);
+
+ for (int i = 0; i < jsonArray.length(); i++) {
+ jsonObj = jsonArray.getJSONObject(i);
+
+ JSONObject obj = new JSONObject();
+ if (jsonObj.has("message")) {
+ obj = jsonObj.getJSONObject("message");
+ }
+ if (obj.has("messageID") && obj.get("messageID").equals(randomstr) && obj.has("listMirrorMaker")) {
+ finalJsonObj.put("listMirrorMaker", obj.get("listMirrorMaker"));
+ break;
+ }
+ }
+
+ DMaaPResponseBuilder.respondOk(ctx, finalJsonObj);
+
+ } else {
+
+ JSONObject err = new JSONObject();
+ err.append("error", "listMirrorMaker is not available, please make sure MirrorMakerAgent is running");
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ }
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void sendErrResponse(DMaaPContext ctx, String errMsg) {
+ JSONObject err = new JSONObject();
+ err.append("Error", errMsg);
+
+ try {
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ LOGGER.error(errMsg.toString());
+
+ } catch (JSONException | IOException e) {
+ LOGGER.error(errMsg.toString());
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @POST
+ @Produces("application/json")
+ @Path("/listallwhitelist")
+ public void listWhiteList(InputStream msg) {
+
+ DMaaPContext ctx = getDmaapContext();
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeruser.aaf"))) {
+
+ loadProperty();
+ String input = null;
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ // Check if it is correct Json object
+ JSONObject jsonOb = null;
+
+ try {
+ jsonOb = new JSONObject(input);
+
+ } catch (JSONException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+
+ // Check if the request has name and name contains only alpha
+ // numeric
+ // and check if the request has namespace and namespace contains
+ // only alpha numeric
+ if (jsonOb.length() == 2 && jsonOb.has("name") && !StringUtils.isBlank(jsonOb.getString("name"))
+ && isAlphaNumeric(jsonOb.getString("name")) && jsonOb.has("namespace")
+ && !StringUtils.isBlank(jsonOb.getString("namespace"))) {
+
+ String permission = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.mirrormakeruser.aaf.create") + jsonOb.getString("namespace") + "|create";
+
+ // Check if the user have create permission for the
+ // namespace
+ if (checkMirrorMakerPermission(ctx, permission)) {
+
+ JSONObject listAll = new JSONObject();
+ JSONObject emptyObject = new JSONObject();
+
+ // Create a listAllMirrorMaker Json object
+ try {
+ listAll.put("listAllMirrorMaker", emptyObject);
+
+ } catch (JSONException e) {
+
+ e.printStackTrace();
+ }
+
+ // set a random number as messageID
+ String randomStr = getRandomNum();
+ listAll.put("messageID", randomStr);
+ InputStream inStream = null;
+
+ // convert listAll Json object to InputStream object
+ try {
+ inStream = IOUtils.toInputStream(listAll.toString(), "UTF-8");
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ // call listAllMirrorMaker
+ mirrorService.pushEvents(ctx, topic, inStream, null, null);
+
+ // subscribe for listMirrorMaker
+ long startTime = System.currentTimeMillis();
+ String msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+
+ while (!isListMirrorMaker(msgFrmSubscribe, randomStr)
+ && (System.currentTimeMillis() - startTime) < timeout) {
+ msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+ }
+
+ if (msgFrmSubscribe != null && msgFrmSubscribe.length() > 0
+ && isListMirrorMaker(msgFrmSubscribe, randomStr)) {
+
+ JSONArray listMirrorMaker = new JSONArray();
+ listMirrorMaker = getListMirrorMaker(msgFrmSubscribe, randomStr);
+
+ String whitelist = null;
+ for (int i = 0; i < listMirrorMaker.length(); i++) {
+
+ JSONObject mm = new JSONObject();
+ mm = listMirrorMaker.getJSONObject(i);
+ String name = mm.getString("name");
+
+ if (name.equals(jsonOb.getString("name")) && mm.has("whitelist")) {
+ whitelist = mm.getString("whitelist");
+ break;
+ }
+ }
+
+ if (!StringUtils.isBlank(whitelist)) {
+
+ List<String> topicList = new ArrayList<String>();
+ List<String> finalTopicList = new ArrayList<String>();
+ topicList = Arrays.asList(whitelist.split(","));
+
+ for (String topic : topicList) {
+ if (topic != null && !topic.equals("null")
+ && getNamespace(topic).equals(jsonOb.getString("namespace"))) {
+
+ finalTopicList.add(topic);
+ }
+ }
+
+ String topicNames = "";
+
+ if (finalTopicList.size() > 0) {
+ topicNames = StringUtils.join(finalTopicList, ",");
+ }
+
+ JSONObject listAllWhiteList = new JSONObject();
+ listAllWhiteList.put("name", jsonOb.getString("name"));
+ listAllWhiteList.put("whitelist", topicNames);
+
+ DMaaPResponseBuilder.respondOk(ctx, listAllWhiteList);
+ }
+
+ } else {
+
+ JSONObject err = new JSONObject();
+ err.append("error",
+ "listWhiteList is not available, please make sure MirrorMakerAgent is running");
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ }
+
+ } else {
+ sendErrResponse(ctx, NO_USER_CREATE_PERMISSION);
+ }
+
+ } else {
+
+ sendErrResponse(ctx, "This is not a ListAllWhitelist request. Please try again.");
+ }
+
+ } catch (IOException | CambriaApiException | ConfigDbException | AccessDeniedException
+ | TopicExistsException | missingReqdSetting | UnavailableException e) {
+
+ e.printStackTrace();
+ }
+ } else {
+ sendErrResponse(ctx, NO_USER_PERMISSION);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @POST
+ @Produces("application/json")
+ @Path("/createwhitelist")
+ public void createWhiteList(InputStream msg) {
+
+ DMaaPContext ctx = getDmaapContext();
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeruser.aaf"))) {
+
+ loadProperty();
+ String input = null;
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ // Check if it is correct Json object
+ JSONObject jsonOb = null;
+
+ try {
+ jsonOb = new JSONObject(input);
+
+ } catch (JSONException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+
+ // Check if the request has name and name contains only alpha numeric,
+ // check if the request has namespace and
+ // check if the request has whitelistTopicName
+ // check if the topic name contains only alpha numeric
+ if (jsonOb.length() == 3 && jsonOb.has("name") && !StringUtils.isBlank(jsonOb.getString("name"))
+ && isAlphaNumeric(jsonOb.getString("name"))
+ && jsonOb.has("namespace") && !StringUtils.isBlank(jsonOb.getString("namespace"))
+ && jsonOb.has("whitelistTopicName") && !StringUtils.isBlank(jsonOb.getString("whitelistTopicName"))
+ && isAlphaNumeric(jsonOb.getString("whitelistTopicName").substring(jsonOb.getString("whitelistTopicName").lastIndexOf(".")+1,
+ jsonOb.getString("whitelistTopicName").length()))) {
+
+ String permission = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.mirrormakeruser.aaf.create") + jsonOb.getString("namespace") + "|create";
+
+ // Check if the user have create permission for the
+ // namespace
+ if (checkMirrorMakerPermission(ctx, permission)) {
+
+ JSONObject listAll = new JSONObject();
+ JSONObject emptyObject = new JSONObject();
+
+ // Create a listAllMirrorMaker Json object
+ try {
+ listAll.put("listAllMirrorMaker", emptyObject);
+
+ } catch (JSONException e) {
+
+ e.printStackTrace();
+ }
+
+ // set a random number as messageID
+ String randomStr = getRandomNum();
+ listAll.put("messageID", randomStr);
+ InputStream inStream = null;
+
+ // convert listAll Json object to InputStream object
+ try {
+ inStream = IOUtils.toInputStream(listAll.toString(), "UTF-8");
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ // call listAllMirrorMaker
+ mirrorService.pushEvents(ctx, topic, inStream, null, null);
+
+ // subscribe for listMirrorMaker
+ long startTime = System.currentTimeMillis();
+ String msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+
+ while (!isListMirrorMaker(msgFrmSubscribe, randomStr)
+ && (System.currentTimeMillis() - startTime) < timeout) {
+ msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+ }
+
+ JSONArray listMirrorMaker = null;
+
+ if (msgFrmSubscribe != null && msgFrmSubscribe.length() > 0
+ && isListMirrorMaker(msgFrmSubscribe, randomStr)) {
+
+ listMirrorMaker = getListMirrorMaker(msgFrmSubscribe, randomStr);
+ String whitelist = null;
+
+ for (int i = 0; i < listMirrorMaker.length(); i++) {
+ JSONObject mm = new JSONObject();
+ mm = listMirrorMaker.getJSONObject(i);
+ String name = mm.getString("name");
+
+ if (name.equals(jsonOb.getString("name")) && mm.has("whitelist")) {
+ whitelist = mm.getString("whitelist");
+ break;
+ }
+ }
+
+ List<String> topicList = new ArrayList<String>();
+ List<String> finalTopicList = new ArrayList<String>();
+
+ if (whitelist != null) {
+ topicList = Arrays.asList(whitelist.split(","));
+ }
+
+ for (String st : topicList) {
+ if (!StringUtils.isBlank(st)) {
+ finalTopicList.add(st);
+ }
+ }
+
+ String newTopic = jsonOb.getString("whitelistTopicName");
+
+ if (!topicList.contains(newTopic)
+ && getNamespace(newTopic).equals(jsonOb.getString("namespace"))) {
+
+ UpdateWhiteList updateWhiteList = new UpdateWhiteList();
+ MirrorMaker mirrorMaker = new MirrorMaker();
+ mirrorMaker.setName(jsonOb.getString("name"));
+ finalTopicList.add(newTopic);
+ String newWhitelist = "";
+
+ if (finalTopicList.size() > 0) {
+ newWhitelist = StringUtils.join(finalTopicList, ",");
+ }
+
+ mirrorMaker.setWhitelist(newWhitelist);
+
+ String newRandom = getRandomNum();
+ updateWhiteList.setMessageID(newRandom);
+ updateWhiteList.setUpdateWhiteList(mirrorMaker);
+
+ Gson g = new Gson();
+ g.toJson(updateWhiteList);
+ InputStream inputStream = null;
+ inputStream = IOUtils.toInputStream(g.toJson(updateWhiteList), "UTF-8");
+ // callPubSub(newRandom, ctx, inputStream);
+ callPubSubForWhitelist(newRandom, ctx, inputStream, jsonOb.getString("namespace"));
+
+ } else if (topicList.contains(newTopic)) {
+ sendErrResponse(ctx, "The topic already exist.");
+
+ } else if (!getNamespace(newTopic).equals(jsonOb.getString("namespace"))) {
+ sendErrResponse(ctx,
+ "The namespace of the topic does not match with the namespace you provided.");
+ }
+ } else {
+
+ JSONObject err = new JSONObject();
+ err.append("error",
+ "listWhiteList is not available, please make sure MirrorMakerAgent is running");
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ }
+
+ } else {
+ sendErrResponse(ctx, NO_USER_CREATE_PERMISSION);
+ }
+
+ } else {
+
+ sendErrResponse(ctx, "This is not a createWhitelist request. Please try again.");
+ }
+
+ } catch (IOException | CambriaApiException | ConfigDbException | AccessDeniedException
+ | TopicExistsException | missingReqdSetting | UnavailableException e) {
+
+ e.printStackTrace();
+ }
+ }
+ // Send error response if user does not provide Authorization
+ else {
+ sendErrResponse(ctx, NO_USER_PERMISSION);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @POST
+ @Produces("application/json")
+ @Path("/deletewhitelist")
+ public void deleteWhiteList(InputStream msg) {
+
+ DMaaPContext ctx = getDmaapContext();
+ if (checkMirrorMakerPermission(ctx,
+ AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.mirrormakeruser.aaf"))) {
+
+ loadProperty();
+ String input = null;
+
+ try {
+ input = IOUtils.toString(msg, "UTF-8");
+
+ if (input != null && input.length() > 0) {
+ input = removeExtraChar(input);
+ }
+
+ // Check if it is correct Json object
+ JSONObject jsonOb = null;
+
+ try {
+ jsonOb = new JSONObject(input);
+
+ } catch (JSONException ex) {
+
+ sendErrResponse(ctx, errorMessages.getIncorrectJson());
+ }
+
+ // Check if the request has name and name contains only alpha numeric,
+ // check if the request has namespace and
+ // check if the request has whitelistTopicName
+ if (jsonOb.length() == 3 && jsonOb.has("name") && isAlphaNumeric(jsonOb.getString("name"))
+ && jsonOb.has("namespace") && jsonOb.has("whitelistTopicName")
+ && isAlphaNumeric(jsonOb.getString("whitelistTopicName").substring(jsonOb.getString("whitelistTopicName").lastIndexOf(".")+1,
+ jsonOb.getString("whitelistTopicName").length()))) {
+
+ String permission = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.mirrormakeruser.aaf.create") + jsonOb.getString("namespace") + "|create";
+
+ // Check if the user have create permission for the
+ // namespace
+ if (checkMirrorMakerPermission(ctx, permission)) {
+
+ JSONObject listAll = new JSONObject();
+ JSONObject emptyObject = new JSONObject();
+
+ // Create a listAllMirrorMaker Json object
+ try {
+ listAll.put("listAllMirrorMaker", emptyObject);
+
+ } catch (JSONException e) {
+
+ e.printStackTrace();
+ }
+
+ // set a random number as messageID
+ String randomStr = getRandomNum();
+ listAll.put("messageID", randomStr);
+ InputStream inStream = null;
+
+ // convert listAll Json object to InputStream object
+ try {
+ inStream = IOUtils.toInputStream(listAll.toString(), "UTF-8");
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ // call listAllMirrorMaker
+ mirrorService.pushEvents(ctx, topic, inStream, null, null);
+
+ // subscribe for listMirrorMaker
+ long startTime = System.currentTimeMillis();
+ String msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+
+ while (!isListMirrorMaker(msgFrmSubscribe, randomStr)
+ && (System.currentTimeMillis() - startTime) < timeout) {
+ msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+ }
+
+ JSONObject jsonObj = new JSONObject();
+ JSONArray jsonArray = null;
+ JSONArray listMirrorMaker = null;
+
+ if (msgFrmSubscribe != null && msgFrmSubscribe.length() > 0
+ && isListMirrorMaker(msgFrmSubscribe, randomStr)) {
+ msgFrmSubscribe = removeExtraChar(msgFrmSubscribe);
+ jsonArray = new JSONArray(msgFrmSubscribe);
+
+ for (int i = 0; i < jsonArray.length(); i++) {
+ jsonObj = jsonArray.getJSONObject(i);
+
+ JSONObject obj = new JSONObject();
+ if (jsonObj.has("message")) {
+ obj = jsonObj.getJSONObject("message");
+ }
+ if (obj.has("messageID") && obj.get("messageID").equals(randomStr) && obj.has("listMirrorMaker")) {
+ listMirrorMaker = obj.getJSONArray("listMirrorMaker");
+ break;
+ }
+ }
+ String whitelist = null;
+ for (int i = 0; i < listMirrorMaker.length(); i++) {
+
+ JSONObject mm = new JSONObject();
+ mm = listMirrorMaker.getJSONObject(i);
+ String name = mm.getString("name");
+
+ if (name.equals(jsonOb.getString("name")) && mm.has("whitelist")) {
+ whitelist = mm.getString("whitelist");
+ break;
+ }
+ }
+
+ List<String> topicList = new ArrayList<String>();
+
+ if (whitelist != null) {
+ topicList = Arrays.asList(whitelist.split(","));
+ }
+ boolean removeTopic = false;
+ String topicToRemove = jsonOb.getString("whitelistTopicName");
+
+ if (topicList.contains(topicToRemove)) {
+ removeTopic = true;
+ } else {
+ sendErrResponse(ctx, "The topic does not exist.");
+ }
+
+
+ if (removeTopic) {
+ UpdateWhiteList updateWhiteList = new UpdateWhiteList();
+ MirrorMaker mirrorMaker = new MirrorMaker();
+
+ mirrorMaker.setName(jsonOb.getString("name"));
+ mirrorMaker.setWhitelist(removeTopic(whitelist, topicToRemove));
+
+ String newRandom = getRandomNum();
+
+ updateWhiteList.setMessageID(newRandom);
+ updateWhiteList.setUpdateWhiteList(mirrorMaker);
+
+ Gson g = new Gson();
+ g.toJson(updateWhiteList);
+
+ InputStream inputStream = null;
+ inputStream = IOUtils.toInputStream(g.toJson(updateWhiteList), "UTF-8");
+ callPubSubForWhitelist(newRandom, ctx, inputStream, getNamespace(topicToRemove));
+ }
+
+ } else {
+
+ JSONObject err = new JSONObject();
+ err.append("error",
+ "listWhiteList is not available, please make sure MirrorMakerAgent is running");
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ }
+
+ } else {
+ sendErrResponse(ctx, NO_USER_CREATE_PERMISSION);
+ }
+
+ } else {
+
+ sendErrResponse(ctx, "This is not a DeleteAllWhitelist request. Please try again.");
+ }
+
+ } catch (IOException | CambriaApiException | ConfigDbException | AccessDeniedException
+ | TopicExistsException | missingReqdSetting | UnavailableException e) {
+
+ e.printStackTrace();
+ }
+ }
+ // Send error response if user does not provide Authorization
+ else {
+ sendErrResponse(ctx, NO_USER_PERMISSION);
+ }
+ }
+
+ private String getNamespace(String topic) {
+ return topic.substring(0, topic.lastIndexOf("."));
+ }
+
+ private String removeTopic(String whitelist, String topicToRemove) {
+ List<String> topicList = new ArrayList<String>();
+ List<String> newTopicList = new ArrayList<String>();
+
+ if (whitelist.contains(",")) {
+ topicList = Arrays.asList(whitelist.split(","));
+
+ }
+
+ if (topicList.contains(topicToRemove)) {
+ for (String topic : topicList) {
+ if (!topic.equals(topicToRemove)) {
+ newTopicList.add(topic);
+ }
+ }
+ }
+
+ String newWhitelist = StringUtils.join(newTopicList, ",");
+
+ return newWhitelist;
+ }
+
+ private void callPubSubForWhitelist(String randomStr, DMaaPContext ctx, InputStream inStream, String namespace) {
+
+ try {
+ mirrorService.pushEvents(ctx, topic, inStream, null, null);
+ long startTime = System.currentTimeMillis();
+ String msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+
+ while (!isListMirrorMaker(msgFrmSubscribe, randomStr)
+ && (System.currentTimeMillis() - startTime) < timeout) {
+ msgFrmSubscribe = mirrorService.subscribe(ctx, topic, consumergroup, consumerid);
+ }
+
+ JSONObject jsonObj = new JSONObject();
+ JSONArray jsonArray = null;
+ JSONArray jsonArrayNamespace = null;
+
+ if (msgFrmSubscribe != null && msgFrmSubscribe.length() > 0
+ && isListMirrorMaker(msgFrmSubscribe, randomStr)) {
+ msgFrmSubscribe = removeExtraChar(msgFrmSubscribe);
+ jsonArray = new JSONArray(msgFrmSubscribe);
+
+ for (int i = 0; i < jsonArray.length(); i++) {
+ jsonObj = jsonArray.getJSONObject(i);
+
+ JSONObject obj = new JSONObject();
+ if (jsonObj.has("message")) {
+ obj = jsonObj.getJSONObject("message");
+ }
+ if (obj.has("messageID") && obj.get("messageID").equals(randomStr) && obj.has("listMirrorMaker")) {
+ jsonArrayNamespace = obj.getJSONArray("listMirrorMaker");
+ }
+ }
+ JSONObject finalJasonObj = new JSONObject();
+ JSONArray finalJsonArray = new JSONArray();
+
+ for (int i = 0; i < jsonArrayNamespace.length(); i++) {
+
+ JSONObject mmObj = new JSONObject();
+ mmObj = jsonArrayNamespace.getJSONObject(i);
+ String whitelist = null;
+
+ if (mmObj.has("whitelist")) {
+ whitelist = getWhitelistByNamespace(mmObj.getString("whitelist"), namespace);
+
+ if (whitelist != null) {
+ mmObj.remove("whitelist");
+ mmObj.put("whitelist", whitelist);
+ } else {
+ mmObj.remove("whitelist");
+ }
+ }
+ finalJsonArray.put(mmObj);
+ }
+ finalJasonObj.put("listMirrorMaker", finalJsonArray);
+
+ DMaaPResponseBuilder.respondOk(ctx, finalJasonObj);
+
+ } else {
+
+ JSONObject err = new JSONObject();
+ err.append("error", "listMirrorMaker is not available, please make sure MirrorMakerAgent is running");
+ DMaaPResponseBuilder.respondOk(ctx, err);
+ }
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private String getWhitelistByNamespace(String originalWhitelist, String namespace) {
+
+ String whitelist = null;
+ List<String> resultList = new ArrayList<String>();
+ List<String> whitelistList = new ArrayList<String>();
+ whitelistList = Arrays.asList(originalWhitelist.split(","));
+
+ for (String topic : whitelistList) {
+ if (StringUtils.isNotBlank(originalWhitelist) && getNamespace(topic).equals(namespace)) {
+ resultList.add(topic);
+ }
+ }
+ if (resultList.size() > 0) {
+ whitelist = StringUtils.join(resultList, ",");
+ }
+
+ return whitelist;
+ }
+
+ private JSONArray getListMirrorMaker(String msgFrmSubscribe, String randomStr) {
+ JSONObject jsonObj = new JSONObject();
+ JSONArray jsonArray = new JSONArray();
+ JSONArray listMirrorMaker = new JSONArray();
+
+ msgFrmSubscribe = removeExtraChar(msgFrmSubscribe);
+ jsonArray = new JSONArray(msgFrmSubscribe);
+
+ for (int i = 0; i < jsonArray.length(); i++) {
+ jsonObj = jsonArray.getJSONObject(i);
+
+ JSONObject obj = new JSONObject();
+ if (jsonObj.has("message")) {
+ obj = jsonObj.getJSONObject("message");
+ }
+ if (obj.has("messageID") && obj.get("messageID").equals(randomStr) && obj.has("listMirrorMaker")) {
+ listMirrorMaker = obj.getJSONArray("listMirrorMaker");
+ break;
+ }
+ }
+ return listMirrorMaker;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/service/MetricsRestService.java b/src/main/java/com/att/nsa/dmaap/service/MetricsRestService.java
new file mode 100644
index 0000000..8a6240e
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/MetricsRestService.java
@@ -0,0 +1,152 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.service.MetricsService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+
+/**
+ * This class is a CXF REST service which acts
+ * as gateway for MR Metrics Service.
+ * Exposes plain-text GET endpoints that delegate to {@link MetricsService}.
+ *
+ */
+@Component
+@Path("/")
+public class MetricsRestService {
+
+	/**
+	 * Logger obj
+	 */
+	// EELF logger scoped to this class so its log lines are attributable here.
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsRestService.class);
+	/**
+	 * HttpServletRequest obj
+	 */
+	@Context
+	private HttpServletRequest request;
+
+	/**
+	 * HttpServletResponse obj
+	 */
+	@Context
+	private HttpServletResponse response;
+
+	/**
+	 * Config Reader
+	 */
+	@Autowired
+	@Qualifier("configurationReader")
+	private ConfigurationReader configReader;
+
+	/**
+	 * MetricsService obj
+	 */
+	@Autowired
+	private MetricsService metricsService;
+
+	/**
+	 * Get Metrics method
+	 * @throws CambriaApiException ex
+	 */
+	@GET
+	@Produces("text/plain")
+	public void getMetrics() throws CambriaApiException {
+		try {
+			log.info("MetricsRestService: getMetrics : START");
+			metricsService.get(getDmaapContext());
+			log.info("MetricsRestService: getMetrics : Completed");
+		} catch (IOException e) {
+			log.error("Error while fetching metrics data : ", e);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.GET_METRICS_ERROR.getResponseCode(),
+					"Error while fetching metrics data"+ e.getMessage());
+			log.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 * This method is for get the metrics details by the metrics name
+	 *
+	 * @param metricName
+	 * @throws CambriaApiException
+	 */
+	@GET
+	@Path("/{metricName}")
+	@Produces("text/plain")
+	public void getMetricsByName(@PathParam("metricName") String metricName)
+			throws CambriaApiException {
+
+		try {
+			log.info("MetricsProducer: getMetricsByName : START");
+			metricsService.getMetricByName(getDmaapContext(), metricName);
+			log.info("MetricsRestService: getMetricsByName : Completed");
+		} catch (IOException | CambriaApiException e) {
+			log.error("Error while fetching metrics data : ", e);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.GET_METRICS_ERROR.getResponseCode(),
+					"Error while fetching metrics data"+ e.getMessage());
+			log.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 * This method is used for taking Configuration Object,HttpServletRequest
+	 * Object,HttpServletRequest HttpServletResponse Object,HttpServletSession
+	 * Object.
+	 *
+	 * @return DMaaPContext object from where user can get Configuration
+	 *         Object,HttpServlet Object
+	 *
+	 */
+	private DMaaPContext getDmaapContext() {
+		DMaaPContext dmaapContext = new DMaaPContext();
+		dmaapContext.setConfigReader(configReader);
+		dmaapContext.setRequest(request);
+		dmaapContext.setResponse(response);
+		return dmaapContext;
+	}
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/service/TopicRestService.java b/src/main/java/com/att/nsa/dmaap/service/TopicRestService.java
new file mode 100644
index 0000000..6742cd5
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/TopicRestService.java
@@ -0,0 +1,688 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.json.JSONException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.TopicBean;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import com.att.nsa.cambria.service.TopicService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * This class is a CXF REST service which acts
+ * as gateway for MR Topic Service.
+ * @author author
+ *
+ */
+
+@Component
+@Path("/")
+public class TopicRestService {
+
+ /**
+ * Logger obj
+ */
+ //private static final Logger LOGGER = Logger .getLogger(TopicRestService.class);
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicRestService.class);
+ /**
+ * Config Reader
+ */
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest request;
+
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse response;
+
+ /**
+ * TopicService obj
+ */
+ @Autowired
+ private TopicService topicService;
+
+ /**
+ * DMaaPErrorMessages obj
+ */
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ /**
+ * mrNamespace
+ */
+ //@Value("${msgRtr.namespace.aaf}")
+// private String mrNamespace;
+
+
+	/**
+	 * Fetches the list of topics from the current kafka instance and returns
+	 * it as a JSON object in the response.
+	 * 
+	 * @return list of the topics in json format
+	 * @throws AccessDeniedException
+	 * @throws CambriaApiException
+	 * @throws IOException
+	 * @throws JSONException
+	 * */
+	@GET
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void getTopics() throws CambriaApiException {
+		try {
+			
+			LOGGER.info("Authenticating the user before fetching the topics");
+			// AAF permission format is <namespace>|<topic-or-*>|<action>, built below.
+			String mrNameS= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.namespace.aaf");
+			String permission =mrNameS+"|"+"*"+"|"+"view";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			//Check if client is using AAF CADI Basic Authorization
+			//If yes then check for AAF role authentication else display all topics
+			if(null!=getDmaapContext().getRequest().getHeader("Authorization"))
+			{
+				if(!aaf.aafAuthentication(getDmaapContext().getRequest(), permission))
+				{
+					
+					ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+							DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+							errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2());
+					LOGGER.info(errRes.toString());
+					throw new DMaaPAccessDeniedException(errRes);
+					
+					
+				}
+			}
+			
+			LOGGER.info("Fetching all Topics");
+			
+			topicService.getTopics(getDmaapContext());
+			
+			LOGGER.info("Returning List of all Topics");
+
+		
+		} catch (JSONException | ConfigDbException | IOException excp) {
+			LOGGER.error(
+					"Failed to retrieve list of all topics: "
+							+ excp.getMessage(), excp);
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.GET_TOPICS_FAIL.getResponseCode(),
+					errorMessages.getTopicsfailure()+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+			
+			
+		}
+	}
+
+	/**
+	 * Fetches the complete list of topics (the /listAll variant) from the
+	 * current kafka instance and returns it as a JSON object in the response.
+	 * 
+	 * @return list of the topics in json format
+	 * @throws AccessDeniedException
+	 * @throws CambriaApiException
+	 * @throws IOException
+	 * @throws JSONException
+	 * */
+	@GET
+	@Path("/listAll")
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void getAllTopics() throws CambriaApiException {
+		try {
+
+			LOGGER.info("Authenticating the user before fetching the topics");
+			// AAF permission format is <namespace>|<topic-or-*>|<action>, built below.
+			String mrNameS= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.namespace.aaf");
+			String permission =mrNameS+"|"+"*"+"|"+"view";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			//Check if client is using AAF CADI Basic Authorization
+			//If yes then check for AAF role authentication else display all topics
+			if(null!=getDmaapContext().getRequest().getHeader("Authorization"))
+			{
+				if(!aaf.aafAuthentication(getDmaapContext().getRequest(), permission))
+				{
+
+					ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+							DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+							errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2());
+					LOGGER.info(errRes.toString());
+					throw new DMaaPAccessDeniedException(errRes);
+
+
+				}
+			}
+
+			LOGGER.info("Fetching all Topics");
+
+			topicService.getAllTopics(getDmaapContext());
+
+			LOGGER.info("Returning List of all Topics");
+
+
+		} catch (JSONException | ConfigDbException | IOException excp) {
+			LOGGER.error(
+					"Failed to retrieve list of all topics: "
+							+ excp.getMessage(), excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.GET_TOPICS_FAIL.getResponseCode(),
+					errorMessages.getTopicsfailure()+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+
+		}
+	}
+
+
+	/**
+	 * Returns details of the topic whose name is passed as a path parameter
+	 *
+	 * @param topicName
+	 *            - name of the topic
+	 * @return details of a topic whose name is mentioned in the request in json
+	 *         format.
+	 * @throws AccessDeniedException
+	 * @throws DMaaPAccessDeniedException
+	 * @throws IOException
+	 * */
+	@GET
+	@Path("/{topicName}")
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void getTopic(@PathParam("topicName") String topicName) throws CambriaApiException {
+		try {
+
+			LOGGER.info("Authenticating the user before fetching the details about topic = "+ topicName);
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+
+			// Per-topic AAF permission is built below via aafPermissionString(topicName, "view").
+
+			//Check if client is using AAF CADI Basic Authorization
+			//If yes then check for AAF role authentication else display all topics
+			if(null!=getDmaapContext().getRequest().getHeader("Authorization"))
+			{
+				String permission = aaf.aafPermissionString(topicName, "view");
+				if(!aaf.aafAuthentication(getDmaapContext().getRequest(), permission))
+				{
+
+					ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+							DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+							errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2());
+					LOGGER.info(errRes.toString());
+					throw new DMaaPAccessDeniedException(errRes);
+				}
+			}
+
+			LOGGER.info("Fetching Topic: " + topicName);
+
+			topicService.getTopic(getDmaapContext(), topicName);
+
+			LOGGER.info("Fetched details of topic: " + topicName);
+
+		} catch (ConfigDbException | IOException | TopicExistsException excp) {
+			LOGGER.error("Failed to retrieve details of topic: " + topicName,
+					excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.GET_TOPICS_DETAILS_FAIL.getResponseCode(),
+					errorMessages.getTopicDetailsFail()+topicName+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+
+		}
+	}
+
+
+
+	/**
+	 * Creates a new topic from the supplied {@link TopicBean}; a duplicate
+	 * topic name is rejected with HTTP 409 (conflict).
+	 *
+	 * @param topicBean
+	 *            it will have the bean object
+	 * @throws TopicExistsException
+	 * @throws CambriaApiException
+	 * @throws JSONException
+	 * @throws IOException
+	 * @throws AccessDeniedException
+	 *
+	 * */
+	@POST
+	@Path("/create")
+	@Consumes({ MediaType.APPLICATION_JSON })
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void createTopic(TopicBean topicBean) throws CambriaApiException, JSONException {
+		try {
+			LOGGER.info("Creating Topic."+topicBean.getTopicName());
+
+			topicService.createTopic(getDmaapContext(), topicBean);
+
+			LOGGER.info("Topic created Successfully.");
+		}
+		catch (TopicExistsException ex){
+
+			LOGGER.error("Error while creating a topic: " + ex.getMessage(),
+					ex);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+					DMaaPResponseCode.CREATE_TOPIC_FAIL.getResponseCode(),
+					errorMessages.getCreateTopicFail()+ ex.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+
+
+
+		}catch (AccessDeniedException | DMaaPAccessDeniedException excp) {
+			LOGGER.error("Error while creating a topic: " + excp.getMessage(),
+					excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+					DMaaPResponseCode.CREATE_TOPIC_FAIL.getResponseCode(),
+					errorMessages.getCreateTopicFail()+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}catch (CambriaApiException | IOException excp) {
+			LOGGER.error("Error while creating a topic: " + excp.getMessage(),
+					excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.CREATE_TOPIC_FAIL.getResponseCode(),
+					errorMessages.getCreateTopicFail()+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+ /**
+ * Deletes existing topic whose name is passed as a parameter
+ *
+ * @param topicName
+ * topic
+ * @throws CambriaApiException
+ * @throws IOException
+ * */
+ @DELETE
+ @Path("/{topicName}")
+ //@Produces(MediaType.TEXT_PLAIN)
+ public void deleteTopic(@PathParam("topicName") String topicName) throws CambriaApiException {
+ try {
+ LOGGER.info("Deleting Topic: " + topicName);
+
+ topicService.deleteTopic(getDmaapContext(), topicName);
+
+ LOGGER.info("Topic [" + topicName + "] deleted successfully.");
+ } catch (DMaaPAccessDeniedException| AccessDeniedException excp) {
+ LOGGER.error("Error while creating a topic: " + excp.getMessage(),
+ excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.CREATE_TOPIC_FAIL.getResponseCode(),
+ errorMessages.getCreateTopicFail()+ excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }catch (IOException | ConfigDbException
+ | CambriaApiException | TopicExistsException excp) {
+ LOGGER.error("Error while deleting topic: " + topicName, excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.DELETE_TOPIC_FAIL.getResponseCode(),
+ errorMessages.getDeleteTopicFail()+ topicName + excp.getMessage());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ private DMaaPContext getDmaapContext() {
+
+ DMaaPContext dmaapContext = new DMaaPContext();
+ dmaapContext.setRequest(request);
+ dmaapContext.setResponse(response);
+ dmaapContext.setConfigReader(configReader);
+
+ return dmaapContext;
+
+ }
+
+	/**
+	 * Returns the list of publishers for the given topic.
+	 *
+	 * @param topicName
+	 * @throws CambriaApiException
+	 * @throws AccessDeniedException
+	 */
+	@GET
+	@Path("/{topicName}/producers")
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void getPublishersByTopicName(
+			@PathParam("topicName") String topicName) throws CambriaApiException {
+		try {
+			
+//			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "view");
+//			if(aaf.aafAuthentication(getDmaapContext().getRequest(), permission))
+//			{
+			LOGGER.info("Fetching list of all the publishers for topic "
+					+ topicName);
+
+			topicService.getPublishersByTopicName(getDmaapContext(), topicName);
+
+			LOGGER.info("Returning list of all the publishers for topic "
+					+ topicName);
+//			}else{
+//				LOGGER.error("Error while fetching list of publishers for topic "+ topicName);
+//				
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" fetch list of publishers "+errorMessages.getNotPermitted2());
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//				
+//			}
+
+		} catch (IOException | ConfigDbException | TopicExistsException excp) {
+			LOGGER.error("Error while fetching list of publishers for topic "
+					+ topicName, excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.GET_PUBLISHERS_BY_TOPIC.getResponseCode(),
+					"Error while fetching list of publishers for topic: "
+							+ topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Grants publish (write) permission on the topic to the given producer id.
+	 *
+	 * @param topicName
+	 * @param producerId
+	 * @throws CambriaApiException
+	 */
+	@PUT
+	@Path("/{topicName}/producers/{producerId}")
+	public void permitPublisherForTopic(
+			@PathParam("topicName") String topicName,
+			@PathParam("producerId") String producerId) throws CambriaApiException {
+		try {
+			LOGGER.info("Granting write access to producer [" + producerId
+					+ "] for topic " + topicName);
+
+			topicService.permitPublisherForTopic(getDmaapContext(), topicName,
+					producerId);
+
+			LOGGER.info("Write access has been granted to producer ["
+					+ producerId + "] for topic " + topicName);
+		} catch (AccessDeniedException | DMaaPAccessDeniedException excp) {
+			LOGGER.error("Error while granting write access to producer ["
+					+ producerId + "] for topic " + topicName, excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+					DMaaPResponseCode.PERMIT_PUBLISHER_FOR_TOPIC.getResponseCode(),
+					"Error while granting write access to producer [" + producerId + "] for topic " + topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}catch ( ConfigDbException | IOException
+				| TopicExistsException excp) {
+			LOGGER.error("Error while granting write access to producer ["
+					+ producerId + "] for topic " + topicName, excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.PERMIT_PUBLISHER_FOR_TOPIC.getResponseCode(),
+					"Error while granting write access to producer ["
+							+ producerId + "] for topic " + topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Revokes publish (write) permission on the topic from the given producer id.
+	 *
+	 * @param topicName
+	 * @param producerId
+	 * @throws CambriaApiException
+	 */
+	@DELETE
+	@Path("/{topicName}/producers/{producerId}")
+	public void denyPublisherForTopic(@PathParam("topicName") String topicName,
+			@PathParam("producerId") String producerId) throws CambriaApiException {
+		try {
+			LOGGER.info("Revoking write access to producer [" + producerId
+					+ "] for topic " + topicName);
+
+			topicService.denyPublisherForTopic(getDmaapContext(), topicName,
+					producerId);
+
+			LOGGER.info("Write access revoked for producer [" + producerId
+					+ "] for topic " + topicName);
+		} catch (DMaaPAccessDeniedException | AccessDeniedException excp) {
+			LOGGER.error("Error while revoking write access for producer ["
+					+ producerId + "] for topic " + topicName, excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+					DMaaPResponseCode.REVOKE_PUBLISHER_FOR_TOPIC.getResponseCode(),
+					"Error while revoking write access to producer [" + producerId + "] for topic " + topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}catch ( ConfigDbException | IOException
+				| TopicExistsException excp) {
+			LOGGER.error("Error while revoking write access for producer ["
+					+ producerId + "] for topic " + topicName, excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.REVOKE_PUBLISHER_FOR_TOPIC.getResponseCode(),
+					"Error while revoking write access to producer ["
+							+ producerId + "] for topic " + topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 * Returns the list of consumers for the given topic.
+	 *
+	 * @param topicName
+	 * @throws AccessDeniedException
+	 * @throws CambriaApiException
+	 */
+	@GET
+	@Path("/{topicName}/consumers")
+	// NOTE(review): no @Produces set; response appears to be written by the service layer — confirm.
+	public void getConsumersByTopicName(@PathParam("topicName") String topicName) throws AccessDeniedException,
+			CambriaApiException {
+		try {
+			
+			
+//			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"view";
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "view");
+//			if(aaf.aafAuthentication(getDmaapContext().getRequest(), permission))
+//			{
+			LOGGER.info("Fetching list of all consumers for topic " + topicName);
+
+			topicService.getConsumersByTopicName(getDmaapContext(), topicName);
+
+			LOGGER.info("Returning list of all consumers for topic "
+					+ topicName);
+
+//			}else{
+//				LOGGER.error(
+//						"Error while fetching list of all consumers for topic "
+//								+ topicName);
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" fetch list of consumers "+errorMessages.getNotPermitted2());
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//				
+//				
+//			}
+			
+			
+
+		} catch (IOException | ConfigDbException | TopicExistsException excp) {
+			LOGGER.error(
+					"Error while fetching list of all consumers for topic "
+							+ topicName, excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.GET_CONSUMERS_BY_TOPIC.getResponseCode(),
+					"Error while fetching list of all consumers for topic: "
+							+ topicName+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Grants read access on the topic to the given consumer id.
+	 *
+	 * @param topicName
+	 * @param consumerId
+	 * @throws CambriaApiException
+	 */
+	@PUT
+	@Path("/{topicName}/consumers/{consumerId}")
+	public void permitConsumerForTopic(
+			@PathParam("topicName") String topicName,
+			@PathParam("consumerId") String consumerId) throws CambriaApiException {
+		try {
+			LOGGER.info("Granting read access to consumer [" + consumerId
+					+ "] for topic " + topicName);
+
+			topicService.permitConsumerForTopic(getDmaapContext(), topicName,
+					consumerId);
+
+			LOGGER.info("Read access granted to consumer [" + consumerId
+					+ "] for topic " + topicName);
+		} catch (AccessDeniedException | ConfigDbException | IOException
+				| TopicExistsException excp) {
+			LOGGER.error("Error while granting read access to consumer ["
+					+ consumerId + "] for topic " + topicName, excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.PERMIT_CONSUMER_FOR_TOPIC.getResponseCode(),
+					"Error while granting read access to consumer ["
+							+ consumerId + "] for topic " + topicName+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Revokes read access on the topic from the given consumer id.
+	 *
+	 * @param topicName
+	 * @param consumerId
+	 * @throws CambriaApiException
+	 */
+	@DELETE
+	@Path("/{topicName}/consumers/{consumerId}")
+	public void denyConsumerForTopic(@PathParam("topicName") String topicName,
+			@PathParam("consumerId") String consumerId) throws CambriaApiException {
+		try {
+			LOGGER.info("Revoking read access to consumer [" + consumerId
+					+ "] for topic " + topicName);
+
+			topicService.denyConsumerForTopic(getDmaapContext(), topicName,
+					consumerId);
+
+			LOGGER.info("Read access revoked to consumer [" + consumerId
+					+ "] for topic " + topicName);
+		} catch ( ConfigDbException | IOException
+				| TopicExistsException excp) {
+			LOGGER.error("Error while revoking read access to consumer ["
+					+ consumerId + "] for topic " + topicName, excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.REVOKE_CONSUMER_FOR_TOPIC.getResponseCode(),
+					"Error while revoking read access to consumer ["
+							+ consumerId + "] for topic " + topicName+ excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}catch (DMaaPAccessDeniedException | AccessDeniedException excp) {
+			LOGGER.error("Error while revoking read access to consumer ["
+					+ consumerId + "] for topic " + topicName, excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+					DMaaPResponseCode.REVOKE_CONSUMER_FOR_TOPIC.getResponseCode(),
+					"Error while revoking read access to consumer [" + consumerId + "] for topic " + topicName + excp.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+
+
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/service/TransactionRestService.java b/src/main/java/com/att/nsa/dmaap/service/TransactionRestService.java
new file mode 100644
index 0000000..a44c2ad
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/TransactionRestService.java
@@ -0,0 +1,176 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.service.TransactionService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * This class is a CXF REST service
+ * which acts as gateway for DMaaP
+ * Transaction Ids.
+ * @author author
+ *
+ */
+@Component
+@Path("/")
+public class TransactionRestService {
+
+	/**
+	 * Logger obj
+	 */
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TransactionRestService.class);
+
+	/**
+	 * HttpServletRequest obj
+	 */
+	@Context
+	private HttpServletRequest request;
+
+	/**
+	 * HttpServletResponse obj
+	 */
+	@Context
+	private HttpServletResponse response;
+
+	/**
+	 * Config Reader
+	 */
+	@Autowired
+	@Qualifier("configurationReader")
+	private ConfigurationReader configReader;
+
+	@Autowired
+	private TransactionService transactionService;
+
+	/**
+	 *
+	 * Returns a list of all the existing Transaction Ids
+	 * @throws CambriaApiException
+	 *
+	 * @throws IOException
+	 * @exception ConfigDbException
+	 * @exception IOException
+	 *
+	 *
+	 */
+	@GET
+	public void getAllTransactionObjs() throws CambriaApiException {
+		try {
+			LOGGER.info("Retrieving list of all transactions.");
+
+			transactionService.getAllTransactionObjs(getDmaapContext());
+
+			LOGGER.info("Returning list of all transactions.");
+		} catch (ConfigDbException | IOException e) {
+			LOGGER.error("Error while retrieving list of all transactions: "
+					+ e.getMessage(), e);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
+					DMaaPResponseCode.RETRIEVE_TRANSACTIONS.getResponseCode(),
+					"Error while retrieving list of all transactions:"+e.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 *
+	 * Returns details of the transaction whose <code>transactionId</code> is
+	 * passed as a path parameter
+	 *
+	 * @param transactionId
+	 *            - id of transaction
+	 * @throws CambriaApiException
+	 * @throws IOException
+	 * @exception ConfigDbException
+	 * @exception IOException
+	 * @exception JSONException
+	 *
+	 *
+	 */
+	@GET
+	@Path("/{transactionId}")
+	public void getTransactionObj(
+			@PathParam("transactionId") String transactionId) throws CambriaApiException {
+
+		LOGGER.info("Fetching details of Transaction ID : " + transactionId);
+
+		try {
+			transactionService.getTransactionObj(getDmaapContext(),
+					transactionId);
+		} catch (ConfigDbException | JSONException | IOException e) {
+			LOGGER.error("Error while retrieving transaction details for id: "
+					+ transactionId, e);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
+					DMaaPResponseCode.RETRIEVE_TRANSACTIONS_DETAILS.getResponseCode(),
+					"Error while retrieving transaction details for id: ["
+							+ transactionId + "]: " + e.getMessage());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+
+		LOGGER.info("Returning details of transaction " + transactionId);
+
+	}
+
+	/**
+	 * Builds a per-call DMaaPContext from the injected configuration
+	 * reader and the current HttpServletRequest / HttpServletResponse
+	 * pair.
+	 *
+	 * @return DMaaPContext object from where user can get Configuration
+	 *         Object,HttpServlet Object
+	 *
+	 */
+	private DMaaPContext getDmaapContext() {
+		DMaaPContext dmaapContext = new DMaaPContext();
+		dmaapContext.setConfigReader(configReader);
+		dmaapContext.setRequest(request);
+		dmaapContext.setResponse(response);
+		return dmaapContext;
+	}
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/dmaap/service/UIRestServices.java b/src/main/java/com/att/nsa/dmaap/service/UIRestServices.java
new file mode 100644
index 0000000..79a39fb
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/service/UIRestServices.java
@@ -0,0 +1,198 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.service;
+
+import java.io.IOException;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+
+import kafka.common.TopicExistsException;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.service.UIService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * UI Rest Service
+ * @author author
+ *
+ */
+@Component
+public class UIRestServices {
+
+ /**
+ * Logger obj
+ */
+ //private static final Logger LOGGER = Logger.getLogger(UIRestServices.class);
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIRestServices.class);
+
+ @Autowired
+ private UIService uiService;
+
+ /**
+ * Config Reader
+ */
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ /**
+ * HttpServletRequest obj
+ */
+ @Context
+ private HttpServletRequest request;
+
+ /**
+ * HttpServletResponse obj
+ */
+ @Context
+ private HttpServletResponse response;
+
+ /**
+ * getting the hello
+ */
+ @GET
+ @Path("/")
+ public void hello() {
+ try {
+ LOGGER.info("Calling hello page.");
+
+ uiService.hello(getDmaapContext());
+
+ LOGGER.info("Hello page is returned.");
+ } catch (IOException excp) {
+ LOGGER.error("Error while calling hello page: " + excp.getMessage(), excp);
+ DMaaPResponseBuilder.respondWithError(getDmaapContext(), HttpStatus.SC_NOT_FOUND,
+ "Error while calling hello page: " + excp.getMessage());
+ }
+ }
+
+ /**
+ * getApikeysTable
+ */
+ @GET
+ @Path("/ui/apikeys")
+ public void getApiKeysTable() {
+ try {
+ LOGGER.info("Fetching list of all api keys.");
+
+ uiService.getApiKeysTable(getDmaapContext());
+
+ LOGGER.info("Returning list of all api keys.");
+ } catch (ConfigDbException | IOException excp) {
+ LOGGER.error("Error while fetching list of all api keys: " + excp.getMessage(), excp);
+ DMaaPResponseBuilder.respondWithError(getDmaapContext(), HttpStatus.SC_NOT_FOUND,
+ "Error while fetching list of all api keys: " + excp.getMessage());
+ }
+ }
+
+ /**
+ * getApiKey
+ *
+ * @param apiKey
+ * @exception Exception
+ */
+ @GET
+ @Path("/ui/apikeys/{apiKey}")
+ public void getApiKey(@PathParam("apiKey") String apiKey) {
+ try {
+ LOGGER.info("Fetching details of api key: " + apiKey);
+
+ uiService.getApiKey(getDmaapContext(), apiKey);
+
+ LOGGER.info("Returning details of api key: " + apiKey);
+ } catch (Exception excp) {
+ LOGGER.error("Error while fetching details of api key: " + apiKey, excp);
+ DMaaPResponseBuilder.respondWithError(getDmaapContext(), HttpStatus.SC_NOT_FOUND,
+ "Error while fetching details of api key: " + apiKey);
+ }
+ }
+
+ @GET
+ @Path("/ui/topics")
+ public void getTopicsTable() {
+ try {
+ LOGGER.info("Fetching list of all topics.");
+
+ uiService.getTopicsTable(getDmaapContext());
+
+ LOGGER.info("Returning list of all topics.");
+ } catch (ConfigDbException | IOException excp) {
+ LOGGER.error("Error while fetching list of all topics: " + excp, excp);
+ DMaaPResponseBuilder.respondWithError(getDmaapContext(), HttpStatus.SC_NOT_FOUND,
+ "Error while fetching list of all topics: " + excp.getMessage());
+ }
+ }
+
+ /**
+ *
+ * @param topic
+ */
+ @GET
+ @Path("/ui/topics/{topic}")
+ public void getTopic(@PathParam("topic") String topic) {
+ try {
+ LOGGER.info("Fetching details of topic: " + topic);
+
+ uiService.getTopic(getDmaapContext(), topic);
+
+ LOGGER.info("Returning details of topic: " + topic);
+ } catch (ConfigDbException | IOException | TopicExistsException excp) {
+ LOGGER.error("Error while fetching details of topic: " + topic, excp);
+ DMaaPResponseBuilder.respondWithError(getDmaapContext(), HttpStatus.SC_NOT_FOUND,
+ "Error while fetching details of topic: " + topic);
+ }
+ }
+
+ /**
+ * This method is used for taking Configuration Object,HttpServletRequest
+ * Object,HttpServletRequest HttpServletResponse Object,HttpServletSession
+ * Object.
+ *
+ * @return DMaaPContext object from where user can get Configuration
+ * Object,HttpServlet Object
+ *
+ */
+ private DMaaPContext getDmaapContext() {
+ DMaaPContext dmaapContext = new DMaaPContext();
+ dmaapContext.setConfigReader(configReader);
+ dmaapContext.setRequest(request);
+ dmaapContext.setResponse(response);
+ return dmaapContext;
+ }
+}
diff --git a/src/main/java/com/att/nsa/dmaap/tools/ConfigTool.java b/src/main/java/com/att/nsa/dmaap/tools/ConfigTool.java
new file mode 100644
index 0000000..4424840
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/tools/ConfigTool.java
@@ -0,0 +1,818 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.tools;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.LinkedList;
+import java.util.Map.Entry;
+
+import org.json.JSONException;
+
+import com.att.nsa.apiServer.CommonServlet;
+import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cmdtool.Command;
+import com.att.nsa.cmdtool.CommandLineTool;
+import com.att.nsa.cmdtool.CommandNotReadyException;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import com.att.nsa.configs.confimpl.EncryptingLayer;
+import com.att.nsa.configs.confimpl.ZkConfigDb;
+import com.att.nsa.drumlin.till.data.rrConvertor;
+import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
+import com.att.nsa.drumlin.till.nv.impl.nvWriteableTable;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.EncryptingApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import com.att.nsa.util.NsaClock;
+
+public class ConfigTool extends CommandLineTool<ConfigToolContext>
+{
	/**
	 * Wires up the interactive shell: sets the tool title and prompt, then
	 * registers every supported command. Each command contributes one or
	 * more regex patterns via its getMatches().
	 */
	protected ConfigTool ()
	{
		super ( "Cambria API Config Tool", "cambriaConfig> " );

		super.registerCommand ( new ListTopicCommand () );
		super.registerCommand ( new WriteTopicCommand () );
		super.registerCommand ( new ReadTopicCommand () );
		super.registerCommand ( new SetTopicOwnerCommand () );
		super.registerCommand ( new InitSecureTopicCommand () );
		super.registerCommand ( new ListApiKeysCommand () );
		super.registerCommand ( new PutApiCommand () );
		super.registerCommand ( new writeApiKeyCommand () );
		super.registerCommand ( new EncryptApiKeysCommand () );
		super.registerCommand ( new DecryptApiKeysCommand () );
		super.registerCommand ( new NodeFetchCommand () );
		super.registerCommand ( new DropOldConsumerGroupsCommand () );
	}
+
	/**
	 * Entry point.
	 *
	 * @param args args[0] (optional) is the ZooKeeper connect string,
	 *             defaulting to "localhost:2181"; args[1] (optional) is the
	 *             config-db root path, defaulting to the standard "cambria"
	 *             ZK root.
	 * @throws IOException on console I/O problems
	 */
	public static void main ( String[] args ) throws IOException
	{
		final String connStr = args.length>0 ? args[0] : "localhost:2181";
		final ConfigDb db = new ZkConfigDb (
			connStr,
			args.length>1 ? args[1] : CommonServlet.getDefaultZkRoot ( "cambria" )
		);

		final ConfigToolContext context = new ConfigToolContext ( db, connStr, new nvWriteableTable() );
		final ConfigTool ct = new ConfigTool ();
		ct.runFromMain ( args, context );
	}
+
	/**
	 * "topics" lists every topic node under /topics; "list &lt;topic&gt;"
	 * prints one topic's raw config data, or a not-found message.
	 */
	private static class ListTopicCommand implements Command<ConfigToolContext>
	{
		@Override
		public String[] getMatches ()
		{
			// Two forms: bare "topics" (no capture group -> parts is empty)
			// and "list <topic>" (one capture group -> parts[0] is the topic).
			return new String[] { "topics", "list (\\S*)" };
		}

		@Override
		public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
		{
			// No preconditions; this command is always ready.
		}

		@Override
		public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
		{
			try
			{
				final ConfigDb db = context.getDb();
				final ConfigPath base = db.parse ( "/topics" );

				if ( parts.length > 0 )
				{
					// "list <topic>" form: print that topic's stored data.
					final ConfigPath myTopic = base.getChild ( parts[0] );
					final String data = db.load ( myTopic );
					if ( data != null )
					{
						out.println ( data );
					}
					else
					{
						out.println ( "No topic [" + parts[0] + "]" );
					}
				}
				else
				{
					// "topics" form: print the name of each child node.
					for ( ConfigPath child : db.loadChildrenNames ( base ) )
					{
						out.println ( child.getName () );
					}
				}
			}
			catch ( ConfigDbException e )
			{
				out.println ( "Command failed: " + e.getMessage() );
			}
		}

		@Override
		public void displayHelp ( PrintStream out )
		{
			out.println ( "topics" );
			out.println ( "list <topic>" );
		}
	}
+
+ private static class WriteTopicCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "write (\\S*) (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final ConfigDb db = context.getDb();
+ final ConfigPath base = db.parse ( "/topics" );
+ final ConfigPath myTopic = base.getChild ( parts[0] );
+ db.store ( myTopic, parts[1] );
+ out.println ( "wrote [" + parts[1] + "] to topic [" + parts[0] + "]" );
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "write <topic> <string>" );
+ out.println ( "\tBe careful with this. You can write data that's not compatible with Cambria's config db." );
+ }
+ }
+
+ private static class ReadTopicCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "read (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final ConfigDb db = context.getDb();
+ final ConfigPath base = db.parse ( "/topics" );
+ final ConfigPath myTopic = base.getChild ( parts[0] );
+ db.store ( myTopic, parts[1] );
+ out.println ( "wrote [" + parts[1] + "] to topic [" + parts[0] + "]" );
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "read <topic>" );
+ out.println ( "\tRead config data for a topic." );
+ }
+ }
+
	/**
	 * Creates (or overwrites) a topic entry with an owner and description.
	 */
	private static class InitSecureTopicCommand implements Command<ConfigToolContext>
	{
		@Override
		public String[] getMatches ()
		{
			// Per the help text: parts[0] = topic, parts[1] = owner api key,
			// parts[2] = description.
			return new String[] { "initTopic (\\S*) (\\S*) (\\S*)" };
		}

		@Override
		public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
		{
			// No preconditions.
		}

		@Override
		public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
		{
			try
			{
				// NOTE(review): given the help text, the argument order passed
				// here appears to be (topic, description, owner) — confirm
				// against DMaaPKafkaMetaBroker.createTopicEntry's signature.
				DMaaPKafkaMetaBroker.createTopicEntry ( context.getDb (),
					context.getDb ().parse("/topics"), parts[0], parts[2], parts[1],true );
				out.println ( "Topic [" + parts[0] + "] updated." );
			}
			catch ( ConfigDbException e )
			{
				out.println ( "Command failed: " + e.getMessage () );
			}
		}

		@Override
		public void displayHelp ( PrintStream out )
		{
			out.println ( "initTopic <topic> <ownerApiKey> <description>" );
		}
	}
+
	/**
	 * Changes the owner API key of an existing topic, preserving its
	 * current description.
	 */
	private static class SetTopicOwnerCommand implements Command<ConfigToolContext>
	{
		@Override
		public String[] getMatches ()
		{
			// parts[0] = topic name, parts[1] = new owner api key
			return new String[] { "setOwner (\\S*) (\\S*)" };
		}

		@Override
		public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
		{
			// No preconditions.
		}

		@Override
		public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
		{
			try
			{
				final Topic kt = DMaaPKafkaMetaBroker.getKafkaTopicConfig ( context.getDb(),
					context.getDb().parse ( "/topics" ), parts[0] );
				if ( kt != null )
				{
					// Re-create the topic entry keeping the existing description
					// but substituting the new owner key.
					final String desc = kt.getDescription ();

					DMaaPKafkaMetaBroker.createTopicEntry ( context.getDb (),
						context.getDb ().parse("/topics"), parts[0], desc, parts[1], true );
					out.println ( "Topic [" + parts[0] + "] updated." );
				}
				else
				{
					out.println ( "Topic [" + parts[0] + "] doesn't exist." );
				}
			}
			catch ( ConfigDbException e )
			{
				out.println ( "Command failed: " + e.getMessage () );
			}
		}

		@Override
		public void displayHelp ( PrintStream out )
		{
			out.println ( "setOwner <topic> <ownerApiKey>" );
		}
	}
+
+ private static class ListApiKeysCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "listApiKeys", "listApiKey (\\S*) (\\S*) (\\S*)", "listApiKey (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final ConfigDb db = context.getDb ();
+ if ( parts.length == 0 )
+ {
+ final BaseNsaApiDbImpl<NsaSimpleApiKey> readFrom = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory () );
+ int count = 0;
+ for ( String key : readFrom.loadAllKeys () )
+ {
+ out.println ( key );
+ count++;
+ }
+ out.println ( "" + count + " records." );
+ }
+ else
+ {
+ BaseNsaApiDbImpl<NsaSimpleApiKey> readFrom = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory () );
+ if ( parts.length == 3 )
+ {
+ readFrom = new EncryptingApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory (),
+ EncryptingLayer.readSecretKey ( parts[1] ), rrConvertor.base64Decode ( parts[2] ) );
+ }
+ final NsaSimpleApiKey apikey = readFrom.loadApiKey ( parts[0] );
+ if ( apikey == null )
+ {
+ out.println ( "Key '" + parts[0] + "' not found." );
+ }
+ else
+ {
+ out.println ( apikey.asJsonObject ().toString () );
+ }
+ }
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( JSONException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "listApiKeys" );
+ out.println ( "listApiKey <key>" );
+ out.println ( "listApiKey <key> <dbKey> <dbIv>" );
+ }
+ }
+
+ private static class PutApiCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[]
+ {
+ // these are <key> <enckey> <encinit> <value>
+ "putApiKey (secret) (\\S*) (\\S*) (\\S*) (\\S*)",
+ "putApiKey (email) (\\S*) (\\S*) (\\S*) (\\S*)",
+ "putApiKey (description) (\\S*) (\\S*) (\\S*) (\\S*)"
+ };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final ConfigDb db = context.getDb ();
+ if ( parts.length == 5 )
+ {
+ final BaseNsaApiDbImpl<NsaSimpleApiKey> apiKeyDb =
+ new EncryptingApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory (),
+ EncryptingLayer.readSecretKey ( parts[2] ), rrConvertor.base64Decode ( parts[3] ) );
+
+ final NsaSimpleApiKey apikey = apiKeyDb.loadApiKey ( parts[1] );
+ if ( apikey == null )
+ {
+ out.println ( "Key '" + parts[1] + "' not found." );
+ }
+ else
+ {
+ if ( parts[0].equalsIgnoreCase ( "secret" ) )
+ {
+ apikey.resetSecret ( parts[4] );
+ }
+ else if ( parts[0].equalsIgnoreCase ( "email" ) )
+ {
+ apikey.setContactEmail ( parts[4] );
+ }
+ else if ( parts[0].equalsIgnoreCase ( "description" ) )
+ {
+ apikey.setDescription ( parts[4] );
+ }
+
+ apiKeyDb.saveApiKey ( apikey );
+ out.println ( apikey.asJsonObject ().toString () );
+ }
+ }
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( JSONException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "putApiKey secret <apiKey> <dbKey> <dbIv> <newSecret>" );
+ out.println ( "putApiKey email <apiKey> <dbKey> <dbIv> <newEmail>" );
+ out.println ( "putApiKey description <apiKey> <dbKey> <dbIv> <newDescription>" );
+ }
+ }
+
+ private static class writeApiKeyCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[]
+ {
+ // <enckey> <encinit> <key> <secret>
+ "writeApiKey (\\S*) (\\S*) (\\S*) (\\S*)",
+ };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final ConfigDb db = context.getDb ();
+ if ( parts.length == 4 )
+ {
+ final BaseNsaApiDbImpl<NsaSimpleApiKey> apiKeyDb =
+ new EncryptingApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory (),
+ EncryptingLayer.readSecretKey ( parts[0] ), rrConvertor.base64Decode ( parts[1] ) );
+
+ apiKeyDb.deleteApiKey ( parts[2] );
+ final NsaSimpleApiKey apikey = apiKeyDb.createApiKey ( parts[2], parts[3] );
+ out.println ( apikey.asJsonObject ().toString () );
+ }
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( JSONException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( KeyExistsException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "writeApiKey <dbKey> <dbIv> <newApiKey> <newSecret>" );
+ }
+ }
+
+ private static class EncryptApiKeysCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "convertApiKeyDb", "convertApiKeyDb (\\S*) (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final String key = parts.length == 2 ? parts[0] : EncryptingLayer.createSecretKey ();
+ final String iv = parts.length == 2 ? parts[1] : rrConvertor.base64Encode ( uniqueStringGenerator.createValue ( 16 ) );
+
+ // This doesn't do well when the number of API keys is giant...
+ if ( parts.length == 0 )
+ {
+ out.println ( "YOU MUST RECORD THESE VALUES AND USE THEM IN THE SERVER CONFIG" );
+ out.println ( "Key: " + key );
+ out.println ( " IV: " + iv );
+ out.println ( "\n" );
+ out.println ( "Call again with key and IV on command line." );
+ out.println ( "\n" );
+ return; // because otherwise the values get lost
+ }
+
+ final ConfigDb db = context.getDb ();
+ final BaseNsaApiDbImpl<NsaSimpleApiKey> readFrom = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory () );
+ final EncryptingApiDbImpl<NsaSimpleApiKey> writeTo = new EncryptingApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory (),
+ EncryptingLayer.readSecretKey ( key ), rrConvertor.base64Decode ( iv ) );
+
+ int count = 0;
+ for ( Entry<String, NsaSimpleApiKey> e : readFrom.loadAllKeyRecords ().entrySet () )
+ {
+ out.println ( "-------------------------------" );
+ out.println ( "Converting " + e.getKey () );
+ final String was = e.getValue ().asJsonObject ().toString ();
+ out.println ( was );
+
+ writeTo.saveApiKey ( e.getValue () );
+ count++;
+ }
+
+ out.println ( "Conversion complete, converted " + count + " records." );
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( NoSuchAlgorithmException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "convertApiKeyDb" );
+ out.println ( "\tconvert an API key DB to an encrypted DB and output the cipher details" );
+ }
+ }
+
+ private static class DecryptApiKeysCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "revertApiKeyDb (\\S*) (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final String keyStr = parts[0];
+ final String iv = parts[1];
+ final byte[] ivBytes = rrConvertor.base64Decode ( iv );
+
+ final ConfigDb db = context.getDb ();
+ final EncryptingApiDbImpl<NsaSimpleApiKey> readFrom = new EncryptingApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory (),
+ EncryptingLayer.readSecretKey ( keyStr ), ivBytes );
+ final BaseNsaApiDbImpl<NsaSimpleApiKey> writeTo = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( db, new NsaSimpleApiKeyFactory () );
+
+ int count = 0;
+ for ( String apiKey : readFrom.loadAllKeys () )
+ {
+ out.println ( "Converting " + apiKey );
+ final NsaSimpleApiKey record = readFrom.loadApiKey ( apiKey );
+ if ( record == null )
+ {
+ out.println ( "Couldn't load " + apiKey );
+ }
+ else
+ {
+ writeTo.saveApiKey ( record );
+ count++;
+ }
+ }
+ out.println ( "Conversion complete, converted " + count + " records." );
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "revertApiKeyDb <keyBase64> <ivBase64>" );
+ out.println ( "\trevert an API key DB to a deencrypted DB" );
+ }
+ }
+
+ private static class NodeFetchCommand implements Command<ConfigToolContext>
+ {
+ @Override
+ public String[] getMatches ()
+ {
+ return new String[] { "node (\\S*)" };
+ }
+
+ @Override
+ public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
+ {
+ }
+
+ @Override
+ public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
+ {
+ try
+ {
+ final String node = parts[0];
+
+ final ConfigDb db = context.getDb ();
+ final ConfigPath cp = db.parse ( node );
+
+ boolean doneOne = false;
+ for ( ConfigPath child : db.loadChildrenNames ( cp ) )
+ {
+ out.println ( "\t- " + child.getName () );
+ doneOne = true;
+ }
+ if ( doneOne )
+ {
+ out.println ();
+ }
+ else
+ {
+ out.println ( "(No child nodes of '" + node + "')" );
+ }
+
+ final String val = db.load ( cp );
+ if ( val == null )
+ {
+ out.println ( "(No data at '" + node + "')" );
+ }
+ else
+ {
+ out.println ( val );
+ }
+ }
+ catch ( ConfigDbException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ catch ( IllegalArgumentException e )
+ {
+ out.println ( "Command failed: " + e.getMessage() );
+ }
+ }
+
+ @Override
+ public void displayHelp ( PrintStream out )
+ {
+ out.println ( "node <nodeName>" );
+ out.println ( "\tread a config db node" );
+ }
+ }
+
	/**
	 * Shows or drops consumer groups whose ZooKeeper offset nodes have not
	 * been modified for more than a given number of days. "showOldConsumers"
	 * only reports; "dropOldConsumers" also deletes. Work is capped at
	 * kMaxRemovals groups per invocation.
	 */
	private static class DropOldConsumerGroupsCommand implements Command<ConfigToolContext>
	{
		// Safety cap on how many groups one invocation may flag/remove.
		private final long kMaxRemovals = 500;

		@Override
		public String[] getMatches ()
		{
			// parts[0] = verb (show vs drop), parts[1] = max age in days
			return new String[] { "(dropOldConsumers) (\\S*)", "(showOldConsumers) (\\S*)" };
		}

		@Override
		public void checkReady ( ConfigToolContext context ) throws CommandNotReadyException
		{
			// No preconditions.
		}

		@Override
		public void execute ( String[] parts, ConfigToolContext context, PrintStream out ) throws CommandNotReadyException
		{
			try
			{
				final boolean runDrops = parts[0].equalsIgnoreCase ( "dropOldConsumers" );
				final String maxAgeInDaysStr = parts[1];
				final int maxAgeInDays = Integer.parseInt ( maxAgeInDaysStr );
				// Cutoff in epoch SECONDS; compared against ZK mod times below.
				final long oldestEpochSecs = ( NsaClock.now () / 1000 ) - ( 24 * 60 * 60 * maxAgeInDays );

				out.println ( "Dropping consumer groups older than " + new Date ( oldestEpochSecs * 1000 ) );

				final ConfigDb db = context.getDb ();

				// kafka updates consumer partition records in ZK each time a message
				// is served. we can determine which consumers are old based on a lack
				// of update to the partition entries
				// (see https://cwiki.apache.org/confluence/display/KAFKA/Kafka+data+structures+in+Zookeeper)

				// kafka only works with ZK, and our configDb was constructed with a non-kafka
				// root node. We have to switch it to get to the right content...
				if ( ! ( db instanceof ZkConfigDb ) )
				{
					throw new ConfigDbException ( "You can only show/drop old consumers against a ZK config db." );
				}

				// Re-open at the ZK root so that "/consumers" resolves to
				// Kafka's own tree rather than this tool's config subtree.
				final ZkConfigDb newZkDb = new ZkConfigDb ( context.getConnectionString (), "" );
				long cgCount = 0;

				final LinkedList<ConfigPath> removals = new LinkedList<ConfigPath> ();
				for ( ConfigPath consumerGroupName : newZkDb.loadChildrenNames ( newZkDb.parse ( "/consumers" ) ) )
				{
					cgCount++;
					if ( cgCount % 500 == 0 )
					{
						// Progress marker for large installations.
						out.println ( "" + cgCount + " groups examined" );
					}

					boolean foundAnything = false;
					boolean foundRecentUse = false;
					long mostRecent = -1;

					// each consumer group has an "offsets" entry, which contains 0 or more topic entries.
					// each topic contains partition nodes.
					for ( ConfigPath topic : newZkDb.loadChildrenNames ( consumerGroupName.getChild ( "offsets" ) ) )
					{
						for ( ConfigPath offset : newZkDb.loadChildrenNames ( topic ) )
						{
							foundAnything = true;

							final long modTime = newZkDb.getLastModificationTime ( offset );
							mostRecent = Math.max ( mostRecent, modTime );

							// Stop scanning this group as soon as one recent
							// update proves it's still active.
							foundRecentUse = ( modTime > oldestEpochSecs );
							if ( foundRecentUse ) break;
						}
						if ( foundRecentUse ) break;
					}

					// decide if this consumer group is old
					out.println ( "Group " + consumerGroupName.getName () + " was most recently used " + new Date ( mostRecent*1000 ) );
					if ( foundAnything && !foundRecentUse )
					{
						removals.add ( consumerGroupName );
					}

					if ( removals.size () >= kMaxRemovals )
					{
						break;
					}
				}

				// removals
				for ( ConfigPath consumerGroupName : removals )
				{
					out.println ( "Group " + consumerGroupName.getName () + " has no recent activity." );
					if ( runDrops )
					{
						out.println ( "Removing group " + consumerGroupName.getName () + "..." );
						newZkDb.clear ( consumerGroupName );
					}
				}
			}
			catch ( ConfigDbException e )
			{
				out.println ( "Command failed: " + e.getMessage() );
			}
			catch ( NumberFormatException e )
			{
				out.println ( "Command failed: " + e.getMessage() );
			}
			catch ( JSONException e )
			{
				out.println ( "Command failed: " + e.getMessage() );
			}
		}

		@Override
		public void displayHelp ( PrintStream out )
		{
			out.println ( "showOldConsumers <minAgeInDays>" );
			out.println ( "dropOldConsumers <minAgeInDays>" );
			out.println ( "\tDrop (or just show) any consumer group that has been inactive longer than <minAgeInDays> days." );
			out.println ();
			out.println ( "\tTo be safe, <minAgeInDays> should be much higher than the maximum storage time on the Kafka topics." );
			out.println ( "\tA very old consumer will potentially miss messages, but will resume at the oldest message, while a" );
			out.println ( "\tdeleted consumer will start at the current message if it ever comes back." );
			out.println ();
			out.println ( "\tNote that show/drops are limited to " + kMaxRemovals + " records per invocation." );
		}
	}
+}
diff --git a/src/main/java/com/att/nsa/dmaap/tools/ConfigToolContext.java b/src/main/java/com/att/nsa/dmaap/tools/ConfigToolContext.java
new file mode 100644
index 0000000..bb44d1f
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/tools/ConfigToolContext.java
@@ -0,0 +1,69 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.tools;
+
+import com.att.nsa.cambria.beans.DMaaPMetricsSet;
+import com.att.nsa.cmdtool.CommandContext;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+public class ConfigToolContext implements CommandContext
+{
+ public ConfigToolContext ( ConfigDb db, String connStr, rrNvReadable cs )
+ {
+ fDb = db;
+ fConnStr = connStr;
+ fMetrics = new DMaaPMetricsSet( cs );
+ }
+
+ @Override
+ public void requestShutdown ()
+ {
+ fQuit = true;
+ }
+
+ @Override
+ public boolean shouldContinue ()
+ {
+ return !fQuit;
+ }
+
+ public ConfigDb getDb ()
+ {
+ return fDb;
+ }
+
+ public String getConnectionString ()
+ {
+ return fConnStr;
+ }
+
+ public DMaaPMetricsSet getMetrics ()
+ {
+ return fMetrics;
+ }
+
+ private final ConfigDb fDb;
+ private final String fConnStr;
+ private boolean fQuit = false;
+ private DMaaPMetricsSet fMetrics;
+}
diff --git a/src/main/java/com/att/nsa/dmaap/util/ContentLengthInterceptor.java b/src/main/java/com/att/nsa/dmaap/util/ContentLengthInterceptor.java
new file mode 100644
index 0000000..fe1c768
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/util/ContentLengthInterceptor.java
@@ -0,0 +1,132 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.util;
+
+import java.util.Map;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.springframework.stereotype.Component;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import ajsc.beans.interceptors.AjscInterceptor;
+
+/**
+ * AJSC Intercepter implementation of ContentLengthFilter
+ */
+@Component
+public class ContentLengthInterceptor implements AjscInterceptor{
+
+
+ private String defLength;
+ //private Logger log = Logger.getLogger(ContentLengthInterceptor.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthInterceptor.class);
+
+
+ /**
+ * Intercepter method to intercept requests before processing
+ */
+ @Override
+ public boolean allowOrReject(HttpServletRequest httpservletrequest, HttpServletResponse httpservletresponse,
+ Map map) throws Exception {
+
+ log.info("inside Interceptor allowOrReject content length checking before pub/sub");
+
+ JSONObject jsonObj = null;
+ int requestLength = 0;
+ setDefLength(System.getProperty("maxcontentlength"));
+ try {
+ // retrieving content length from message header
+
+ if (null != httpservletrequest.getHeader("Content-Length")) {
+ requestLength = Integer.parseInt(httpservletrequest.getHeader("Content-Length"));
+ }
+ // retrieving encoding from message header
+ String transferEncoding = httpservletrequest.getHeader("Transfer-Encoding");
+ // checking for no encoding, chunked and requestLength greater then
+ // default length
+ if (null != transferEncoding && !(transferEncoding.contains("chunked"))
+ && (requestLength > Integer.parseInt(getDefLength()))) {
+ jsonObj = new JSONObject().append("defaultlength", getDefLength())
+ .append("requestlength", requestLength);
+ log.error("message length is greater than default");
+ throw new CambriaApiException(jsonObj);
+ }
+ else if (null == transferEncoding && (requestLength > Integer.parseInt(getDefLength())))
+ {
+ jsonObj = new JSONObject().append("defaultlength", getDefLength()).append(
+ "requestlength", requestLength);
+ log.error("Request message is not chunked or request length is greater than default length");
+ throw new CambriaApiException(jsonObj);
+
+
+ }
+ else
+ {
+ //chain.doFilter(req, res);
+ return true;
+ }
+
+ } catch (CambriaApiException | NumberFormatException | JSONException e) {
+
+ log.info("Exception obj--"+e);
+ log.error("message size is greater then default"+e.getMessage());
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_REQUEST_TOO_LONG,
+ DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), System.getProperty("msg_size_exceeds")
+ + jsonObj.toString());
+ log.info(errRes.toString());
+
+
+ map.put(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,"test");
+ httpservletresponse.setStatus(HttpStatus.SC_REQUEST_TOO_LONG);
+ httpservletresponse.getOutputStream().write(errRes.toString().getBytes());
+ return false;
+ }
+
+
+
+ }
+
+
+ /**
+ * Get Default Content Length
+ * @return defLength
+ */
+ public String getDefLength() {
+ return defLength;
+ }
+ /**
+ * Set Default Content Length
+ * @param defLength
+ */
+ public void setDefLength(String defLength) {
+ this.defLength = defLength;
+ }
+
+
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/util/DMaaPAuthFilter.java b/src/main/java/com/att/nsa/dmaap/util/DMaaPAuthFilter.java
new file mode 100644
index 0000000..ae79938
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/util/DMaaPAuthFilter.java
@@ -0,0 +1,164 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.util;
+
+import java.io.IOException;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+//import com.att.eelf.configuration.EELFLogger;
+//import com.att.eelf.configuration.EELFManager;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import com.att.cadi.filter.CadiFilter;
+import javax.servlet.FilterConfig;
+
+/**
+ * This is a Servlet Filter class overriding the AjscCadiFilter
+ */
+@Component
+public class DMaaPAuthFilter extends CadiFilter {
+
+ // private Logger log = Logger.getLogger(DMaaPAuthFilter.class.toString());
+
+ // private static final EELFLogger log =
+ // EELFManager.getInstance().getLogger(DMaaPAuthFilter.class);
+ private Logger log = LoggerFactory.getLogger(DMaaPAuthFilter.class);
+
+ final Boolean enabled = "authentication-scheme-1".equalsIgnoreCase(System.getProperty("CadiAuthN"));
+
+ /**
+ * This method will disable Cadi Authentication if cambria headers are
+ * present in the request else continue with Cadi Authentication
+ */
+ public void init(FilterConfig filterConfig) throws ServletException {
+
+ try {
+
+ super.init(filterConfig);
+
+ } catch (Exception ex) {
+ log.error("Ajsc Cadi Filter Exception:" + ex.getMessage());
+
+ }
+ }
+
+ @Override
+ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
+ throws IOException, ServletException {
+
+ log.info("inside servlet filter Cambria Auth Headers checking before doing other Authentication");
+ HttpServletRequest request = (HttpServletRequest) req;
+
+ boolean forceAAF = Boolean.valueOf(System.getProperty("forceAAF"));
+ if (forceAAF ||
+ null != request.getHeader("Authorization") ||
+ (null != request.getHeader("AppName") &&
+ request.getHeader("AppName").equalsIgnoreCase("invenio") &&
+ null != request.getHeader("cookie"))) {
+
+ if (!enabled ||
+ request.getMethod().equalsIgnoreCase("head") ||
+ request.getHeader("DME2HealthCheck") != null) {
+
+ chain.doFilter(req, res);
+
+ } else {
+
+ super.doFilter(req, res, chain);
+
+ }
+ } else {
+
+ System.setProperty("CadiAuthN", "authentication-scheme-2");
+ chain.doFilter(req, res);
+
+ }
+
+ }
+
+ @Override
+ public void log(Exception e, Object... elements) {
+ // TODO Auto-generated method stub
+ // super.log(e, elements);
+ // System.out.println(convertArrayToString(elements));
+ log.error(convertArrayToString(elements), e);
+
+ }
+
+ @Override
+ public void log(Level level, Object... elements) {
+
+ // System.out.println(willWrite().compareTo(level) );
+ if (willWrite().compareTo(level) <= 0) {
+ switch (level) {
+ case DEBUG:
+ log.debug(convertArrayToString(elements));
+ break;
+ case INFO:
+ log.info(convertArrayToString(elements));
+ break;
+ case ERROR:
+ log.error(convertArrayToString(elements));
+ break;
+ case AUDIT:
+ log.info(convertArrayToString(elements));
+ break;
+ case INIT:
+ log.info(convertArrayToString(elements));
+ break;
+ case WARN:
+ log.warn(convertArrayToString(elements));
+ break;
+ default:
+
+ log.warn(convertArrayToString(elements));
+
+ }
+
+ }
+
+ }
+
+ private String convertArrayToString(Object[] elements) {
+
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < elements.length; i++) {
+ if (elements[i] instanceof String)
+ strBuilder.append((String) elements[i]);
+ else if (elements[i] instanceof Integer)
+ strBuilder.append((Integer) elements[i]);
+ else
+ strBuilder.append(elements[i]);
+ }
+ String newString = strBuilder.toString();
+ return newString;
+ }
+
+}
diff --git a/src/main/java/com/att/nsa/dmaap/util/ServicePropertiesMapBean.java b/src/main/java/com/att/nsa/dmaap/util/ServicePropertiesMapBean.java
new file mode 100644
index 0000000..c5173c1
--- /dev/null
+++ b/src/main/java/com/att/nsa/dmaap/util/ServicePropertiesMapBean.java
@@ -0,0 +1,41 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap.util;
+
+import com.att.nsa.dmaap.filemonitor.ServicePropertiesMap;
+
+/**
+ * Class ServicePropertiesMapBean
+ * @author author
+ *
+ */
+public class ServicePropertiesMapBean {
+ /**
+ * get property
+ * @param propFileName propFileName
+ * @param propertyKey propertyKey
+ * @return str
+ */
+ public static String getProperty(String propFileName, String propertyKey) {
+ return ServicePropertiesMap.getProperty(propFileName, propertyKey);
+ }
+}
diff --git a/src/main/resources/docker-compose/Dockerfile b/src/main/resources/docker-compose/Dockerfile
new file mode 100644
index 0000000..0649fcc
--- /dev/null
+++ b/src/main/resources/docker-compose/Dockerfile
@@ -0,0 +1,22 @@
FROM anapsix/alpine-java

# MAINTAINER is deprecated in favor of LABEL
LABEL maintainer="Wurstmeister"

# --no-cache fetches a fresh index and keeps it out of the image layer
# (the former "--update" left the apk cache behind)
RUN apk add --no-cache unzip wget curl docker jq coreutils

ENV KAFKA_VERSION="0.8.1.1" SCALA_VERSION="2.9.2"
ADD download-kafka.sh /tmp/download-kafka.sh
RUN chmod a+x /tmp/download-kafka.sh && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz

VOLUME ["/kafka"]

ENV KAFKA_HOME /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION}
ADD start-kafka.sh /usr/bin/start-kafka.sh
ADD broker-list.sh /usr/bin/broker-list.sh
ADD create-topics.sh /usr/bin/create-topics.sh
# The scripts need to have executable permission
RUN chmod a+x /usr/bin/start-kafka.sh && \
    chmod a+x /usr/bin/broker-list.sh && \
    chmod a+x /usr/bin/create-topics.sh
# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
CMD ["start-kafka.sh"]
diff --git a/src/main/resources/docker-compose/LICENSE b/src/main/resources/docker-compose/LICENSE
new file mode 100644
index 0000000..e06d208
--- /dev/null
+++ b/src/main/resources/docker-compose/LICENSE
@@ -0,0 +1,202 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/src/main/resources/docker-compose/README.md b/src/main/resources/docker-compose/README.md
new file mode 100644
index 0000000..b8aaf8b
--- /dev/null
+++ b/src/main/resources/docker-compose/README.md
@@ -0,0 +1,78 @@
+[![Docker Pulls](https://img.shields.io/docker/pulls/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/)
+[![Docker Stars](https://img.shields.io/docker/stars/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/)
+[![](https://badge.imagelayers.io/wurstmeister/kafka:latest.svg)](https://imagelayers.io/?images=wurstmeister/kafka:latest)
+
+kafka-docker
+============
+
+Dockerfile for [Apache Kafka](http://kafka.apache.org/)
+
+The image is available directly from https://registry.hub.docker.com/
+
+## Pre-Requisites
+
+- install docker-compose [https://docs.docker.com/compose/install/](https://docs.docker.com/compose/install/)
+- modify the ```KAFKA_ADVERTISED_HOST_NAME``` in ```docker-compose.yml``` to match your docker host IP (Note: Do not use localhost or 127.0.0.1 as the host ip if you want to run multiple brokers.)
+- if you want to customise any Kafka parameters, simply add them as environment variables in ```docker-compose.yml```, e.g. in order to increase the ```message.max.bytes``` parameter set the environment to ```KAFKA_MESSAGE_MAX_BYTES: 2000000```. To turn off automatic topic creation set ```KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'```
+
+## Usage
+
+Start a cluster:
+
+- ```docker-compose up -d ```
+
+Add more brokers:
+
+- ```docker-compose scale kafka=3```
+
+Destroy a cluster:
+
+- ```docker-compose stop```
+
+## Note
+
+The default ```docker-compose.yml``` should be seen as a starting point. By default each broker will get a new port number and broker id on restart. Depending on your use case this might not be desirable. If you need to use specific ports and broker ids, modify the docker-compose configuration accordingly, e.g. [docker-compose-single-broker.yml](https://github.com/wurstmeister/kafka-docker/blob/master/docker-compose-single-broker.yml):
+
+- ```docker-compose -f docker-compose-single-broker.yml up```
+
+## Broker IDs
+
+If you don't specify a broker id in your docker-compose file, it will automatically be generated (see [https://issues.apache.org/jira/browse/KAFKA-1070](https://issues.apache.org/jira/browse/KAFKA-1070)). This allows scaling up and down. In this case it is recommended to use the ```--no-recreate``` option of docker-compose to ensure that containers are not re-created and thus keep their names and ids.
+
+
+## Automatically create topics
+
+If you want to have kafka-docker automatically create topics in Kafka during
+creation, a ```KAFKA_CREATE_TOPICS``` environment variable can be
+added in ```docker-compose.yml```.
+
+Here is an example snippet from ```docker-compose.yml```:
+
+ environment:
+ KAFKA_CREATE_TOPICS: "Topic1:1:3,Topic2:1:1"
+
+```Topic1``` will have 1 partition and 3 replicas, ```Topic2``` will have 1 partition and 1 replica.
+
+## Advertised hostname
+
+You can configure the advertised hostname in different ways
+
+1. explicitly, using ```KAFKA_ADVERTISED_HOST_NAME```
+2. via a command, using ```HOSTNAME_COMMAND```, e.g. ```HOSTNAME_COMMAND: "route -n | awk '/UG[ \t]/{print $$2}'"```
+
+When using commands, make sure you review the "Variable Substitution" section in [https://docs.docker.com/compose/compose-file/](https://docs.docker.com/compose/compose-file/)
+
+If ```KAFKA_ADVERTISED_HOST_NAME``` is specified, it takes precedence over ```HOSTNAME_COMMAND```
+
+For AWS deployment, you can use the Metadata service to get the container host's IP:
+```
+HOSTNAME_COMMAND=wget -t3 -T2 -qO- http://169.254.169.254/latest/meta-data/local-ipv4
+```
+Reference: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
+
+## Tutorial
+
+[http://wurstmeister.github.io/kafka-docker/](http://wurstmeister.github.io/kafka-docker/)
+
+
+
diff --git a/src/main/resources/docker-compose/broker-list.sh b/src/main/resources/docker-compose/broker-list.sh
new file mode 100644
index 0000000..238c251
--- /dev/null
+++ b/src/main/resources/docker-compose/broker-list.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
# Build a comma-separated list of Kafka broker endpoints on this docker host.
# Finds every container exposing port 9092; $HOST_IP must be set by the caller.
CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
# "docker port" prints e.g. "0.0.0.0:32768"; substitute the host IP
BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
# join the space-separated endpoints with commas
echo $BROKERS | sed -e 's/ /,/g'
diff --git a/src/main/resources/docker-compose/create-topics.sh b/src/main/resources/docker-compose/create-topics.sh
new file mode 100644
index 0000000..4e46cd2
--- /dev/null
+++ b/src/main/resources/docker-compose/create-topics.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+
# Wait for Kafka to start listening on $KAFKA_PORT (up to $START_TIMEOUT
# seconds, default 600), then create the topics listed in the
# $KAFKA_CREATE_TOPICS environment variable ("name:partitions:replicas,...").

if [[ -z "$START_TIMEOUT" ]]; then
    START_TIMEOUT=600
fi

start_timeout_exceeded=false
count=0
step=10
# awk exits non-zero (ending the loop) once something listens on $KAFKA_PORT
while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
    echo "waiting for kafka to be ready"
    sleep $step;
    count=$(expr $count + $step)
    if [ $count -gt $START_TIMEOUT ]; then
        start_timeout_exceeded=true
        break
    fi
done

if $start_timeout_exceeded; then
    echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
    exit 1
fi

if [[ -n $KAFKA_CREATE_TOPICS ]]; then
    # entries are comma-separated; each entry is name:partitions:replicas
    IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
        echo "creating topics: $topicToCreate"
        IFS=':' read -a topicConfig <<< "$topicToCreate"
        # JMX_PORT is cleared for the CLI JVM - presumably to avoid clashing
        # with the broker's JMX settings; confirm.
        # NOTE(review): "--partition" relies on the CLI accepting unambiguous
        # option prefixes; the canonical flag is "--partitions" - verify
        # against the Kafka version in use.
        JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partition ${topicConfig[1]} --topic "${topicConfig[0]}"
    done
fi
diff --git a/src/main/resources/docker-compose/docker-compose-single-broker.yml b/src/main/resources/docker-compose/docker-compose-single-broker.yml
new file mode 100644
index 0000000..4d8e9f5
--- /dev/null
+++ b/src/main/resources/docker-compose/docker-compose-single-broker.yml
@@ -0,0 +1,16 @@
+# Single-broker Kafka + Zookeeper stack for local testing.
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+  kafka:
+    # Built from the Dockerfile in this directory.
+    build: .
+    ports:
+      - "9092:9092"
+    environment:
+      # NOTE(review): hard-coded docker-machine address — confirm it matches
+      # the host actually running this stack.
+      KAFKA_ADVERTISED_HOST_NAME: 192.168.99.100
+      # Auto-create topic "test" with 1 partition, replication factor 1
+      # (consumed by create-topics.sh inside the kafka image).
+      KAFKA_CREATE_TOPICS: "test:1:1"
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      # start-kafka.sh runs "docker port" against the host daemon.
+      - /var/run/docker.sock:/var/run/docker.sock
diff --git a/src/main/resources/docker-compose/docker-compose.yml b/src/main/resources/docker-compose/docker-compose.yml
new file mode 100644
index 0000000..20cf977
--- /dev/null
+++ b/src/main/resources/docker-compose/docker-compose.yml
@@ -0,0 +1,27 @@
+# Zookeeper + Kafka + DMaaP Message Router stack.
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+  kafka:
+    # Built from the Dockerfile in this directory.
+    build: .
+    ports:
+      - "9092:9092"
+    environment:
+      # NOTE(review): hard-coded docker bridge address — confirm it matches
+      # the deployment host's docker network.
+      KAFKA_ADVERTISED_HOST_NAME: 172.18.0.1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+
+    volumes:
+      # start-kafka.sh runs "docker port" against the host daemon.
+      - /var/run/docker.sock:/var/run/docker.sock
+
+  dmaap:
+    image: attos/dmaap
+    ports:
+      # 3904 = HTTP, 3905 = HTTPS (matches EXPOSE in the dmaap Dockerfile).
+      - "3904:3904"
+      - "3905:3905"
+    volumes:
+      # Host-provided Message Router properties override the bundled config.
+      - /var/tmp/MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+    depends_on:
+      - zookeeper
+      - kafka
diff --git a/src/main/resources/docker-compose/download-kafka.sh b/src/main/resources/docker-compose/download-kafka.sh
new file mode 100644
index 0000000..3dda45e
--- /dev/null
+++ b/src/main/resources/docker-compose/download-kafka.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
+url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/src/main/resources/docker-compose/start-kafka-shell.sh b/src/main/resources/docker-compose/start-kafka-shell.sh
new file mode 100644
index 0000000..025259e
--- /dev/null
+++ b/src/main/resources/docker-compose/start-kafka-shell.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+# Launch an interactive Kafka shell container. $1 = host IP (exported as
+# HOST_IP), $2 = zookeeper connect string (exported as ZK); the docker socket
+# is mounted so tools inside can inspect the host's containers.
+docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP=$1 -e ZK=$2 -i -t wurstmeister/kafka /bin/bash
diff --git a/src/main/resources/docker-compose/start-kafka.sh b/src/main/resources/docker-compose/start-kafka.sh
new file mode 100644
index 0000000..87047ad
--- /dev/null
+++ b/src/main/resources/docker-compose/start-kafka.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# Derive broker configuration from the container environment, rewrite
+# $KAFKA_HOME/config/server.properties accordingly, then run Kafka as a child
+# process so docker-stop signals can be forwarded for a clean shutdown.
+
+if [[ -z "$KAFKA_PORT" ]]; then
+ export KAFKA_PORT=9092
+fi
+# Ask the host docker daemon which host port is mapped onto the broker port
+# (requires /var/run/docker.sock to be mounted into this container).
+if [[ -z "$KAFKA_ADVERTISED_PORT" ]]; then
+ export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
+fi
+if [[ -z "$KAFKA_BROKER_ID" ]]; then
+ # By default auto allocate broker ID
+ export KAFKA_BROKER_ID=1
+fi
+if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+ export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+fi
+# Build the zookeeper connect string from docker-link env vars of the form
+# ZK..._PORT_2181_TCP=tcp://host:port, comma-joined.
+if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
+ export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
+fi
+
+# Inject custom heap settings directly into kafka-server-start.sh, then unset
+# the variable so the script's own export is the single source of truth.
+if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
+ sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
+ unset KAFKA_HEAP_OPTS
+fi
+
+# Allow the advertised hostname to be computed at runtime (e.g. a metadata
+# lookup command supplied via HOSTNAME_COMMAND).
+if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
+ export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
+fi
+
+# Mirror every KAFKA_* env var (except KAFKA_HOME) into server.properties:
+# KAFKA_FOO_BAR=baz becomes foo.bar=baz, replacing an existing (possibly
+# commented-out) entry or appending a new one.
+for VAR in `env`
+do
+ if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
+ kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
+ env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
+ if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
+ sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
+ else
+ echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
+ fi
+ fi
+done
+
+# Hook for arbitrary site-specific initialization before the broker starts.
+if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
+ eval $CUSTOM_INIT_SCRIPT
+fi
+
+
+KAFKA_PID=0
+
+# see https://medium.com/@gchudnov/trapping-signals-in-docker-containers-7a57fdda7d86#.bh35ir4u5
+# Forward TERM to the broker and wait for it so logs flush before PID 1 exits.
+term_handler() {
+ echo 'Stopping Kafka....'
+ if [ $KAFKA_PID -ne 0 ]; then
+ kill -s TERM "$KAFKA_PID"
+ wait "$KAFKA_PID"
+ fi
+ echo 'Kafka stopped.'
+ exit
+}
+
+
+# Capture kill requests to stop properly
+# create-topics.sh (backgrounded) waits for the broker port, then provisions
+# the topics listed in $KAFKA_CREATE_TOPICS.
+trap "term_handler" SIGHUP SIGINT SIGTERM
+create-topics.sh &
+$KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties &
+KAFKA_PID=$!
+
+wait "$KAFKA_PID"
diff --git a/src/main/resources/docker/Dockerfile b/src/main/resources/docker/Dockerfile
new file mode 100644
index 0000000..e6356f9
--- /dev/null
+++ b/src/main/resources/docker/Dockerfile
@@ -0,0 +1,6 @@
+FROM openjdk:8-jdk
+# Application tree (AJSC runtime, libs, bundleconfig) baked in under /appl.
+ADD appl /appl/
+ADD startup.sh /startup.sh
+RUN chmod 700 /startup.sh
+# NOTE(review): shell-form ENTRYPOINT runs under "/bin/sh -c", so the JVM does
+# not receive SIGTERM directly on "docker stop" — confirm whether graceful
+# shutdown matters here. Exec form would also require adding a shebang to
+# startup.sh, which currently has none.
+ENTRYPOINT ./startup.sh
+# 3904 = HTTP, 3905 = HTTPS (matches the ports passed to ajsc-runner).
+EXPOSE 3904 3905
diff --git a/src/main/resources/docker/startup.sh b/src/main/resources/docker/startup.sh
new file mode 100644
index 0000000..933ceca
--- /dev/null
+++ b/src/main/resources/docker/startup.sh
@@ -0,0 +1,29 @@
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+# Launch the AJSC runtime for the DMaaP Message Router container.
+# ${project.artifactId} and ${ajscRuntimeVersion} are substituted by Maven
+# resource filtering at build time.
+root_directory="/appl/${project.artifactId}"
+config_directory="/appl/${project.artifactId}/bundleconfig"
+# Use an absolute path: the original relative "appl/..." resolved only when
+# the working directory happened to be "/".
+runner_file="/appl/${project.artifactId}/lib/ajsc-runner-${ajscRuntimeVersion}.jar"
+echo "AJSC HOME directory is " $root_directory
+echo "AJSC Conf Directory is" $config_directory
+echo "Starting using" $runner_file
+
+java -jar -XX:MaxPermSize=256m -XX:PermSize=32m -DSOACLOUD_SERVICE_VERSION=0.0.1 -DAJSC_HOME=$root_directory -DAJSC_CONF_HOME=$config_directory -DAJSC_SHARED_CONFIG=$config_directory -DAJSC_HTTPS_PORT=3905 -Dplatform=NON-PROD -DPid=1306 -Dlogback.configurationFile=/appl/dmaapMR1/bundleconfig/etc/logback.xml -Xmx512m -Xms512m $runner_file context=/ port=3904 sslport=3905
diff --git a/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context b/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
new file mode 100644
index 0000000..8514196
--- /dev/null
+++ b/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"__module_ajsc_namespace_name__:__module_ajsc_namespace_version__","contextName":"__module_ajsc_namespace_name__","contextVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ Context"}} \ No newline at end of file
diff --git a/src/main/runtime/context/default#0.context b/src/main/runtime/context/default#0.context
new file mode 100644
index 0000000..d1b5ab4
--- /dev/null
+++ b/src/main/runtime/context/default#0.context
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"default:0","contextName":"default","contextVersion":"0","description":"Default Context"}} \ No newline at end of file
diff --git a/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json b/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
new file mode 100644
index 0000000..d0954cf
--- /dev/null
+++ b/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
@@ -0,0 +1 @@
+{"deploymentPackage":{"Class":"ajsc.DeploymentPackage","Id":"__module_ajsc_namespace_name__:__module_ajsc_namespace_version__","namespace":"__module_ajsc_namespace_name__","namespaceVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ __module_ajsc_namespace_version__ - default description","userId":"ajsc"}} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/ajscadmin.json b/src/main/runtime/shiroRole/ajscadmin.json
new file mode 100644
index 0000000..f5e981e
--- /dev/null
+++ b/src/main/runtime/shiroRole/ajscadmin.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"ajscadmin","name":"ajscadmin","permissions":"[ajscadmin:*, ajsc:*]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644
index 0000000..2dae9f5
--- /dev/null
+++ b/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:__module_ajsc_namespace_name__","name":"contextadmin:__module_ajsc_namespace_name__","permissions":"[]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#default.json b/src/main/runtime/shiroRole/contextadmin#default.json
new file mode 100644
index 0000000..5de814e
--- /dev/null
+++ b/src/main/runtime/shiroRole/contextadmin#default.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:default","name":"contextadmin:default","permissions":"[]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUser/ajsc.json b/src/main/runtime/shiroUser/ajsc.json
new file mode 100644
index 0000000..f4c7855
--- /dev/null
+++ b/src/main/runtime/shiroUser/ajsc.json
@@ -0,0 +1 @@
+{"shiroUserClass":"ajsc.auth.ShiroUser","shiroUserId":"ajsc","passwordHash":"9471697417008c880720ba54c6038791ad7e98f3b88136fe34f4d31a462dd27a","permissions":"[*:*]","username":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json b/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
new file mode 100644
index 0000000..cb8d483
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:ajscadmin","roleId":"ajscadmin","userId":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644
index 0000000..95d2361
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:__module_ajsc_namespace_name__","roleId":"contextadmin:__module_ajsc_namespace_name__","userId":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
new file mode 100644
index 0000000..2bd5063
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:default","roleId":"contextadmin:default","userId":"ajsc"} \ No newline at end of file
diff --git a/src/main/scripts/cambria.sh b/src/main/scripts/cambria.sh
new file mode 100644
index 0000000..f74aa7b
--- /dev/null
+++ b/src/main/scripts/cambria.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# Resolve the installation root, pick a JVM, ensure a logs directory exists,
+# and launch the Cambria API server in the foreground.
+
+# switched this from CAMBRIA_API_HOME, which should be declared in the env.
+# harmless to overwrite it here, but it's confusing to do so.
+BASE_DIR=`dirname "$0"`/..
+
+# use JAVA_HOME if provided
+if [ -n "${CAMBRIA_JAVA_HOME}" ]; then
+	JAVA=${CAMBRIA_JAVA_HOME}/bin/java
+elif [ -n "${JAVA_HOME}" ]; then
+	JAVA=${JAVA_HOME}/bin/java
+else
+	JAVA=java
+fi
+
+# use the logs dir set in environment, or the installation's logs dir if not set
+if [ -z "$CAMBRIA_LOGS_HOME" ]; then
+	CAMBRIA_LOGS_HOME=$BASE_DIR/logs
+fi
+
+mkdir -p ${CAMBRIA_LOGS_HOME}
+# run java. The classpath is the etc dir for config files, and the lib dir
+# for all the jars.
+#
+# don't pipe stdout/stderr to /dev/null here - some diagnostic info is available only there.
+# also don't assume the run is in the background. the caller should take care of that.
+#
+# NOTE(review): the unquoted "lib/*" relies on the shell's glob failing to
+# match (the ":" makes the pattern a nonexistent path) so the literal wildcard
+# reaches the JVM for classpath expansion — verify nullglob/failglob are unset.
+$JAVA -cp ${BASE_DIR}/etc:${BASE_DIR}/lib/* com.att.nsa.cambria.CambriaApiServer $* >${CAMBRIA_LOGS_HOME}/console.log 2>&1
diff --git a/src/main/scripts/cambriaJsonPublisher.sh b/src/main/scripts/cambriaJsonPublisher.sh
new file mode 100644
index 0000000..9dbc4c7
--- /dev/null
+++ b/src/main/scripts/cambriaJsonPublisher.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+KEY=$3
+if [ "$3" == "" ]
+then
+ KEY=`hostname -f`
+fi
+
+while read LINE
+do
+ wget -q --header "Content-Type: application/json" --post-data="{ \"cambria.partition\":\"$KEY\", \"msg\":\"$LINE\" }" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaMonitor.sh b/src/main/scripts/cambriaMonitor.sh
new file mode 100644
index 0000000..0a8727c
--- /dev/null
+++ b/src/main/scripts/cambriaMonitor.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria consumer, requires wget
+#
+# usage:
+# cambriaMonitor <broker> <topic> <group> <id> <timeout>
+#
+
+while :
+do
+ wget -q -O - $1/events/$2/$3/$4?timeout=$5\&pretty=1
+ if [ $? -ne 0 ]
+ then
+ sleep 10
+ fi
+ echo
+done
+
diff --git a/src/main/scripts/cambriaMonitorWithAuth.sh b/src/main/scripts/cambriaMonitorWithAuth.sh
new file mode 100644
index 0000000..4ee2908
--- /dev/null
+++ b/src/main/scripts/cambriaMonitorWithAuth.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria consumer, requires wget
+#
+# usage:
+# cambriaMonitor <broker> <topic> <group> <id> <timeout>
+#
+
+# Poll the consumer endpoint forever with Cambria API-key authentication;
+# back off 10 seconds when a fetch fails.
+while :
+do
+	# Sign the current date with the API secret (HMAC-SHA1, base64) — the
+	# server recomputes the signature from the X-CambriaDate header.
+	DATE=`date`
+	SIGNATURE=`echo -n "$DATE" | openssl sha1 -hmac "$CAMBRIA_APISECRET" -binary | openssl base64`
+
+	# Quote the secret and the URL so whitespace or metacharacters in either
+	# cannot split the command line.
+	wget -q --header "X-CambriaAuth: $CAMBRIA_APIKEY:$SIGNATURE" --header "X-CambriaDate: $DATE" -O - "$1/events/$2/$3/$4?timeout=$5&pretty=1"
+	if [ $? -ne 0 ]
+	then
+		sleep 10
+	fi
+	echo
+done
+
diff --git a/src/main/scripts/cambriaSimpleTextPubWithAuth.sh b/src/main/scripts/cambriaSimpleTextPubWithAuth.sh
new file mode 100644
index 0000000..1623726
--- /dev/null
+++ b/src/main/scripts/cambriaSimpleTextPubWithAuth.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+DATE=`date`
+SIGNATURE=`echo -n "$DATE" | openssl sha1 -hmac $CAMBRIA_APISECRET -binary | openssl base64`
+
+while read LINE
+do
+ wget -q --header "Content-Type: text/plain" --header "X-CambriaAuth: $CAMBRIA_APIKEY:$SIGNATURE" --header "X-CambriaDate: $DATE" --post-data="$LINE" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaSimpleTextPublisher.sh b/src/main/scripts/cambriaSimpleTextPublisher.sh
new file mode 100644
index 0000000..4aacd8a
--- /dev/null
+++ b/src/main/scripts/cambriaSimpleTextPublisher.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+while read LINE
+do
+ wget -q --header "Content-Type: text/plain" --post-data="$LINE" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaTool.sh b/src/main/scripts/cambriaTool.sh
new file mode 100644
index 0000000..a9d6e15
--- /dev/null
+++ b/src/main/scripts/cambriaTool.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# Launch the Cambria ConfigTool with the installation's etc/ and lib/ on the
+# classpath, using a platform-appropriate path separator.
+
+# switched this from CAMBRIA_API_HOME, which should be declared in the env.
+# harmless to overwrite it here, but it's confusing to do so.
+BASE_DIR=`dirname "$0"`/..
+
+# determin a path separator that works for this platform
+PATHSEP=":"
+case "$(uname -s)" in
+
+	Darwin)
+		;;
+
+	Linux)
+		;;
+
+	CYGWIN*|MINGW32*|MSYS*)
+		# Windows-style JVMs expect ";" between classpath entries.
+		PATHSEP=";"
+		;;
+
+	*)
+		;;
+esac
+
+# use JAVA_HOME if provided
+if [ -n "${CAMBRIA_JAVA_HOME}" ]; then
+	JAVA=${CAMBRIA_JAVA_HOME}/bin/java
+elif [ -n "${JAVA_HOME}" ]; then
+	JAVA=${JAVA_HOME}/bin/java
+else
+	JAVA=java
+fi
+
+# NOTE(review): the unquoted "lib/*" relies on the shell's glob failing to
+# match so the literal wildcard reaches the JVM for classpath expansion —
+# verify nullglob/failglob are unset in the calling environment.
+$JAVA -cp ${BASE_DIR}/etc${PATHSEP}${BASE_DIR}/lib/* com.att.nsa.cambria.tools.ConfigTool $*
diff --git a/src/main/scripts/swmpkgclean.sh b/src/main/scripts/swmpkgclean.sh
new file mode 100644
index 0000000..7e6bc51
--- /dev/null
+++ b/src/main/scripts/swmpkgclean.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# SWM can only store a finite amount of packages in its repository, so this script deletes the oldest package.
+# This script is run by Jenkins after the build is finished (post SWM upload).
+
+SWM_COMPONENT="com.att.nsa:msgrtr"
+
+SWM_PKGS=`/opt/app/swm/aftswmcli/bin/swmcli "component pkglist -c $SWM_COMPONENT -df -dh -dj -sui"`
+SWM_PKGS_COUNT=`echo "$SWM_PKGS" | wc -l`
+SWM_PKGS_OLDEST=`echo "$SWM_PKGS" | head -1`
+SWM_PKGS_MAX_COUNT=2
+
+# Use -gt for the numeric comparison: inside [ ], ">" is an output
+# redirection, so the original created a file named "2" and the branch was
+# taken unconditionally.
+if [ $SWM_PKGS_COUNT -gt $SWM_PKGS_MAX_COUNT ]
+then
+	SWM_PKG_OLDEST_VERSION=`echo $SWM_PKGS_OLDEST | awk '{print $2}'`
+
+	# Delete the oldest package for this component from the SWM repository
+	/opt/app/swm/aftswmcli/bin/swmcli "component pkgdelete -c $SWM_COMPONENT:$SWM_PKG_OLDEST_VERSION"
+else
+	echo "No need to clean up SWM, package count ($SWM_PKGS_COUNT) is below threshold ($SWM_PKGS_MAX_COUNT)"
+fi
diff --git a/src/main/swm/common/common.env b/src/main/swm/common/common.env
new file mode 100644
index 0000000..b6ae68a
--- /dev/null
+++ b/src/main/swm/common/common.env
@@ -0,0 +1,19 @@
+
+# This file is used to source variables from the generated archetype
+# Because the *proc.sh scripts may contain variables that conflict with
+# Maven variables, we exclude those scripts and only replace values here
+# ${distFilesRoot} is substituted by Maven resource filtering at build time.
+ROOT_DIR=${INSTALL_ROOT}${distFilesRoot}; export ROOT_DIR
+# CLI used by the *proc scripts to drive the local resource manager (LRM).
+LRMCLI=${INSTALL_ROOT}/opt/app/aft/scldlrm/bin/lrmcli
+# Make the bundled utils directory visible to scripts that source this file.
+PATH=$PATH:`dirname $0`/utils; export PATH
+
+
+# Fail - used to quickly exit with a rc and error message
+# Usage: fail <rc> <message...> — prints the message (tagged for SWM log
+# scraping) to stderr and exits the sourcing script with <rc>.
+fail() {
+	rc=$1
+	shift;
+	echo "PROC_USER_MSG: [$rc]: $@" >&2
+	exit ${rc}
+}
+
+
+
diff --git a/src/main/swm/common/deinstall.env b/src/main/swm/common/deinstall.env
new file mode 100644
index 0000000..9a54c67
--- /dev/null
+++ b/src/main/swm/common/deinstall.env
@@ -0,0 +1,15 @@
+
+# This file is used to set the environment which the install_*.sh files
+# will use when executing. Only set variables that must be derived at
+# installation time here. For variables that should be set by the installer
+# in SWM, add VariableDescriptor elements to the descriptor.xml. Place
+# logical steps in the install_preproc.sh or install_postproc.sh.
+
+. `dirname $0`/common.env
+
+# CHECK FOR ABSOLUTELY REQUIRED VARIABLES HERE
+#derive version components for lrm.xml
+MAJOR_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $1}'`; export MAJOR_VERSION
+MINOR_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $2}'`; export MINOR_VERSION
+PATCH_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $3}'`; export PATCH_VERSION
+
diff --git a/src/main/swm/common/deinstall_postproc.sh b/src/main/swm/common/deinstall_postproc.sh
new file mode 100644
index 0000000..b761056
--- /dev/null
+++ b/src/main/swm/common/deinstall_postproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/deinstall.env
+rm -rf ${ROOT_DIR}
+exit 0
diff --git a/src/main/swm/common/deinstall_preproc.sh b/src/main/swm/common/deinstall_preproc.sh
new file mode 100644
index 0000000..9ee8c93
--- /dev/null
+++ b/src/main/swm/common/deinstall_preproc.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# Load derived environment; this also sources common.env (ROOT_DIR, LRMCLI, fail()).
+. `dirname $0`/deinstall.env
+
+# NOTE(review): LRMCLI and PATH are already set by common.env; these
+# re-definitions are redundant but harmless.
+LRMCLI=${INSTALL_ROOT}/opt/app/aft/scldlrm/bin/lrmcli
+PATH=$PATH:`dirname $0`/utils; export PATH
+
+# Number of instances of this service LRM currently reports as running.
+runningCount=`${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} | wc -l` || fail 300 "Unable to determine how many instances are running prior to notifying LRM of the upgrade"
+
+if [ "${runningCount}" -eq 0 ]; then
+
+# Nothing running: just deregister the resource from LRM.
+${LRMCLI} -delete -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_NEW_VERSION} -routeoffer ${AFT_SERVICE_ENV} || exit 101
+
+ else
+ # Instances running: shut them down (bounded by the LRM wait time), then deregister.
+ ${LRMCLI} -shutdown -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_NEW_VERSION} -routeoffer ${AFT_SERVICE_ENV} -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS} || exit 100
+ ${LRMCLI} -delete -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_NEW_VERSION} -routeoffer ${AFT_SERVICE_ENV} || exit 101
+
+fi
+
+# NOTE(review): ROOT_DIR already contains INSTALL_ROOT (see common.env), so
+# this path is double-prefixed — presumably should be ${ROOT_DIR}/logs; confirm.
+rm -rf ${INSTALL_ROOT}/${ROOT_DIR}/logs || {
+ echo "WARNING: Unable to purge logs directory during deinstall"
+}
+
+exit 0
diff --git a/src/main/swm/common/install.env b/src/main/swm/common/install.env
new file mode 100644
index 0000000..c98a29a
--- /dev/null
+++ b/src/main/swm/common/install.env
@@ -0,0 +1,37 @@
+
+# This file is used to set the environment which the install_*.sh files
+# will use when executing. Only set variables that must be derived at
+# installation time here. For variables that should be set by the installer
+# in SWM, add VariableDescriptor elements to the descriptor.xml. Place
+# logical steps in the install_preproc.sh or install_postproc.sh.
+
+. `dirname $0`/common.env
+
+# CHECK FOR ABSOLUTELY REQUIRED VARIABLES HERE
+test -z "${SCLD_ENV}" && fail 1 "SCLD_ENV required"
+test -z "${LATITUDE}" && fail 2 "LATITUDE required"
+test -z "${LONGITUDE}" && fail 3 "LONGITUDE required"
+test -z "${AFT_ENVIRONMENT}" && fail 4 "AFT_ENVIRONMENT required"
+
+#derive version components for lrm.xml
+# Assumes AFTSWM_ACTION_NEW_VERSION is "major.minor.patch" — TODO confirm.
+MAJOR_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $1}'`; export MAJOR_VERSION
+MINOR_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $2}'`; export MINOR_VERSION
+PATCH_VERSION=`echo ${AFTSWM_ACTION_NEW_VERSION} | awk -F'.' '{print $3}'`; export PATCH_VERSION
+
+# special handling for introscope...
+# When an Introscope agent library is configured, build the -javaagent JVM
+# flags; in every other case export an empty INTROSCOPE_VARS so templates
+# that reference it still expand cleanly.
+if [ ! -z "${INTROSCOPE_LIB}" ]; then
+	if [ -z "${INTROSCOPE_AGENTPROFILE}" ]; then
+		fail 100 "INTROSCOPE_AGENTPROFILE must be set"
+	fi
+
+	# Only attach the agent when both the jar and the profile actually exist
+	# AND a template resource xml is present — NOTE(review): TEMPLATE_RSRC_XML
+	# is not set anywhere in these files; presumably supplied by SWM — confirm.
+	if [ -f ${INTROSCOPE_LIB}/Agent.jar ] && [ -f ${INTROSCOPE_AGENTPROFILE} ]; then
+		if [ -f ${TEMPLATE_RSRC_XML} ]; then
+			INTROSCOPE_VARS="-javaagent:${INTROSCOPE_LIB}/Agent.jar -noverify -Dcom.wily.introscope.agentProfile=${INTROSCOPE_AGENTPROFILE} -Dintroscope.agent.agentName=${AFTSWM_ACTION_ARTIFACT_NAME}"
+			export INTROSCOPE_VARS
+		fi
+	else
+		INTROSCOPE_VARS=""; export INTROSCOPE_VARS
+	fi
+else
+	INTROSCOPE_VARS=""; export INTROSCOPE_VARS
+fi
diff --git a/src/main/swm/common/install_postproc.sh b/src/main/swm/common/install_postproc.sh
new file mode 100644
index 0000000..cdad299
--- /dev/null
+++ b/src/main/swm/common/install_postproc.sh
@@ -0,0 +1,191 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# Escape hatch: operators can create this sentinel file to skip postproc entirely.
+if [ -f "/tmp/exitdmaapMRpostproc" ]; then
+echo "file /tmp/exitdmaapMRpostproc found .exiting..........."
+exit 0;
+fi
+. `dirname $0`/install.env
+
+# Create the standard runtime directory layout under the install root.
+mkdir -p ${ROOT_DIR}/conf || fail 100 "Error on creating the conf directory."
+mkdir -p ${ROOT_DIR}/docs || fail 100 "Error on creating the docs directory."
+mkdir -p ${ROOT_DIR}/lib || fail 100 "Error on creating the lib directory."
+mkdir -p ${ROOT_DIR}/log || fail 100 "Error on creating the logs directory."
+
+##############################################################################
+# REMOVING THE DATA DIRECTORY
+# The following if statement is checking to see if a new version is being installed
+# on top of another version. If a new version is installed on top of the current
+# version WITHOUT a proper deinstall, this will remove the data directory which
+# is necessary to cleanup old AJSC route metadata. If CSTEM chooses to re-run
+# the install_postproc.sh to update swm node variables, this if statement will NOT
+# remove the data directory which is necessary for the SAME version to utilize the
+# correct data directory route metadata.
+##############################################################################
+if [ "${AFTSWM_ACTION_NEW_VERSION}" != "${AFTSWM_ACTION_PREVIOUS_VERSION}" ]
+then
+rm -rf ${ROOT_DIR}/data
+fi
+
+# Cleaning the jetty directory which contains the AJSC exploded war as well as
+# any other apps running under jetty directory
+rm -rf ${ROOT_DIR}/jetty
+
+# A simple override for the SOA Cloud platform value. Normally this is not
+# needed outside of SOA Cloud development sandboxes
+# this is used in the template.lrm.xml file during startup of the service
+if [ ! -z "${SCLD_PLATFORM}" ]; then
+ SCLD_OPTIONAL_PLATFORM_FLAG="-Dplatform=${SCLD_PLATFORM}"; export SCLD_OPTIONAL_PLATFORM_FLAG
+fi
+
+##############################################################################
+# PROCESS TEMPLATE FILES FROM ENVIRONMENT
+# pattern: looks for all files starting with "template.", processes them using the
+# current environment, then renames them by removing the "template." in the same
+# directory
+##############################################################################
+utilpath=`dirname $0`/utils
+for tfile in `ls ${ROOT_DIR}/bundleconfig/etc/sysprops/template.* ${ROOT_DIR}/bundleconfig/etc/appprops/template.* ${ROOT_DIR}/bin/template.* ${ROOT_DIR}/etc/template.* 2>/dev/null`; do
+ dfile=`echo ${tfile} | sed -e 's@/template\.@/@g'`
+ sh ${utilpath}/findreplace.sh ${tfile} ${dfile} || exit 200
+done
+
+# Snapshot of the running-instance count BEFORE deployment; used at the end of
+# this script to decide whether to auto-start the service.
+runningCount=`${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} | wc -l` || fail 300 "Unable to determine how many instances are running prior to notifying LRM of the upgrade"
+
+##############################################################################
+# DEPLOY CONTAINER TO LRM
+##############################################################################
+
+if [ -z "${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS}" ]
+then
+ RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS=180
+ export RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS
+fi
+
+
+
+DTE_TME_STAMP=`date +%Y%m%d_%H%M%S`
+
+LRM_ADD_LOG=/tmp/${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME}_ADD_${DTE_TME_STAMP}.out
+ echo "Adding resource to lrm"
+ echo "${LRMCLI} -add -file ${ROOT_DIR}/etc/lrm.xml -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS}"
+ ${LRMCLI} -add -file ${ROOT_DIR}/etc/lrm.xml -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS} > ${LRM_ADD_LOG}
+ LRM_ADD_RC=$?
+ echo "LRMCLI ADD RC : ${LRM_ADD_RC}"
+ if [ "${LRM_ADD_RC}" -ne "0" ]; then
+
+ RSRC_EXIST=`cat ${LRM_ADD_LOG} | grep SCLD-LRM-1024` # resource is already added
+ if [ "${RSRC_EXIST:-}x" = "x" ]; then
+ echo "LRM add for Resource ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} failed..."
+ cat ${LRM_ADD_LOG}
+ rm -f ${LRM_ADD_LOG}
+ exit 1
+ fi
+ echo "LRM Resource for ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} already exists. Proceeding with either addOrUpgrade or modify"
+ echo "Get the number of configured ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} instance"
+ versionCtr=`${LRMCLI} -configured | grep ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} | awk {'print $3'} | wc -l`
+ if [ ${versionCtr} -eq 1 ]; then
+ echo "Updating lrm resource"
+ echo "${LRMCLI} -addOrUpgrade -file ${ROOT_DIR}/etc/lrm.xml -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS}"
+ ${LRMCLI} -addOrUpgrade -file ${ROOT_DIR}/etc/lrm.xml -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS} || abort "lrmcli addOrUpgrade failed"
+ else
+ echo "Modifying lrm resource"
+ echo "${LRMCLI} -modify -file ${ROOT_DIR}/etc/lrm.xml"
+ ${LRMCLI} -modify -file ${ROOT_DIR}/etc/lrm.xml || abort "lrmcli modify failed"
+ fi
+ fi
+
+ echo "LRMCLI execution on ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} executed succesfully!"
+
+ls ${ROOT_DIR}/bundleconfig/etc/appprops/MsgRtrApi.properties
+
+if [ ! -z $CONFIG_ZK_SERVERS ]; then
+sed -i '/config.zk.servers=/c\config.zk.servers='$CONFIG_ZK_SERVERS ${ROOT_DIR}/bundleconfig/etc/appprops/MsgRtrApi.properties
+fi
+
+if [ ! -z $TRANSID_REQD ]; then
+sed -i '/transidUEBtopicreqd=/c\transidUEBtopicreqd='$TRANSID_REQD ${ROOT_DIR}/bundleconfig/etc/appprops/MsgRtrApi.properties
+fi
+
+if [ ! -z $MR_TOPICFACTOTRYCLASS ]; then
+sed -i '/msgRtr.topicfactory.aaf=/c\msgRtr.topicfactory.aaf='$MR_TOPICFACTOTRYCLASS ${ROOT_DIR}/bundleconfig/etc/appprops/MsgRtrApi.properties
+fi
+
+if [ ! -z $MR_NAMESPACE ]; then
+sed -i '/msgRtr.namespace.aaf=/c\msgRtr.namespace.aaf='$MR_NAMESPACE ${ROOT_DIR}/bundleconfig/etc/appprops/MsgRtrApi.properties
+fi
+
+
+if [ ! -z $CADI_KEYFILE ]; then
+sed -i '/cadi_keyfile=/c\cadi_keyfile='$CADI_KEYFILE ${ROOT_DIR}/etc/cadi.properties
+fi
+
+if [ ! -z $AAF_URL ]; then
+sed -i '/aaf_url=/c\aaf_url='$AAF_URL ${ROOT_DIR}/etc/cadi.properties
+fi
+
+if [ ! -z $AAF_ID ]; then
+sed -i '/aaf_id=/c\aaf_id='$AAF_ID ${ROOT_DIR}/etc/cadi.properties
+fi
+
+if [ ! -z $AAF_PWD ]; then
+sed -i '/aaf_password=/c\aaf_password='$AAF_PWD ${ROOT_DIR}/etc/cadi.properties
+fi
+
+if [ ! -z $MR_LOGLOC ]; then
+sed -i '/<property name="logDirectory" value=/c\<property name="logDirectory" value="'$MR_LOGLOC'"/>' ${ROOT_DIR}/bundleconfig/etc/logback.xml
+fi
+
+if [ ! -z $MR_KSPATH ]; then
+sed -i '/<Set name="KeyStorePath">/c\<Set name="KeyStorePath">'$MR_KSPATH'</Set>' ${ROOT_DIR}/etc/ajsc-jetty.xml
+fi
+
+if [ ! -z $MR_KSPWD ]; then
+sed -i '/<Set name="KeyStorePassword">/c\<Set name="KeyStorePassword">'$MR_KSPWD'</Set>' ${ROOT_DIR}/etc/ajsc-jetty.xml
+fi
+
+
+if [ ! -z $MR_KMPWD ]; then
+sed -i '/<Set name="KeyManagerPassword">/c\<Set name="KeyManagerPassword">'$MR_KMPWD'</Set>' ${ROOT_DIR}/etc/ajsc-jetty.xml
+fi
+
+
+
+# Auto-start policy: only start a fresh instance when nothing was running
+# before the upgrade AND the installer requested it via LRM_START_SVC.
+if [ "${runningCount}" -eq 0 ]; then
+ if [ "${LRM_START_SVC}" = "true" ]; then
+ echo "${LRMCLI} -start -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_NEW_VERSION} -routeoffer ${AFT_SERVICE_ENV} | egrep SUCCESS\|SCLD-LRM-1041"
+ # SCLD-LRM-1041 is accepted alongside SUCCESS — presumably it means
+ # "already started"; NOTE(review): confirm against LRM docs. The if below
+ # checks egrep's exit status (last command of the pipeline).
+ ${LRMCLI} -start -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_NEW_VERSION} -routeoffer ${AFT_SERVICE_ENV} | egrep SUCCESS\|SCLD-LRM-1041
+ if [ $? -ne 0 ]; then
+ fail 500 "Start of ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} with routeOffer ${AFT_SERVICE_ENV} failed"
+ fi
+ echo "${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME}"
+ ${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME}
+ else
+ echo "PROC_USER_MSG: LRM_START_SVC is set to false and no running instances were found prior to upgrading so ending install with no running service instances."
+ fi
+else
+ # Instances were already running before the upgrade; just report status.
+ ${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME}
+fi
diff --git a/src/main/swm/common/install_preproc.sh b/src/main/swm/common/install_preproc.sh
new file mode 100644
index 0000000..461c74d
--- /dev/null
+++ b/src/main/swm/common/install_preproc.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/deinstall.env
+
+LRMCLI=${INSTALL_ROOT}/opt/app/aft/scldlrm/bin/lrmcli
+PATH=$PATH:`dirname $0`/utils; export PATH
+if [ -d $LRMCLI ]; then
+runningCount=`${LRMCLI} -running | grep -w ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_PREVIOUS_VERSION} | wc -l` || fail 300 "Unable to determine how many instances are running prior to notifying LRM of the upgrade"
+
+if [ "${runningCount}" -eq 0 ]; then
+
+${LRMCLI} -delete -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_PREVIOUS_VERSION} -routeoffer ${AFT_SERVICE_ENV} || exit 101
+
+ else
+ ${LRMCLI} -shutdown -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_PREVIOUS_VERSION} -routeoffer ${AFT_SERVICE_ENV} -ttw ${RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS} || exit 100
+ ${LRMCLI} -delete -name ${SOA_CLOUD_NAMESPACE}.${AFTSWM_ACTION_ARTIFACT_NAME} -version ${AFTSWM_ACTION_PREVIOUS_VERSION} -routeoffer ${AFT_SERVICE_ENV} || exit 101
+
+fi
+
+rm -rf ${INSTALL_ROOT}/${ROOT_DIR}/logs || {
+ echo "WARNING: Unable to purge logs directory during deinstall"
+}
+fi
+
+exit 0
diff --git a/src/main/swm/common/utils/findreplace.sh b/src/main/swm/common/utils/findreplace.sh
new file mode 100644
index 0000000..efd8a77
--- /dev/null
+++ b/src/main/swm/common/utils/findreplace.sh
@@ -0,0 +1,94 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+# Copyright 2011 AT&T Intellectual Properties
+##############################################################################
+# findreplace.sh <template> <destination>
+#
+# This script searches a provided file for templatized variable names
+# in the format __varname__ and, if found in the current environment
+# replaces those. Once complete, it will move the final copy of the file
+# to <destination>.
+#
+##############################################################################
+TEMPLATE=${1:?"Template file path required"}
+DESTINATION=${2:?"Destination file path required"}
+
+if [ ! -f "${TEMPLATE}" ]; then
+    echo "ERROR: Specified template file does not exist: ${TEMPLATE}"
+    exit 100
+fi
+
+DIRECTORY=`dirname ${DESTINATION}`
+if [ ! -d "${DIRECTORY}" ]; then
+    echo "ERROR: Destination directory does not exist: ${DIRECTORY}"
+    exit 200
+fi
+
+SED_SCR=/tmp/sed.$$
+echo "{" > ${SED_SCR}
+
+# create a sed script for replacing variables from current environment
+for i in `env | awk -F= '{ print $1}'`; do
+    if [ "$i" = "IFS" ] ; then
+        continue;
+    fi
+
+    VALUE=`eval echo '$'${i}` || {
+        echo 'WARNING: Unable to format '${i}' for sed replacement'
+        continue;
+    }
+
+    # BUGFIX: reset CCHAR every iteration. Previously a delimiter chosen for
+    # an earlier variable leaked into later iterations, so a value containing
+    # that stale delimiter produced a corrupt sed expression.
+    CCHAR=""
+    # Pick a sed delimiter character that does not occur in the value.
+    for x in '@' '^' '&' '?' '#' '~' '%' '|' '+' '/'; do
+        echo ${VALUE} | grep "$x" 2>/dev/null 1>/dev/null
+        if [ $? != 0 ]; then
+            CCHAR="$x"
+            break
+        fi
+    done
+
+    if [ -z "${CCHAR}" ]; then
+        # BUGFIX: the warning referenced ${KEY}, which is never defined; ${i}
+        # is the variable being skipped.
+        echo "WARNING: Unable to find a suitable sed replacement character for ${VALUE}, will ignore setting ${i} in templates"
+        continue;
+    fi
+
+    # Emit "s<delim>__NAME__<delim>VALUE<delim>g" into the sed script.
+    echo " s${CCHAR}__${i}__${CCHAR}${VALUE}${CCHAR}g" >> ${SED_SCR}
+done
+
+# Escape literal \@ sequences so sed does not misinterpret them.
+sed -e 's/\\\@/\\\\@/g' ${SED_SCR} > ${SED_SCR}.1 || exit 300
+
+# Keep a timestamped backup of any pre-existing destination file.
+if [ -f ${DESTINATION} ]; then
+    TIMESTAMP=`date +%Y%m%d%H%M%S`
+    o_dir=`dirname ${DESTINATION}`
+    o_file=`basename ${DESTINATION}`
+    mv ${DESTINATION} ${o_dir}/bu.${o_file}.${TIMESTAMP}
+fi
+
+mv -f ${SED_SCR}.1 ${SED_SCR} || exit 400
+
+echo "}" >> ${SED_SCR} || exit 500
+
+# Apply the generated substitutions to the template, producing the destination.
+sed -f ${SED_SCR} ${TEMPLATE} > ${DESTINATION} || exit 600
+
+rm -f $SED_SCR
+
+exit 0
diff --git a/src/main/swm/deinstall/postproc/post_proc b/src/main/swm/deinstall/postproc/post_proc
new file mode 100644
index 0000000..b79a8ab
--- /dev/null
+++ b/src/main/swm/deinstall/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./deinstall_postproc.sh
diff --git a/src/main/swm/deinstall/preproc/pre_proc b/src/main/swm/deinstall/preproc/pre_proc
new file mode 100644
index 0000000..7127ba3
--- /dev/null
+++ b/src/main/swm/deinstall/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./deinstall_preproc.sh \ No newline at end of file
diff --git a/src/main/swm/descriptor.xml b/src/main/swm/descriptor.xml
new file mode 100644
index 0000000..d42cf1b
--- /dev/null
+++ b/src/main/swm/descriptor.xml
@@ -0,0 +1,386 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<!-- This file is the main deployment descriptor for the SWM package generated
+ for the project. -->
+
+<!-- For installing multiple versions of a Service onto the same SOA Cloud Node and maintaining former versions to switch between the
+ versions installed, you will have to change the following line to this:
+
+ <descriptor version="1" concurrent="true" xmlns="http://aft.att.com/swm/descriptor">
+
+ Adding the 'concurrent="true"' will allow for the concurrency of this Service. Please, NOTE, however, before creating a SWM Package
+ with this Service, you MUST update your Service through SWMCLI with the following command:
+ swmcli component update -c <componentName> -conc true
+ Once your Service has been updated to be concurrent, you MUST add the 'concurrent="true"' to ALL Versions of the Service -->
+
+<descriptor version="1" xmlns="http://aft.att.com/swm/descriptor">
+ <!-- This section describes what OS's and architectures are supported. Since
+ this is Java, we'll default to supporting any OS and architecture. -->
+ <platforms>
+ <platform os="*" architecture="*" osVersions="*" />
+ </platforms>
+ <!-- This section describes the file locations and permissions and ownership
+ of those files. Only authorized users and groups for the component may be
+ used. -->
+ <paths>
+ <path name="${absoluteDistFilesRoot}" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755,0644"
+ recursive="true" />
+ <path name="${distFilesRoot}" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755,0644"
+ recursive="true" />
+ <path name="${distFilesRoot}/bundleconfig" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/etc" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/extJars" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/extApps" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/conf" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/lib" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/docs" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/runtime" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+ <path name="${distFilesRoot}/services" type="d"
+ user="${installOwnerUser}" group="${installOwnerGroup}" permissions="0755"
+ recursive="true" />
+
+ </paths>
+ <!-- This section describes the package lifecycle scripts and the users
+ and groups they should run as. Only authorized users and groups for the component
+ may be used. -->
+ <actions>
+ <action type="DINST">
+ <proc stage="POST" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ <proc stage="PRE" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ </action>
+ <action type="FALL">
+ <proc stage="POST" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ <proc stage="PRE" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ </action>
+ <action type="INIT">
+ <proc stage="POST" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ <proc stage="PRE" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ </action>
+ <action type="INST">
+ <proc stage="POST" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ <proc stage="PRE" user="${installOwnerUser}" group="${installOwnerGroup}" />
+ </action>
+ </actions>
+ <!-- This section describes the variables that are used during package installation.
+ Where required=true, the installer must ensure the environment contains this
+ variable by attaching a value for the variable to the SWM node or a node
+ group which the SWM node is a member of. The installer may also override
+ items that have a defaultValue. Additional variables can be added to this
+ list and used in template.* files in your project. To use one, put template
+ replacement text starting and ending with double-underscores and the variable
+ key in between. Example: __MY_VAR_KEY__. During installation, these are replaced
+ in the template.* file and the template.* file is renamed to remove the template.
+ from the name. -->
+ <variableDescriptions>
+ <variableDescription key="JAVA_VERSION"
+ description="The Java version to use." defaultValue="1.7" required="false" />
+ <variableDescription key="JAVA_PRE_CLASSPATH"
+		description="Additional classpath information to prepend to the installed classpath."
+ defaultValue=":" required="false" />
+ <variableDescription key="JAVA_POST_CLASSPATH"
+		description="Additional classpath information to append to the end of the installed classpath."
+ defaultValue=":" required="false" />
+ <variableDescription key="PRE_JVM_ARGS"
+ description="Additional JVM arguments to prepend to the JVM startup command."
+ defaultValue=" " required="false" />
+ <variableDescription key="POST_JVM_ARGS"
+ description="Additional JVM arguments to append to the end of the JVM startup command."
+ defaultValue=" " required="false" />
+ <variableDescription key="MAX_PERM_SIZE"
+ description="The maximum perm size to use when starting the JVM"
+ defaultValue="256m" required="false" />
+ <variableDescription key="PERM_SIZE"
+ description="The initial perm size to use when starting the JVM"
+ defaultValue="32m" required="false" />
+ <variableDescription key="MIN_HEAP_SIZE"
+		description="The minimum heap size to use when starting the JVM."
+ defaultValue="512m" required="false" />
+ <variableDescription key="MAX_HEAP_SIZE"
+ description="The maximum heap size to use when starting the JVM."
+ defaultValue="512m" required="false" />
+ <variableDescription key="LRM_VALIDATEPID_WAITTIME_SECONDS"
+ description="The amount of time LRM should wait to validate the health of the PID after startup"
+ defaultValue="4" required="false" />
+ <variableDescription key="LRM_RESOURCE_START_TYPE"
+ description="The type of startup LRM should perform on the resource."
+ defaultValue="MANUAL" required="false" />
+ <variableDescription key="LRM_START_PRIORITY"
+ description="The priority sequence to use when starting the resource."
+ defaultValue="0" required="false" />
+ <variableDescription key="LRM_START_TIMEOUT"
+ description="The timeout on the startup." defaultValue="100"
+ required="false" />
+ <variableDescription key="RESOURCE_MIN_COUNT"
+ description="The default minimum number of instances LRM should maintain."
+ defaultValue="1" required="false" />
+
+ <variableDescription key="LRM_START_SVC"
+ description="To auto start the service."
+ defaultValue="true" required="false" />
+
+
+ <variableDescription key="RESOURCE_MAX_COUNT"
+ description="The maximum number of instances LRM should allow."
+ defaultValue="5" required="false" />
+ <variableDescription key="LRM_RESOURCE_MAX_RESTART"
+ description="The maximum number of restarts LRM should perform before giving up."
+ defaultValue="10" required="false" />
+ <variableDescription key="LRM_RESOURCE_HEARTBEAT"
+ description="Time in seconds indicating how long to wait between heartbeat attempts."
+ defaultValue="180" required="false" />
+ <variableDescription key="LRM_RESOURCE_HEARTBEAT_FAILED_LIMIT"
+ description="Consecutive heartbeat failure threshold before an error is assumed and appropriate action taken."
+ defaultValue="3" required="false" />
+ <variableDescription key="LRM_RESOURCE_HEARTBEAT_TIMEOUT"
+ description="Timeout of heartbeats" defaultValue="2" required="false" />
+ <variableDescription key="RESOURCE_MANAGER_WAIT_TIME_IN_SECONDS"
+ description="Indicates the time in seconds that LRM waits for the resource to shutdown."
+ defaultValue="180" required="false" />
+ <variableDescription key="LRM_RESOURCE_REGISTRATION"
+ description="Flag that indicates if resource needs to be registered in the SOA Cloud."
+ defaultValue="true" required="false" />
+ <variableDescription key="PROC_SKIP_START_NEW_ON_ZERO_INSTANCES"
+ description="Indicates that the postproc should not start an initial instance of the service during installation."
+ defaultValue="false" required="false" />
+ <variableDescription key="AJSC_RUNTIME_VERSION"
+ description="the version of ajsc" defaultValue="${ajscRuntimeVersion}"
+ required="false" />
+ <variableDescription key="AJSC_CONF_HOME"
+ description="External location where AJSC can locate all required config files for its boot up"
+ defaultValue="$INSTALL_ROOT${distFilesRoot}/bundleconfig" required="false" />
+ <variableDescription key="AJSC_SHARED_CONFIG"
+ description="External location where AJSC can locate the logback configuration file for system wide logging"
+ defaultValue="$INSTALL_ROOT${distFilesRoot}/bundleconfig" required="false" />
+ <variableDescription key="AJSC_CONTEXT"
+ description="The context in which the service will run. Defaults to the root context, /"
+ defaultValue="/" required="false" />
+ <variableDescription key="AJSC_APP_SERVLET_URL_PATTERN"
+		description="This is required for proper GRM Registration of your att-dme2-servlet endpoints. This should match the value from ajsc-override-web.xml"
+ defaultValue="/services" required="false" />
+ <variableDescription key="AJSC_SVC_PORT"
+ description="the port for the embedded jetty server to listen on. A value of 0 will utilize ephemeral port selection"
+ defaultValue="${serverPort}" required="false" />
+ <variableDescription key="AJSC_SSL_PORT"
+ description="the port for SSL" defaultValue="${sslport}" required="false" />
+ <variableDescription key="AJSC_AUTH_SCHEME"
+ description="AJSC Basic Authentication scheme - authentication-scheme-1 turns on, authentication-scheme-2 turns off" defaultValue="authentication-scheme-2"
+ required="false" />
+ <variableDescription key="AJSC_PERSISTENCE"
+ description="meta data persistence. Can be riak, cassandra, or file" defaultValue="file"
+ required="false" />
+ <variableDescription key="AJSC_ENABLE_SSL"
+ description="ssl true/false. A value of true will enable, and will register your service as https" defaultValue="false"
+ required="false" />
+ <variableDescription key="CSI_ENABLE"
+ description="csi logging true/false. A value of true will enable, and a value of false will disable" defaultValue="true"
+ required="false" />
+ <variableDescription key="IS_CAET_ENABLE"
+ description="CAET enable/disable " defaultValue="true"
+ required="false" />
+ <variableDescription key="ENABLE_EJB"
+ description="enable EJB container true/false. A value of true will enable, and a value of false will disable the container" defaultValue="false"
+ required="false" />
+ <variableDescription key="OSGI_ENABLE"
+ description="enable OSGI container true/false. A value of true will enable OSGI, and a value of false will disable OSGI" defaultValue="false"
+ required="false" />
+ <variableDescription key="JMS_TIBCO_PROVIDER_URL"
+ description="JMS TIBCO PROVIDER URL" defaultValue="tcp://q27csi1c3.vci.att.com:27812"
+ required="false" />
+ <variableDescription key="JMS_LOGGER_USER_NAME"
+ description="JMS LOGGER USER NAME" defaultValue="sg"
+ required="false" />
+ <variableDescription key="JMS_LOGGER_PASSWORD"
+ description="JMS LOGGER PASSWORD" defaultValue="its4test"
+ required="false" />
+ <variableDescription key="JMS_LOGGER_AUDIT_QUEUE_BINDING"
+ description="JMS LOGGER AUDIT QUEUE_BINDING" defaultValue="pub.m2e.AJSC.Audit.logger.queue"
+ required="false" />
+ <variableDescription key="JMS_LOGGER_PERF_QUEUE_BINDING"
+ description="JMS LOGGER PERF QUEUE BINDING" defaultValue="pub.m2e.AJSC.Perf.logger.queue"
+ required="false" />
+ <variableDescription key="LOGBACK_CONFIG_FILE"
+		description="The location of the logback.xml file which controls the logging configuration."
+ defaultValue="$INSTALL_ROOT${distFilesRoot}/bundleconfig/etc/logback.xml" required="false" />
+ <variableDescription key="LOGBACK_LOG_LEVEL"
+ description="The logging level to use when configuring logback logging backend."
+ defaultValue="INFO" required="false" />
+ <variableDescription key="END_POINT_LEVEL_LOGGING"
+ description="endpointLogging true/false. A value of true will enable, and a value of false will disable" defaultValue="false"
+ required="false" />
+ <variableDescription key="AJSC_CADI_AUTHN"
+ description="Cadi enabled/disabled. If you are using GLO Cadi filter, use authentication-scheme-1.
+ If you do NOT want to use the Cadi filter, use authentication-scheme-2" defaultValue="authentication-scheme-1"
+ required="false" />
+ <variableDescription key="AJSC_SERVICE_NAMESPACE"
+ description="AJSC Service Namespace" defaultValue="${module.ajsc.namespace.name}"
+ required="false" />
+ <variableDescription key="AJSC_SERVICE_VERSION"
+ description="AJSC Service Version" defaultValue="${module.ajsc.namespace.version}"
+ required="false" />
+ <variableDescription key="AJSC_JETTY_ThreadCount_MIN"
+ description="AJSC Jetty Min Thread Count" defaultValue="10" required="false" />
+ <variableDescription key="AJSC_JETTY_ThreadCount_MAX"
+ description="AJSC Jetty Max Thread Count" defaultValue="200"
+ required="false" />
+ <variableDescription key="AJSC_JETTY_IDLETIME_MAX"
+ description="AJSC Jetty Max Idle TimeOut" defaultValue="60000"
+ required="false" />
+ <variableDescription key="SOA_CLOUD_NAMESPACE"
+ description="the Namespace used for GRM service registrations"
+ defaultValue="com.att.ajsc" required="false" />
+ <variableDescription key="AFT_SERVICE_ENV"
+		description="the AFT Service Env found within CSI used for GRM service registrations (routeOffer)"
+ defaultValue="DEFAULT" required="false" />
+ <variableDescription key="SOA_CLOUD_ENV"
+ description="Set to TRUE for service to register with DME2 in a SOA Cloud Environment"
+ defaultValue="TRUE" required="false" />
+ <variableDescription key="SOACLOUD_ENV_CONTEXT"
+ description="This is the value that will be used as envContext in DME2 registration of service and should match the SCLD_ENV of the Node already set"
+ defaultValue="DEV" required="false" />
+ <variableDescription key="AJSC_ENV"
+ description="Production Environment for the AJSC. Select from SOACLOUD, DEV, QA, PROD, and PREPROD"
+ defaultValue="DEV" required="false" />
+ <variableDescription key="SOACLOUD_PROTOCOL"
+ description="Protocol being used by the service"
+ defaultValue="http" required="false" />
+ <variableDescription key="SCAMPER_ENABLED"
+ description="Indicate if Scamper config file generation is enabled"
+ defaultValue="false" required="false" />
+ <variableDescription key="DME2_LIB"
+ description="DME2 Library Location for external dme2 library"
+ defaultValue="$INSTALL_ROOT/opt/app/aft/dme2/lib" required="false" />
+ <variableDescription key="CSM_LIB"
+ description="CSM Library Location for external dme2 library"
+ defaultValue="$INSTALL_ROOT/appl/external_libs/csi-csm/1.1.1/lib" required="false" />
+ <variableDescription key="AJSC_SSF_FILE_MONITOR_POLLING_INTERVAL"
+ description="AJSC Internal File Monitor Polling Interval in Seconds" defaultValue="5"
+ required="false" />
+ <variableDescription key="AJSC_SSF_FILE_MONITOR_THREAD_POOL_SIZE"
+ description="AJSC Internal File Monitor ThreadPool Size" defaultValue="10"
+ required="false" />
+ <variableDescription key="AFT_DME2_CONN_IDLE_TIMEOUTMS"
+ description="DME2 Idle Connection TimeOut In ms" defaultValue="5000"
+ required="false" />
+ <variableDescription key="HAWTIO_AUTHENTICATION_ENABLED"
+ description="HawtIO Authentication Flag" defaultValue="false"
+ required="false" />
+ <variableDescription key="HAWTIO_CONFIG_PULLONSTARTUP"
+ description="HawtIO Config Pull on Startup" defaultValue="false"
+ required="false" />
+ <variableDescription key="CAMEL_POOL_SIZE"
+ description="Pool size to set in default threadPool profile for Camel Context" defaultValue="10"
+ required="false" />
+ <variableDescription key="CAMEL_MAX_POOL_SIZE"
+ description="Max pool size to set in default threadPool profile for Camel Context" defaultValue="20"
+ required="false" />
+ <variableDescription key="CAMEL_KEEP_ALIVE_TIME"
+ description="Idle time (in Seconds) for excess threads to wait before they are discarded." defaultValue="60"
+ required="false" />
+ <variableDescription key="CAMEL_MAX_QUEUE_SIZE"
+ description="The task queue can contain up to n tasks before the pool is exhausted." defaultValue="1000"
+ required="false" />
+ <variableDescription key="LOGBACK_CONFIG_FILE"
+ description="Logback configuration file location" defaultValue="${distFilesRoot}/etc/logback.xml"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_CONTROLLER_DAEMON"
+ description="Indicates if the controller thread should be a daemon (not blocking JVM exit)." defaultValue="true"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS"
+ description="Time for the controller thread to sleep between each control." defaultValue="100"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_INBOUND_BUFFER_SIZE"
+ description="The size of the buffer when reading messages." defaultValue="8192"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_MIN_THREADS"
+ description="Minimum threads waiting to service requests." defaultValue="1"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_MAX_THREADS"
+ description="Maximum threads that will service requests." defaultValue="10"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_LOW_THREADS"
+ description="Number of worker threads determining when the connector is considered overloaded." defaultValue="8"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_MAX_QUEUED"
+ description="Maximum number of calls that can be queued if there aren't any worker thread available to service them." defaultValue="0"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST"
+ description="Maximum number of concurrent connections per host (IP address)." defaultValue="-1"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS"
+ description="Maximum number of concurrent connections in total." defaultValue="-1"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE"
+ description="The size of the buffer when writing messages." defaultValue="8192"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_PERSISTING_CONNECTIONS"
+ description="Indicates if connections should be kept alive after a call." defaultValue="true"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_PIPELINING_CONNECTIONS"
+ description="Indicates if pipelining connections are supported." defaultValue="false"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS"
+ description="Time for an idle thread to wait for an operation before being collected." defaultValue="60000"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_USE_FORWARDED_HEADER"
+ description="Lookup the X-Forwarded-For header supported by popular proxies and caches and uses it to populate the Request.getClientAddresses() method result." defaultValue="false"
+ required="false" />
+ <variableDescription key="RESTLET_COMPONENT_REUSE_ADDRESS"
+ description="Enable/disable the SO_REUSEADDR socket option. See java.io.ServerSocket#reuseAddress property for additional details." defaultValue="true"
+ required="false" />
+ <variableDescription key="AJSC_EXTERNAL_LIB_FOLDERS"
+ description="Location of external libs to be used by AJSC. Values here will be in addition to libs in template.sys-props.properties." defaultValue=""
+ required="false" />
+ <variableDescription key="AJSC_EXTERNAL_PROPERTIES_FOLDERS"
+ description="Location of external property folders that may be needed on the classpath (ex: csm) to be used by AJSC. Values here will be in addition to libs in template.sys-props.properties." defaultValue=""
+ required="false" />
+ <variableDescription key="ENABLE_TRAIL_LOGGING"
+ description="enable logging of the routes" defaultValue="false"
+ required="false" />
+ <variableDescription key="ENABLE_TRAIL_LOGGING_SUMMARY"
+ description="enable logging of the routes and summary" defaultValue="false"
+ required="false" />
+ <variableDescription key="API_DOC"
+ description="enable generation of metadata for swagger UI" defaultValue="false"
+ required="false" />
+ </variableDescriptions>
+</descriptor>
+
diff --git a/src/main/swm/fallback/postproc/post_proc b/src/main/swm/fallback/postproc/post_proc
new file mode 100644
index 0000000..d017750
--- /dev/null
+++ b/src/main/swm/fallback/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_postproc.sh
diff --git a/src/main/swm/fallback/preproc/install_preproc.sh b/src/main/swm/fallback/preproc/install_preproc.sh
new file mode 100644
index 0000000..e200912
--- /dev/null
+++ b/src/main/swm/fallback/preproc/install_preproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/install.env
+
+exit 0 \ No newline at end of file
diff --git a/src/main/swm/fallback/preproc/pre_proc b/src/main/swm/fallback/preproc/pre_proc
new file mode 100644
index 0000000..3f1b26f
--- /dev/null
+++ b/src/main/swm/fallback/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_preproc.sh
diff --git a/src/main/swm/initinst/postproc/post_proc b/src/main/swm/initinst/postproc/post_proc
new file mode 100644
index 0000000..7561171
--- /dev/null
+++ b/src/main/swm/initinst/postproc/post_proc
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+if [ "${SCAMPER_ENABLED}" = "true" ]
+	then
+
+if [ ! -d /opt/app/${AFTSWM_ACTION_ARTIFACT_GROUP}/${AFTSWM_ACTION_ARTIFACT_NAME}/bin/config2 ]
+  then
+	mkdir /opt/app/${AFTSWM_ACTION_ARTIFACT_GROUP}/${AFTSWM_ACTION_ARTIFACT_NAME}/bin/config2
+	echo "Creating directory " /opt/app/${AFTSWM_ACTION_ARTIFACT_GROUP}/${AFTSWM_ACTION_ARTIFACT_NAME}/bin/config2
+fi
+
+
+. /opt/app/${AFTSWM_ACTION_ARTIFACT_GROUP}/${AFTSWM_ACTION_ARTIFACT_NAME}/bin/scamper.sh
+rc=$?
+#Check to see if the scamper.sh script executed without errors.
+#If an error occurred, then exit, stop the install and cause a failed install.
+if [ $rc -ne 0 ]
+	then
+		exit $rc
+fi
+
+echo "Finished Scamper file generation"
+fi
+cd ../../common
+exec sh -x ./install_postproc.sh
diff --git a/src/main/swm/initinst/preproc/pre_proc b/src/main/swm/initinst/preproc/pre_proc
new file mode 100644
index 0000000..3f1b26f
--- /dev/null
+++ b/src/main/swm/initinst/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_preproc.sh
diff --git a/src/main/swm/install/postproc/post_proc b/src/main/swm/install/postproc/post_proc
new file mode 100644
index 0000000..b0c49dc
--- /dev/null
+++ b/src/main/swm/install/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_postproc.sh \ No newline at end of file
diff --git a/src/main/swm/install/preproc/pre_proc b/src/main/swm/install/preproc/pre_proc
new file mode 100644
index 0000000..2f0eb3f
--- /dev/null
+++ b/src/main/swm/install/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_preproc.sh \ No newline at end of file
diff --git a/src/main/swm/notes.txt b/src/main/swm/notes.txt
new file mode 100644
index 0000000..7c617d6
--- /dev/null
+++ b/src/main/swm/notes.txt
@@ -0,0 +1,8 @@
+# (c) 2012 AT&T Intellectual Property. All rights reserved.
+
+NOTES FOR com.att.nsa:dmaap - Version 0.0.1-SNAPSHOT
+=======================================================
+
+This package was created at ${maven.build.timestamp}.
+
+No additional notes are available. \ No newline at end of file
diff --git a/src/main/test/com/att/nsa/dmaap/DummyTest.java b/src/main/test/com/att/nsa/dmaap/DummyTest.java
new file mode 100644
index 0000000..b979e31
--- /dev/null
+++ b/src/main/test/com/att/nsa/dmaap/DummyTest.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DummyTest {
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		assertTrue("Dummy test case", true);
+	}
+
+}
diff --git a/src/test/java/com/att/nsa/dmaap/DummyTest.java b/src/test/java/com/att/nsa/dmaap/DummyTest.java
new file mode 100644
index 0000000..60e3931
--- /dev/null
+++ b/src/test/java/com/att/nsa/dmaap/DummyTest.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.nsa.dmaap;
+
+import static org.junit.Assert.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DummyTest {
+
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void test() {
+ assertTrue("Dummy test case", true);
+ }
+
+}