summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorVarun Gudisena <vg411h@att.com>2017-08-30 16:00:10 -0500
committerVarun Gudisena <vg411h@att.com>2017-08-30 16:00:18 -0500
commitca63da6e0cb7fb63e231343d0b52a40036f6b6aa (patch)
tree1f03578a5badef32c983b1ebcab9df447b30e214
parentd53cd5dba6a7a74bba79d8e4bca5c60d0c9779b8 (diff)
Add Initial Code Import
Added initial code for DMaaP Message Router Component Issue-id: DMAAP-76 Change-Id: Ica6f265ea4f2901cf47191f21b4448514ea7c8d4 Signed-off-by: Varun Gudisena <vg411h@att.com>
-rw-r--r--.gitignore1
-rw-r--r--LICENSE22
-rw-r--r--etc/cambriaApi.properties138
-rw-r--r--etc/cambriaApi_template.properties130
-rw-r--r--etc/log4j.xml65
-rw-r--r--etc/log4j_template.xml63
-rw-r--r--etc/logstash_cambria_template.conf36
-rw-r--r--notes/capacityMath.xlsxbin0 -> 10910 bytes
-rw-r--r--notes/keys.txt3
-rw-r--r--notes/systems.xlsxbin0 -> 13139 bytes
-rw-r--r--notes/users.csv20
-rw-r--r--pom.xml418
-rw-r--r--src/assembly/dep.xml50
-rw-r--r--src/main/config/fixme.txt4
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java198
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java80
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java88
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java96
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java110
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java71
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java98
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java245
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java614
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java169
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java160
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java200
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java207
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java90
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java61
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java101
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java88
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java227
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java104
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java320
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java462
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java233
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java139
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java45
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java52
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java214
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java155
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java125
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java42
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java92
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java239
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java93
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java137
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java135
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java64
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java124
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java93
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java133
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java52
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java89
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java52
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java101
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java146
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java423
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java99
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java74
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java170
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java430
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java115
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java516
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java171
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java142
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java229
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java140
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java39
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java90
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java62
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java136
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java88
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java293
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java83
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java106
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java76
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java68
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java54
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java176
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java62
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java92
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java188
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java325
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java154
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java788
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java604
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java115
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java649
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java100
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java206
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java44
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java83
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java86
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java202
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java183
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java61
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java497
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java69
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java358
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java215
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java133
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java145
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java133
-rw-r--r--src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java37
-rw-r--r--src/main/resources/DMaaPUrl.properties39
-rw-r--r--src/main/resources/cambriaApiVersion.properties23
-rw-r--r--src/main/resources/dme2testcase.properties85
-rw-r--r--src/main/resources/endpoint.properties31
-rw-r--r--src/main/resources/images/attLogo.gifbin0 -> 1885 bytes
-rw-r--r--src/main/resources/images/att_vt_1cp_grd_rev.gifbin0 -> 5238 bytes
-rw-r--r--src/main/resources/routes.conf106
-rw-r--r--src/main/resources/templates/hello.html9
-rw-r--r--src/main/scripts/cambria.sh49
-rw-r--r--src/main/scripts/cambriaJsonPublisher.sh41
-rw-r--r--src/main/scripts/cambriaMonitor.sh40
-rw-r--r--src/main/scripts/cambriaMonitorWithAuth.sh43
-rw-r--r--src/main/scripts/cambriaSimpleTextPubWithAuth.sh38
-rw-r--r--src/main/scripts/cambriaSimpleTextPublisher.sh35
-rw-r--r--src/main/scripts/cambriaTool.sh55
-rw-r--r--src/main/scripts/swmpkgclean.sh42
-rw-r--r--src/main/swm/common/common.env21
-rw-r--r--src/main/swm/common/deinstall.env1
-rw-r--r--src/main/swm/common/deinstall_postproc.sh26
-rw-r--r--src/main/swm/common/deinstall_preproc.sh26
-rw-r--r--src/main/swm/common/install.env1
-rw-r--r--src/main/swm/common/install_postproc.sh26
-rw-r--r--src/main/swm/common/install_preproc.sh26
-rw-r--r--src/main/swm/common/localize.sh170
-rw-r--r--src/main/swm/deinstall/postproc/post_proc4
-rw-r--r--src/main/swm/deinstall/preproc/pre_proc8
-rw-r--r--src/main/swm/descriptor.xml46
-rw-r--r--src/main/swm/fallback/postproc/post_proc4
-rw-r--r--src/main/swm/fallback/preproc/pre_proc4
-rw-r--r--src/main/swm/initinst/postproc/post_proc6
-rw-r--r--src/main/swm/initinst/preproc/pre_proc6
-rw-r--r--src/main/swm/install/postproc/post_proc24
-rw-r--r--src/main/swm/install/preproc/pre_proc17
-rw-r--r--src/main/webapp/WEB-INF/spring-context.xml122
-rw-r--r--src/main/webapp/WEB-INF/web.xml45
-rw-r--r--src/test/java/com/att/sa/cambria/testClient/SimpleExample.java335
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/ApiKeyBean.java72
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapPubSubTest.java138
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapTopicTest.java267
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapAdminTest.java60
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapApiKeyTest.java162
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapMetricsTest.java77
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/JUnitTestSuite.java44
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/LoadPropertyFile.java48
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TestRunner.java42
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TopicBean.java72
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/ApiKeyBean.java72
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2AdminTest.java148
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ApiKeyTest.java229
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerFilterTest.java97
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerTest.java95
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2MetricsTest.java133
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ProducerTest.java101
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2TopicTest.java546
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/JUnitTestSuite.java44
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/LoadPropertyFile.java69
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TestRunner.java42
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TopicBeanDME2.java94
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiTestCase.java52
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaRateLimiterTest.java78
-rw-r--r--src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/UtilsTest.java58
166 files changed, 20997 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b83d222
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/target/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..2ce945c
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,22 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/ \ No newline at end of file
diff --git a/etc/cambriaApi.properties b/etc/cambriaApi.properties
new file mode 100644
index 0000000..b3a2b68
--- /dev/null
+++ b/etc/cambriaApi.properties
@@ -0,0 +1,138 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+## - Default values are shown as commented settings.
+##
+
+###############################################################################
+##
+## HTTP service
+##
+## - 3904 is standard as of 7/29/14.
+## - At this time, Cambria always binds to 0.0.0.0
+##
+#cambria.service.port=3904
+#tomcat.maxthreads=(tomcat default, which is usually 200)
+
+###############################################################################
+##
+## Broker Type
+##
+## The Cambria server can run either as a memory-only implementation, meant
+## for testing, or against Kafka. For a memory-only server, use "memory" for
+## the broker.type setting.
+##
+#broker.type=kafka
+
+###############################################################################
+##
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+##
+#config.zk.servers=localhost
+#config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+##
+#kafka.metadata.broker.list=localhost:9092
+#kafka.client.zookeeper=${config.zk.servers}
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. To protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64-encoded AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+#cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+#cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+
+cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+#cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.baseUrl=localhost
+#metrics.send.cambria.topic=cambria.apinode.metrics
+#metrics.send.cambria.sendEverySeconds=60
+
diff --git a/etc/cambriaApi_template.properties b/etc/cambriaApi_template.properties
new file mode 100644
index 0000000..59ed815
--- /dev/null
+++ b/etc/cambriaApi_template.properties
@@ -0,0 +1,130 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+## - Default values are shown as commented settings.
+##
+
+
+###############################################################################
+##
+## HTTP service
+##
+## - 3904 is standard as of 7/29/14.
+## - At this time, Cambria always binds to 0.0.0.0
+##
+cambria.service.port=${CAMBRIA_SERVICE_PORT}
+
+###############################################################################
+##
+## Broker Type
+##
+## The Cambria server can run either as a memory-only implementation, meant
+## for testing, or against Kafka. For a memory-only server, use "memory" for
+## the broker.type setting.
+##
+broker.type=${CAMBRIA_BROKER_TYPE}
+
+###############################################################################
+##
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+#
+config.zk.servers=${CAMBRIA_ZOOKEEPER_NODES}
+config.zk.root=/fe3c/cambria/config
+
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+##
+kafka.metadata.broker.list=${KAFKA_BROKER_LIST}
+kafka.client.zookeeper=${CAMBRIA_ZOOKEEPER_NODES}
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. To protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64-encoded AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+cambria.consumer.cache.sweepFreqSeconds=15
+cambria.consumer.cache.touchFreqMs=120000
+
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+cambria.consumer.cache.zkConnect=${CAMBRIA_ZOOKEEPER_NODES}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=<use-something-unique-to-this-instance>
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+metrics.send.cambria.enabled=true
+metrics.send.cambria.baseUrl=localhost:${CAMBRIA_SERVICE_PORT}
+metrics.send.cambria.topic=cambria.apinode.metrics
+metrics.send.cambria.sendEverySeconds=60
+
diff --git a/etc/log4j.xml b/etc/log4j.xml
new file mode 100644
index 0000000..5a15348
--- /dev/null
+++ b/etc/log4j.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<!DOCTYPE log4j:configuration PUBLIC
+ "-//APACHE//DTD LOG4J 1.2//EN" "http://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/xml/doc-files/log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+
+ <!-- available for console output, not generally used in deployment -->
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="threshold" value="INFO" />
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p]%m%n" />
+ </layout>
+ </appender>
+
+ <appender name="FILE" class="org.apache.log4j.RollingFileAppender">
+ <param name="threshold" value="INFO" />
+ <param name="File" value="./logs/cambria.log" /> <!-- use local dir by default; prod setup can overwrite -->
+ <param name="MaxFileSize" value="128MB"/>
+ <param name="MaxBackupIndex" value="10"/>
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p]%m%n" />
+ </layout>
+ </appender>
+
+ <appender name="ECOMP_ERROR" class="org.apache.log4j.RollingFileAppender">
+ <param name="threshold" value="INFO" />
+ <param name="File" value="./logs/error.log" /> <!-- use local dir by default; prod setup can overwrite -->
+ <param name="MaxFileSize" value="128MB"/>
+ <param name="MaxBackupIndex" value="10"/>
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n" />
+ </layout>
+ </appender>
+ <!-- the other 3 ECOMP logs are omitted for this release -->
+
+ <root>
+ <level value="INFO" />
+ <appender-ref ref="FILE" />
+ <appender-ref ref="ECOMP_ERROR" />
+ <appender-ref ref="CONSOLE" />
+ </root>
+
+</log4j:configuration>
diff --git a/etc/log4j_template.xml b/etc/log4j_template.xml
new file mode 100644
index 0000000..808a1bc
--- /dev/null
+++ b/etc/log4j_template.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<!DOCTYPE log4j:configuration PUBLIC
+ "-//APACHE//DTD LOG4J 1.2//EN" "http://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/xml/doc-files/log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">
+
+ <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
+ <param name="threshold" value="INFO" />
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="[%d{ISO8601}{GMT+0} GMT][%-10t][%-5p][%X{serverIp}]%m%n" />
+ </layout>
+ </appender>
+
+ <appender name="FILE" class="org.apache.log4j.RollingFileAppender">
+ <param name="threshold" value="${CAMBRIA_LOG_THRESHOLD}" />
+ <param name="File" value="${CAMBRIA_LOG_DIR}/cambria.log" />
+ <param name="MaxFileSize" value="128MB"/>
+ <param name="MaxBackupIndex" value="10"/>
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n" />
+ </layout>
+ </appender>
+
+ <!-- The ECOMP error.log log -->
+ <appender name="ECOMP_ERROR" class="org.apache.log4j.RollingFileAppender">
+ <param name="threshold" value="INFO" />
+ <param name="File" value="${CAMBRIA_LOG_DIR}/error.log" />
+ <param name="MaxFileSize" value="128MB"/>
+ <param name="MaxBackupIndex" value="10"/>
+ <layout class="org.apache.log4j.EnhancedPatternLayout">
+ <param name="ConversionPattern" value="%d{yyyy-MM-dd'T'HH:mm:ss}{GMT+0}+00:00|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%p|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%m%n" />
+ </layout>
+ </appender>
+ <!-- the other 3 ECOMP logs are omitted for this release -->
+
+ <root>
+ <level value="${CAMBRIA_LOG_THRESHOLD}" />
+ <appender-ref ref="FILE" />
+ </root>
+
+</log4j:configuration>
diff --git a/etc/logstash_cambria_template.conf b/etc/logstash_cambria_template.conf
new file mode 100644
index 0000000..2ddb7d5
--- /dev/null
+++ b/etc/logstash_cambria_template.conf
@@ -0,0 +1,36 @@
+input {
+ file {
+ path => "${CAMBRIA_SERVER_LOG}"
+ codec => multiline {
+ pattern => "^\[%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND}\]"
+ negate => "true"
+ what => "previous"
+ }
+ sincedb_path => "/opt/app/logstash/conf_stage/.sincedb_cambria"
+ start_position => "beginning"
+ type => "cambria"
+ }
+}
+
+filter {
+ if [type] == "cambria" {
+ grok {
+ match => ["message", "\[(?<date>%{YEAR}-%{MONTH}-%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND})\]\[%{DATA:logLevel}\]\[%{DATA:thread}\]\[%{DATA:class}\]\[%{DATA:id}\]%{GREEDYDATA:message}"]
+ }
+
+ date {
+      match => ["date", "YYYY-MMM-dd HH:mm:ss,SSS"]
+ }
+ }
+}
+
+output {
+ if [type] == "cambria" {
+ elasticsearch {
+ cluster => "2020SA"
+ host => "${ELASTICSEARCH_NODES}"
+ index => "cambria-%{+YYYY.MM.dd}"
+ }
+ }
+}
+
diff --git a/notes/capacityMath.xlsx b/notes/capacityMath.xlsx
new file mode 100644
index 0000000..51bc01b
--- /dev/null
+++ b/notes/capacityMath.xlsx
Binary files differ
diff --git a/notes/keys.txt b/notes/keys.txt
new file mode 100644
index 0000000..bfa13ac
--- /dev/null
+++ b/notes/keys.txt
@@ -0,0 +1,3 @@
+Southfield Sandbox:
+ Key: b/7ouTn9FfEw2PQwL0ov/Q==
+ IV: wR9xP5k5vbz/xD0LmtqQLw==
diff --git a/notes/systems.xlsx b/notes/systems.xlsx
new file mode 100644
index 0000000..2e424ec
--- /dev/null
+++ b/notes/systems.xlsx
Binary files differ
diff --git a/notes/users.csv b/notes/users.csv
new file mode 100644
index 0000000..2c75187
--- /dev/null
+++ b/notes/users.csv
@@ -0,0 +1,20 @@
+Group,Project,Dev Contact,Dev UID,Project Contact,Proj UID,Deployment Plan
+GFP-IP,vEPC,Dave Loreti,dl2652,Robert Chin,rc0421,2014.12
+GFP-Mobility,vEPC,Michael Yin,my2328,Fred Delaplace,fd7200,2014.12
+SMLS,,Ken Lee,kl5760,Shrikant Acharya,sa8763,prototyping
+GFP Cacher (Mob only?),276550c,Michael Yin,my2328,,,2015.04
+GFP-CPE for Gamma,Trinity,,,Cho Wong,cw1728,investigating
+GFP-CPE,Gamma - P272078d,Janet Kalajian,,,,2015.04
+GFP-Uverse,Uverse UCA migration,Kailas Deshmukh,kd046m,,,2015.02
+GFP-Uverse,TL1 Adapters,Barry Tai,bt721t,Kailas Deshmukh,kd046m,2015.02
+GFP-Uverse,UEB to NOM adapter,,,Kailas Deshmukh,kd046m,2015.02
+PMOSS,vEPC,Ken Martau,km1785,,,2014.12
+Kinsey D2.0 PoC,D2.0 PoC Collector,Jon Lynn,jl220w,Jesse Chan,jc898t,prototyping
+Kinsey D2.0 PoC,D2.0 Poc PM,Ken Martau,km1785,Jesse Chan,jc898t,prototyping
+Kinsey D2.0 PoC,D2.0 Poc FM,Peter Cardona,pc569h,Jesse Chan,jc898t,prototyping
+Kinsey D2.0 PoC,D2.0 Policy Mgr,Peter Cardona,pc569h,Jesse Chan,jc898t,prototyping
+Fotache Group?,Trinity PoC,Steve Solomon,ss5395,Alina Fotache,af3193,prototyping
+NetCool Group?,NetCool PoC,,,Prashant Rajpal,pr216d,prototyping
+NetCool-ITO Group?,NetCool-ITO PoC,,,Chenlock Lim,cl111y,prototyping
+A&AI ECOMP,D2.0 ECOMP,,,Jeff Polhemus,jp6726, prototyping
+NVP,D2.0 Policy,Bobby Mander,bm116p,,,prototyping
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..a26425e
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,418 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.onap.dmaap.messagerouter.msgrtr</groupId>
+ <artifactId>msgrtr</artifactId>
+ <version>0.0.5</version>
+ <packaging>jar</packaging>
+ <name>Message Router</name>
+ <description>Message Router - Restful interface built for kafka</description>
+ <url>https://github.com/att/dmaap-framework</url>
+ <properties>
+ <spring.version>3.2.14.RELEASE</spring.version>
+ <cxf.version>3.0.4</cxf.version>
+ <jstl.version>1.2</jstl.version>
+ <sonar.junit.reportsPath>target/surefire-reports</sonar.junit.reportsPath>
+ <sonar.cobertura.reportPath>target/cobertura/cobertura.ser</sonar.cobertura.reportPath>
+ <maven.compiler.target>1.7</maven.compiler.target>
+ <maven.compiler.source>1.7</maven.compiler.source>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <nexusproxy>https://nexus.onap.org</nexusproxy>
+ <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>
+ <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>
+ <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>
+ <sitePath>/content/sites/site/org/onap/datarouter/${project.artifactId}/${project.version}</sitePath>
+ </properties>
+
+ <!-- Distribution management -->
+	<!-- Currently all artifacts will be uploaded to att-public-group repository
+		on Maven Central -->
+
+ <!-- End Distribution management -->
+
+ <licenses>
+ <license>
+ <name>Apache License 2.0</name>
+ </license>
+ </licenses>
+
+ <developers>
+ <developer>
+ <name>Rajashree</name>
+ <email></email>
+ <organization>ATT</organization>
+ <organizationUrl>www.att.com</organizationUrl>
+ </developer>
+ <developer>
+ <name>Ramkumar</name>
+ <email></email>
+ <organization>ATT</organization>
+ <organizationUrl>www.att.com</organizationUrl>
+ </developer>
+ </developers>
+ <distributionManagement>
+ <repository>
+ <id>ecomp-releases</id>
+ <name>AAF Release Repository</name>
+ <url>${nexusproxy}${releaseNexusPath}</url>
+ </repository>
+ <snapshotRepository>
+ <id>ecomp-snapshots</id>
+ <name>AAF Snapshot Repository</name>
+ <url>${nexusproxy}${snapshotNexusPath}</url>
+ </snapshotRepository>
+ <site>
+ <id>ecomp-site</id>
+ <url>dav:${nexusproxy}${sitePath}</url>
+ </site>
+ </distributionManagement>
+
+ <pluginRepositories>
+ <pluginRepository>
+ <id>onap-plugin-snapshots</id>
+ <url>https://nexus.onap.org/content/repositories/snapshots/</url>
+ </pluginRepository>
+ </pluginRepositories>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.11</version>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- slf4j logger -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>1.7.6</version>
+ </dependency>
+
+ <dependency>
+ <groupId>javax.inject</groupId>
+ <artifactId>javax.inject</artifactId>
+ <version>1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.att.ajsc</groupId>
+ <artifactId>ajsc-core</artifactId>
+ <version>1.0.0</version>
+ </dependency>
+
+ <dependency>
+ <groupId>javax.ws.rs</groupId>
+ <artifactId>javax.ws.rs-api</artifactId>
+ <version>2.0.1</version>
+ </dependency>
+
+ <!-- <dependency> <groupId>org.apache.cxf</groupId> <artifactId>cxf-rt-rs-client</artifactId>
+ <version>${cxf.version}</version> </dependency> <dependency> <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-frontend-jaxws</artifactId> <version>${cxf.version}</version>
+ </dependency> <dependency> <groupId>org.apache.cxf</groupId> <artifactId>cxf-rt-transports-http</artifactId>
+			<version>${cxf.version}</version> </dependency> Jetty is needed if you
+			are not using the CXFServlet <dependency> <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-transports-http-jetty</artifactId> <version>${cxf.version}</version>
+ </dependency> -->
+ <!-- Begin - Spring Dependencies for DI -->
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-webmvc</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>javax.servlet-api</artifactId>
+ <version>3.0.1</version>
+ <scope>provided</scope>
+ </dependency>
+ <!-- End - Spring Dependencies for DI -->
+ <!-- Begin - Dependency on Dmaap Spring layer -->
+ <!-- <dependency> <groupId>com.att.dmaap</groupId> <artifactId>dmaap-spring</artifactId>
+ <version>0.0.1-SNAPSHOT</version> </dependency> -->
+ <!-- End - Dependency on Dmaap Spring layer -->
+ <!-- Begin - Dependency on log4j for logging purpose -->
+ <!-- <dependency> <groupId>log4j</groupId> <artifactId>log4j</artifactId>
+ <version>1.2.17</version> </dependency> -->
+ <!-- Log4j's enhanced pattern layout is shipped separately <dependency>
+ <groupId>log4j</groupId> <artifactId>apache-log4j-extras</artifactId> <version>1.2.17</version>
+ </dependency> -->
+ <!-- End - Dependency on log4j for logging purpose -->
+ <!-- ZooKeeper Library -->
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>3.4.6</version>
+ </dependency>
+
+ <!-- JSON libraries -->
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>20131018</version>
+ </dependency>
+
+ <!-- Apache Kafka -->
+ <dependency>
+ <groupId>org.apache.kafka</groupId>
+ <artifactId>kafka_2.10</artifactId>
+ <version>0.8.2.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.att.eelf</groupId>
+ <artifactId>eelf-core</artifactId>
+ <version>0.0.1</version>
+ <scope>compile</scope>
+ </dependency>
+ <!-- our NSA server library -->
+ <dependency>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>nsaServerLibrary</artifactId>
+ <version>1.0.10</version>
+ </dependency>
+ <dependency>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>saToolkit</artifactId>
+ <version>0.0.1</version>
+ </dependency>
+
+ <!-- our Highland Park library -->
+ <!-- <dependency>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>highlandParkCore</artifactId>
+ <version>0.4.9</version>
+ </dependency> -->
+
+ <!-- our base client library, for its command line tools -->
+ <dependency>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>saClientLibrary</artifactId>
+ <version>0.0.1</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient-cache</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient-cache</artifactId>
+ <version>4.4.1</version>
+ </dependency>
+
+ <!-- explicit jline add b/c it conflicts with the zk client -->
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>2.12.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.curator</groupId>
+ <artifactId>curator-recipes</artifactId>
+ <version>2.6.0</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.curator</groupId>
+ <artifactId>curator-test</artifactId>
+ <version>2.6.0</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.att.aft</groupId>
+ <artifactId>dme2</artifactId>
+ <version>3.1.200</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ <version>2.8.0</version>
+ </dependency>
+ </dependencies>
+ <build>
+ <finalName>DMaaP</finalName>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>**/*.properties</include>
+ </includes>
+ </resource>
+ <!-- <resource> <directory>src/main/config</directory> <filtering>true</filtering>
+ <includes> <include>**/log4j*.xml</include> </includes> </resource> <resource>
+ <directory>src/main/resources</directory> <filtering>false</filtering> <excludes>
+ <exclude>**/cambriaApiVersion.properties</exclude> </excludes> </resource> -->
+ </resources>
+ <plugins>
+ <!-- <plugin> <artifactId>maven-assembly-plugin</artifactId> <version>2.4.1</version>
+ <configuration> <descriptors> <descriptor>src/assembly/dep.xml</descriptor>
+ </descriptors> </configuration> <executions> <execution> <id>make-assembly</id>
+ this is used for inheritance merges -->
+ <!-- <phase>package</phase> bind to the packaging phase <goals> <goal>single</goal>
+ </goals> </execution> </executions> </plugin> -->
+ <!-- -->
+ <plugin>
+ <groupId>org.sonatype.plugins</groupId>
+ <artifactId>nexus-staging-maven-plugin</artifactId>
+ <version>1.6.7</version>
+ <extensions>true</extensions>
+ <configuration>
+ <nexusUrl>${nexusproxy}</nexusUrl>
+ <stagingProfileId>176c31dfe190a</stagingProfileId>
+ <serverId>ecomp-staging</serverId>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>2.10.4</version>
+ <configuration>
+ <additionalparam>-Xdoclint:none</additionalparam>
+ </configuration>
+ <executions>
+ <execution>
+ <id>attach-javadocs</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <id>attach-sources</id>
+ <goals>
+ <goal>jar-no-fork</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.12.4</version>
+ <configuration>
+ <excludes>
+ <!-- exclude until junits updated -->
+ <exclude>**/DME2*.java</exclude>
+ </excludes>
+ <!-- <skipTests>true</skipTests> -->
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>cobertura-maven-plugin</artifactId>
+ <version>2.7</version>
+ <configuration>
+ <formats>
+ <format>html</format>
+ <format>xml</format>
+ </formats>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jacoco</groupId>
+ <artifactId>jacoco-maven-plugin</artifactId>
+ <version>0.6.2.201302030002</version>
+ <configuration>
+ <destfile>${basedir}/target/coverage-reports/jacoco-unit.exec</destfile>
+ <datafile>${basedir}/target/coverage-reports/jacoco-unit.exec</datafile>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-gpg-plugin</artifactId>
+ <version>1.5</version>
+ <executions>
+ <execution>
+ <id>sign-artifacts</id>
+ <phase>verify</phase>
+ <goals>
+ <goal>sign</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
+ </plugins>
+ </build>
+
+ <!-- <profiles> <profile> <id>jenkins</id> <activation> <property> <name>env.BUILD_NUMBER</name>
+ </property> </activation> <build> <plugins> <plugin> <groupId>org.codehaus.mojo</groupId>
+ <artifactId>cobertura-maven-plugin</artifactId> </plugin> <plugin> <groupId>com.att.aft.swm</groupId>
+ <artifactId>swm-plugin</artifactId> <version>1.4.4.12</version> <configuration>
+ <distFilesRootDirPath>/opt/app/dmaap/msgrtr/jenkinsbuild</distFilesRootDirPath>
+ <version>${project.version}-${env.BUILD_NUMBER}</version> <scriptExcludes>
+ <scriptExclude>**/swmpkgclean.sh</scriptExclude> </scriptExcludes> </configuration>
+ <executions> <execution> <id>pkgstage</id> <goals> <goal>pkgstage</goal>
+ </goals> </execution> <execution> <id>pkgcreate</id> <goals> <goal>pkgcreate</goal>
+ </goals> </execution> <execution> <id>pkginstall</id> <goals> <goal>install</goal>
+ </goals> <configuration> <componentName>com.att.nsa:msgrtr</componentName>
+ <version>${project.version}-${env.BUILD_NUMBER}</version> <waitTimeMins>4</waitTimeMins>
+ <properties> <property> <name>overrideDependencyConflicts</name> <value>true</value>
+ </property> <property> <name>AFTSWM_NOTIFY_ADDRESSES</name> <value>mailto:rs857c@att.com</value>
+ </property> </properties> </configuration> </execution> </executions> </plugin>
+ <plugin> <artifactId>maven-antrun-plugin</artifactId> <executions> <execution>
+ <phase>package</phase> <configuration> <tasks> <copy file="./src/main/scripts/swmpkgclean.sh"
+ toDir="./target" /> </tasks> </configuration> <goals> <goal>run</goal> </goals>
+ </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId> <version>2.5</version> <configuration>
+ <skip>true</skip> </configuration> </plugin> </plugins> </build> </profile>
+ </profiles> -->
+</project> \ No newline at end of file
diff --git a/src/assembly/dep.xml b/src/assembly/dep.xml
new file mode 100644
index 0000000..ab1c8f8
--- /dev/null
+++ b/src/assembly/dep.xml
@@ -0,0 +1,50 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>bundle</id>
+ <formats>
+ <format>tar.gz</format>
+ </formats>
+ <files>
+ <file>
+ <source>target/DMaaP.war</source>
+ <outputDirectory>lib</outputDirectory>
+ </file>
+ </files>
+ <fileSets>
+ <fileSet>
+ <directory>src/main/scripts</directory>
+ <outputDirectory>bin</outputDirectory>
+ <includes>
+ <include>**/*.sh</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>etc</directory>
+ <outputDirectory>etc</outputDirectory>
+ </fileSet>
+ </fileSets>
+
+</assembly>
diff --git a/src/main/config/fixme.txt b/src/main/config/fixme.txt
new file mode 100644
index 0000000..ad5a0fd
--- /dev/null
+++ b/src/main/config/fixme.txt
@@ -0,0 +1,4 @@
+
+FIXME: src/main/config gets picked up by the resources:resources target and put into target/generated-conf, but never
+makes it into the jar...?
+
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
new file mode 100644
index 0000000..b8e5212
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
@@ -0,0 +1,198 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.apiServer.metrics.cambria;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.CdmMetricsRegistry;
+import com.att.nsa.metrics.impl.CdmConstant;
+
+/**
+ * MetricsSender will send the given metrics registry content as an event on the
+ * Cambria event broker to the given topic.
+ *
+ * @author author
+ *
+ */
+public class DMaaPMetricsSender implements Runnable {
+	/** Property key: enable/disable periodic metrics publishing (the literal "false" disables it). */
+	public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled";
+	/** Property key: base URL of the Cambria broker metrics are published to. */
+	public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl";
+	/** Property key: topic on which metrics events are published. */
+	public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic";
+	/** Property key: publish interval, in seconds. */
+	public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds";
+
+	/**
+	 * Schedule a periodic send of the given metrics registry using the settings
+	 * in the message-router property file for the Cambria location, topic, and
+	 * send frequency.
+	 * <br/>
+	 * <br/>
+	 * If the enabled flag is set to "false", this method returns null.
+	 *
+	 * @param scheduler executor that will run the periodic task
+	 * @param metrics the registry to send
+	 * @param defaultTopic retained for interface compatibility; the fallback topic is the
+	 *        built-in "msgrtr.apinode.metrics.dmaap" (as in the original behavior)
+	 * @return a handle to the scheduled task, or null when metrics sending is disabled
+	 */
+	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+			String defaultTopic) {
+		log.info("Inside : DMaaPMetricsSender : sendPeriodically");
+
+		final String cambriaSetting = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, kSetting_CambriaEnabled);
+		// sending is on by default; only the explicit value "false" turns it off
+		if (cambriaSetting != null && cambriaSetting.equals("false")) {
+			return null;
+		}
+
+		// FIX: the original read kSetting_CambriaEnabled here, so the configured
+		// base URL was ignored (and "true" could end up used as the broker URL)
+		String cambriaBaseUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, kSetting_CambriaBaseUrl);
+		if (cambriaBaseUrl == null) {
+			cambriaBaseUrl = "localhost";
+		}
+
+		String cambriaTopic = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, kSetting_CambriaTopic);
+		if (cambriaTopic == null) {
+			cambriaTopic = "msgrtr.apinode.metrics.dmaap";
+		}
+
+		final String freqSetting = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, kSetting_CambriaSendFreqSecs);
+		int sendFreqSecs = 30; // default publish interval
+		if (freqSetting != null) {
+			try {
+				sendFreqSecs = Integer.parseInt(freqSetting);
+			} catch (NumberFormatException nfe) {
+				// FIX: a malformed property previously crashed scheduling; fall back to the default
+				log.warn("Invalid value for " + kSetting_CambriaSendFreqSecs + ": " + freqSetting
+						+ "; using default " + sendFreqSecs, nfe);
+			}
+		}
+
+		return DMaaPMetricsSender.sendPeriodically(scheduler, metrics, cambriaBaseUrl, cambriaTopic, sendFreqSecs);
+	}
+
+	/**
+	 * Schedule a periodic send of the metrics registry to the given Cambria
+	 * broker and topic.
+	 *
+	 * @param scheduler executor that will run the periodic task
+	 * @param metrics the registry to send
+	 * @param cambriaBaseUrl the base URL for Cambria
+	 * @param topic the topic to publish on
+	 * @param everySeconds how frequently to publish
+	 * @return a handle to the scheduled task
+	 */
+	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+			String cambriaBaseUrl, String topic, int everySeconds) {
+		return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds,
+				everySeconds, TimeUnit.SECONDS);
+	}
+
+	/**
+	 * Create a metrics sender publishing to the given Cambria broker and topic.
+	 *
+	 * @param metrics the registry to send
+	 * @param cambriaBaseUrl the base URL for Cambria
+	 * @param topic the topic to publish on
+	 * @throws RuntimeException if the local hostname cannot be resolved
+	 */
+	public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) {
+		try {
+			fMetrics = metrics;
+			fHostname = InetAddress.getLocalHost().getHostName();
+
+			// a "simple" publisher sends each metrics event immediately (no batching)
+			fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
+		} catch (UnknownHostException e) {
+			log.warn("Unable to get localhost address in MetricsSender constructor.", e);
+			throw new RuntimeException(e);
+		}
+	}
+
+	/**
+	 * Send the current metrics snapshot immediately. Failures are logged and
+	 * swallowed so the scheduled task keeps running and retries next cycle.
+	 */
+	public void send() {
+		try {
+			final JSONObject o = fMetrics.toJson();
+			o.put("hostname", fHostname);
+			o.put("now", System.currentTimeMillis());
+			o.put("metricsSendTime", addTimeStamp());
+			o.put("transactionEnabled", false);
+			fCambria.send(fHostname, o.toString());
+		} catch (JSONException | IOException e) {
+			// best-effort publish: include the stack trace so failures are diagnosable
+			log.warn("Error posting metrics to Cambria: " + e.getMessage(), e);
+		}
+	}
+
+	/**
+	 * Run() calls send(). It's meant for use in a background-scheduled task.
+	 */
+	@Override
+	public void run() {
+		send();
+	}
+
+	private final CdmMetricsRegistry fMetrics;
+	private final CambriaPublisher fCambria;
+	private final String fHostname;
+
+	// FIX: register the logger under this class rather than the library's MetricsSender
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPMetricsSender.class);
+
+	/**
+	 * Creates and returns a CdmConstant carrying the current timestamp
+	 * (epoch seconds) together with a human-readable label.
+	 *
+	 * @return timestamp constant added to each metrics event
+	 */
+	public CdmConstant addTimeStamp() {
+		// Add the timestamp with every metrics send
+		final long metricsSendTime = System.currentTimeMillis();
+		final Date d = new Date(metricsSendTime);
+		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+		return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text);
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java
new file mode 100644
index 0000000..e627f23
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java
@@ -0,0 +1,80 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
+
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+
+import com.att.nsa.apiServer.NsaAppException;
+
+public class CambriaApiException extends NsaAppException
+{
+
+ private ErrorResponse errRes;
+ /**
+ * Implements constructor CambriaApiException
+ * @param jsonObject
+ *
+ */
+ public CambriaApiException ( JSONObject jsonObject )
+ {
+ super ( jsonObject );
+ }
+
+ /**
+ * Implements constructor CambriaApiException
+ * @param status
+ * @param msg
+ */
+ public CambriaApiException ( int status, String msg )
+ {
+ super ( status, msg );
+ }
+
+ /**
+ * Implements constructor CambriaApiException
+ * @param status
+ * @param jsonObject
+ */
+ public CambriaApiException ( int status, JSONObject jsonObject )
+ {
+ super ( status, jsonObject );
+ }
+
+ public CambriaApiException (ErrorResponse errRes)
+ {
+ super(errRes.getHttpStatusCode(),errRes.getErrorMessage());
+ this.errRes = errRes;
+ }
+
+ /*
+ * defined long type constant serialVersionUID
+ */
+ private static final long serialVersionUID = 1L;
+ public ErrorResponse getErrRes() {
+ return errRes;
+ }
+
+ public void setErrRes(ErrorResponse errRes) {
+ this.errRes = errRes;
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java
new file mode 100644
index 0000000..ec9e43f
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+/**
+ * CambriaApiVersionInfo will provide the version of cambria code
+ *
+ * @author author
+ *
+ */
+public class CambriaApiVersionInfo {
+
+ /**
+ * 3 constants are defined:-
+ * PROPS,VERSION and LOG
+ */
+
+ private static final Properties PROPS = new Properties();
+ private static final String VERSION;
+
+
+ private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class);
+
+ /**
+ * private constructor created with no argument
+ * to avoid default constructor
+ */
+ private CambriaApiVersionInfo()
+ {
+
+ }
+
+ /**
+ * returns version of String type
+ */
+ public static String getVersion() {
+ return VERSION;
+ }
+
+ /**
+ *
+ * defines static initialization method
+ * It initializes VERSION Constant
+ * it handles exception in try catch block
+ * and throws IOException
+ *
+ */
+
+ static {
+ String use = null;
+ try {
+ final InputStream is = CambriaApiVersionInfo.class
+ .getResourceAsStream("/cambriaApiVersion.properties");
+ if (is != null) {
+ PROPS.load(is);
+ use = PROPS.getProperty("cambriaApiVersion", null);
+ }
+ } catch (IOException e) {
+ LOG.error("Failed due to IO EXception:"+e);
+ }
+ VERSION = use;
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java
new file mode 100644
index 0000000..8e5aa76
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java
@@ -0,0 +1,96 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
+
/**
 * A consumer interface. Consumers pull the next message from a given topic.
 *
 * @author author
 */
public interface Consumer {
	/**
	 * A single consumed message, exposing its offset and payload.
	 *
	 * @author author
	 */
	public interface Message {
		/**
		 * Returns the offset of this particular message.
		 *
		 * @return the message offset
		 */
		long getOffset();

		/**
		 * Returns the message payload.
		 *
		 * @return the message body
		 */
		String getMessage();
	}

	/**
	 * Gets this consumer's name.
	 *
	 * @return the consumer name
	 */
	String getName();

	/**
	 * Gets the creation time in ms.
	 *
	 * @return creation time, ms since the epoch
	 */
	long getCreateTimeMs();

	/**
	 * Gets the last access time in ms.
	 *
	 * @return last access time, ms since the epoch
	 */
	long getLastAccessMs();

	/**
	 * Gets the next message from this source. This method must not block.
	 *
	 * @return the next message, or null if none are waiting
	 */
	Message nextMessage();

	/**
	 * Closes/cleans up this consumer.
	 */
	void close();

	/**
	 * Commits the offset of the last consumed message.
	 */
	void commitOffsets();

	/**
	 * Gets the offset this consumer is currently at.
	 *
	 * @return the current offset
	 */
	long getOffset();
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java
new file mode 100644
index 0000000..dddca63
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java
@@ -0,0 +1,110 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
+
+import java.util.Collection;
+
+/**
+ * This is the factory class to instantiate the consumer
+ *
+ * @author author
+ *
+ */
+
+public interface ConsumerFactory {
+ public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled";
+ public static boolean kDefault_IsCacheEnabled = true;
+
+ /**
+ * User defined exception for Unavailable Exception
+ *
+ * @author author
+ *
+ */
+ public class UnavailableException extends Exception {
+ /**
+ * Unavailable Exception with message
+ *
+ * @param msg
+ */
+ public UnavailableException(String msg) {
+ super(msg);
+ }
+
+ /**
+ * Unavailable Exception with the throwable object
+ *
+ * @param t
+ */
+ public UnavailableException(Throwable t) {
+ super(t);
+ }
+
+ /**
+ * Unavailable Exception with the message and cause
+ *
+ * @param msg
+ * @param cause
+ */
+ public UnavailableException(String msg, Throwable cause) {
+ super(msg, cause);
+ }
+
+ private static final long serialVersionUID = 1L;
+ }
+
+ /**
+ * For admin use, drop all cached consumers.
+ */
+ public void dropCache();
+
+ /**
+ * Get or create a consumer for the given set of info (topic, group, id)
+ *
+ * @param topic
+ * @param consumerGroupId
+ * @param clientId
+ * @param timeoutMs
+ * @return
+ * @throws UnavailableException
+ */
+ public Consumer getConsumerFor(String topic, String consumerGroupId,
+ String clientId, int timeoutMs) throws UnavailableException;
+
+ /**
+ * For factories that employ a caching mechanism, this allows callers to
+ * explicitly destory a consumer that resides in the factory's cache.
+ *
+ * @param topic
+ * @param consumerGroupId
+ * @param clientId
+ */
+ public void destroyConsumer(String topic, String consumerGroupId,
+ String clientId);
+
+ /**
+ * For admin/debug, we provide access to the consumers
+ *
+ * @return a collection of consumers
+ */
+ public Collection<? extends Consumer> getConsumers();
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java
new file mode 100644
index 0000000..f0900ff
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java
@@ -0,0 +1,71 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
+
+import com.att.nsa.metrics.CdmMetricsRegistry;
+/**
+ * This interface will help to generate metrics
+ * @author author
+ *
+ */
+public interface MetricsSet extends CdmMetricsRegistry{
+
+ /**
+ * This method will setup cambria sender code
+ */
+ public void setupCambriaSender ();
+ /**
+ * This method will define on route complete
+ * @param name
+ * @param durationMs
+ */
+ public void onRouteComplete ( String name, long durationMs );
+ /**
+ * This method will help the kafka publisher while publishing the messages
+ * @param amount
+ */
+ public void publishTick ( int amount );
+ /**
+ * This method will help the kafka consumer while consuming the messages
+ * @param amount
+ */
+ public void consumeTick ( int amount );
+ /**
+ * This method will call if the kafka consumer cache missed
+ */
+ public void onKafkaConsumerCacheMiss ();
+ /**
+ * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages
+ */
+ public void onKafkaConsumerCacheHit ();
+ /**
+ * This method will call if the kafka consumer cache claimed
+ */
+ public void onKafkaConsumerClaimed ();
+ /**
+ * This method will call if Kafka consumer is timed out
+ */
+ public void onKafkaConsumerTimeout ();
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java
new file mode 100644
index 0000000..2557980
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java
@@ -0,0 +1,98 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * A publisher interface. Publishers receive messages and post them to a topic.
+ * @author author
+ */
+public interface Publisher
+{
+ /**
+ * A message interface. The message has a key and a body.
+ * @author author
+ */
+ public interface message
+ {
+ /**
+ * Get the key for this message. The key is used to partition messages
+ * into "sub-streams" that have guaranteed order. The key can be null,
+ * which means the message can be processed without any concern for order.
+ *
+ * @return a key, possibly null
+ */
+ String getKey();
+
+ /**
+ * Get the message body.
+ * @return a message body
+ */
+ String getMessage();
+ /**
+ * set the logging params for transaction enabled logging
+ * @param logDetails
+ */
+ void setLogDetails (LogDetails logDetails);
+ /**
+ * Get the log details for transaction enabled logging
+ * @return LogDetails
+ */
+ LogDetails getLogDetails ();
+
+ /**
+ * boolean transactionEnabled
+ * @return true/false
+ */
+ boolean isTransactionEnabled();
+ /**
+ * Set the transaction enabled flag from prop file or topic based implementation
+ * @param transactionEnabled
+ */
+ void setTransactionEnabled(boolean transactionEnabled);
+ }
+
+ /**
+ * Send a single message to a topic. Equivalent to sendMessages with a list of size 1.
+ * @param topic
+ * @param msg
+ * @throws IOException
+ */
+ public void sendMessage ( String topic, message msg ) throws IOException;
+
+ /**
+ * Send messages to a topic.
+ * @param topic
+ * @param msgs
+ * @throws IOException
+ */
+ public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
+
+ public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java
new file mode 100644
index 0000000..1ea7c0d
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java
@@ -0,0 +1,245 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.message.MessageAndMetadata;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ *
+ * @author author
+ *
+ */
+public class KafkaConsumer implements Consumer {
+ private enum State {
+ OPENED, CLOSED
+ }
+
+ /**
+ * KafkaConsumer() is constructor. It has following 4 parameters:-
+ * @param topic
+ * @param group
+ * @param id
+ * @param cc
+ *
+ */
+
+ public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
+ fTopic = topic;
+ fGroup = group;
+ fId = id;
+ fConnector = cc;
+
+ fCreateTimeMs = System.currentTimeMillis();
+ fLastTouch = fCreateTimeMs;
+
+ fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+ offset = 0;
+
+ state = KafkaConsumer.State.OPENED;
+
+ final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
+ topicCountMap.put(fTopic, 1);
+ final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = fConnector
+ .createMessageStreams(topicCountMap);
+ final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(fTopic);
+ fStream = streams.iterator().next();
+ }
+
+
+ /** getName() method returns string type value.
+ * returns 3 parameters in string:-
+ * fTopic,fGroup,fId
+ * @Override
+ */
+ public String getName() {
+ return fTopic + " : " + fGroup + " : " + fId;
+ }
+
+ /** getCreateTimeMs() method returns long type value.
+ * returns fCreateTimeMs variable value
+ * @Override
+ *
+ */
+ public long getCreateTimeMs() {
+ return fCreateTimeMs;
+ }
+
+ /** getLastAccessMs() method returns long type value.
+ * returns fLastTouch variable value
+ * @Override
+ *
+ */
+ public long getLastAccessMs() {
+ return fLastTouch;
+ }
+
+
+ /**
+ * nextMessage() is synchronized method that means at a time only one object can access it.
+ * getName() method returns String which is of type Consumer.Message
+ * @Override
+ * */
+ public synchronized Consumer.Message nextMessage() {
+ if (getState() == KafkaConsumer.State.CLOSED) {
+ log.warn("nextMessage() called on closed KafkaConsumer " + getName());
+ return null;
+ }
+
+ try {
+ ConsumerIterator<byte[], byte[]> it = fStream.iterator();
+ if (it.hasNext()) {
+ final MessageAndMetadata<byte[], byte[]> msg = it.next();
+ offset = msg.offset();
+
+ return new Consumer.Message() {
+ @Override
+ public long getOffset() {
+ return msg.offset();
+ }
+
+ @Override
+ public String getMessage() {
+ return new String(msg.message());
+ }
+ };
+ }
+ } catch (kafka.consumer.ConsumerTimeoutException x) {
+ log.debug(fLogTag + ": ConsumerTimeoutException in Kafka consumer; returning null. ");
+ } catch (java.lang.IllegalStateException x) {
+ log.error(fLogTag + ": Illegal state exception in Kafka consumer; dropping stream. " + x.getMessage());
+ }
+
+ return null;
+ }
+
+ /** getOffset() method returns long type value.
+ * returns offset variable value
+ * @Override
+ *
+ */
+ public long getOffset() {
+ return offset;
+ }
+
+ /** commit offsets
+ * commitOffsets() method will be called on closed of KafkaConsumer.
+ * @Override
+ *
+ */
+ public void commitOffsets() {
+ if (getState() == KafkaConsumer.State.CLOSED) {
+ log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+ return;
+ }
+ fConnector.commitOffsets();
+ }
+
+ /**
+ * updating fLastTouch with current time in ms
+ */
+ public void touch() {
+ fLastTouch = System.currentTimeMillis();
+ }
+
+ /** getLastTouch() method returns long type value.
+ * returns fLastTouch variable value
+ *
+ */
+ public long getLastTouch() {
+ return fLastTouch;
+ }
+
+ /**
+ * setting the kafkaConsumer state to closed
+ */
+ public synchronized void close() {
+ if (getState() == KafkaConsumer.State.CLOSED) {
+ log.warn("close() called on closed KafkaConsumer " + getName());
+ return;
+ }
+
+ setState(KafkaConsumer.State.CLOSED);
+ fConnector.shutdown();
+ }
+
+ /**
+ * getConsumerGroup() returns Consumer group
+ * @return
+ */
+ public String getConsumerGroup() {
+ return fGroup;
+ }
+
+ /**
+ * getConsumerId returns Consumer Id
+ * @return
+ */
+ public String getConsumerId() {
+ return fId;
+ }
+
+ /**
+ * getState returns kafkaconsumer state
+ * @return
+ */
+ private KafkaConsumer.State getState() {
+ return this.state;
+ }
+
+ /**
+ * setState() sets the kafkaConsumer state
+ * @param state
+ */
+ private void setState(KafkaConsumer.State state) {
+ this.state = state;
+ }
+
+ private ConsumerConnector fConnector;
+ private final String fTopic;
+ private final String fGroup;
+ private final String fId;
+ private final String fLogTag;
+ private final KafkaStream<byte[], byte[]> fStream;
+ private long fCreateTimeMs;
+ private long fLastTouch;
+ private long offset;
+ private KafkaConsumer.State state;
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
+ //private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java
new file mode 100644
index 0000000..4cf7f3a
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java
@@ -0,0 +1,614 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.I0Itec.zkclient.exception.ZkException;
+import org.I0Itec.zkclient.exception.ZkInterruptedException;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.imps.CuratorFrameworkState;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.PathChildrenCache;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
+import org.apache.curator.framework.state.ConnectionState;
+import org.apache.curator.framework.state.ConnectionStateListener;
+import org.apache.curator.utils.EnsurePath;
+import org.apache.curator.utils.ZKPaths;
+import org.apache.http.annotation.NotThreadSafe;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
+ * must provide the necessary synchronization.
+ * @author author
+ *
+ */
+@NotThreadSafe
+public class KafkaConsumerCache {
+
+	/** Config key: how long (ms) to back off during consumer handover. */
+	private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
+	private static final int kDefault_ConsumerHandoverWaitMs = 500;
+
+	/** Config key: sweep frequency, in seconds. */
+	private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds";
+	/** Config key: how recently a consumer must have been touched to stay cached, in ms. */
+	private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs";
+
+	/** Config key: ZooKeeper base path for consumer ownership nodes. */
+	private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath";
+	private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache";
+
+	// kafka defaults to timing out a client after 6 seconds of inactivity, but
+	// it heartbeats even when the client isn't fetching. Here, we don't
+	// want to prematurely rebalance the consumer group. Assuming clients are
+	// hitting
+	// the server at least every 30 seconds, timing out after 2 minutes should
+	// be okay.
+	// FIXME: consider allowing the client to specify its expected call rate?
+	private static final long kDefault_MustTouchEveryMs = 1000 * 60 * 2;
+
+	// check for expirations pretty regularly
+	private static final long kDefault_SweepEverySeconds = 15;
+
+	/** Lifecycle states of this cache relative to its ZooKeeper connection. */
+	private enum Status {
+		NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
+	}
+
+	/**
+	 * Exception thrown when the consumer cache cannot service a request,
+	 * e.g. ZooKeeper is unavailable or the cache has not been started.
+	 *
+	 * @author author
+	 *
+	 */
+	public class KafkaConsumerCacheException extends Exception {
+		/**
+		 * Wraps an underlying cause.
+		 *
+		 * @param t the cause
+		 */
+		KafkaConsumerCacheException(Throwable t) {
+			super(t);
+		}
+
+		/**
+		 * Creates an exception with the given detail message.
+		 *
+		 * @param s the detail message
+		 */
+		public KafkaConsumerCacheException(String s) {
+			super(s);
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Creates a KafkaConsumerCache object. Before it is used, you must call
+	 * startCache().
+	 *
+	 * @param apiId this API node's identifier; recorded in ZK ownership nodes
+	 * @param metrics sink for cache hit/miss/timeout/claim counters
+	 */
+	public KafkaConsumerCache(String apiId, MetricsSet metrics) {
+
+		if (apiId == null) {
+			throw new IllegalArgumentException("API Node ID must be specified.");
+		}
+
+		fApiId = apiId;
+		// fSettings = s;
+		fMetrics = metrics;
+		// resolve the ZK base path from configuration, falling back to the default
+		String strkSetting_ZkBasePath= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ZkBasePath);
+		if(null==strkSetting_ZkBasePath)strkSetting_ZkBasePath = kDefault_ZkBasePath;
+		fBaseZkPath = strkSetting_ZkBasePath;
+
+		fConsumers = new ConcurrentHashMap<String, KafkaConsumer>();
+		fSweepScheduler = Executors.newScheduledThreadPool(1);
+
+		curatorConsumerCache = null;
+
+		status = Status.NOT_STARTED;
+
+		// react to ZK connection lifecycle changes; see the handle* methods below
+		listener = new ConnectionStateListener() {
+			public void stateChanged(CuratorFramework client, ConnectionState newState) {
+				if (newState == ConnectionState.LOST) {
+					log.info("ZooKeeper connection expired");
+					handleConnectionLoss();
+				} else if (newState == ConnectionState.READ_ONLY) {
+					log.warn("ZooKeeper connection set to read only mode.");
+				} else if (newState == ConnectionState.RECONNECTED) {
+					log.info("ZooKeeper connection re-established");
+					handleReconnection();
+				} else if (newState == ConnectionState.SUSPENDED) {
+					log.warn("ZooKeeper connection has been suspended.");
+					handleConnectionSuspended();
+				}
+			}
+		};
+	}
+
+	/**
+	 * Start the cache service. This must be called before any get/put
+	 * operations. Registers the ZK connection listener, starts a
+	 * PathChildrenCache over the ownership nodes, and schedules the
+	 * periodic expiry sweep.
+	 *
+	 * @param mode
+	 *            DMAAP or cambria; selects where the curator client comes from
+	 * @param curator
+	 *            used directly in DMAAP mode; replaced by the
+	 *            ConfigurationReader curator in cambria mode
+	 * @throws IOException
+	 * @throws KafkaConsumerCacheException
+	 *             if ZK connection or child-cache startup fails
+	 */
+	public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
+		try {
+
+			// CuratorFramework curator = null;
+
+			// Changed the class from where we are initializing the curator
+			// framework
+			if (mode != null && mode.equals(CambriaConstants.CAMBRIA)) {
+				curator = ConfigurationReader.getCurator();
+			} else if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
+				curator = getCuratorFramework(curator);
+			}
+			// NOTE(review): if mode is null or unrecognized, curator keeps its
+			// incoming value and may be null here, which would NPE below --
+			// confirm callers always pass a known mode or a non-null curator.
+
+			curator.getConnectionStateListenable().addListener(listener);
+
+			setStatus(Status.CONNECTED);
+
+			// watch ownership nodes so we notice other API nodes claiming consumers
+			curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
+			curatorConsumerCache.start();
+
+			curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
+				public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
+					switch (event.getType()) {
+					case CHILD_ADDED: {
+						// node data holds the owning API node's id
+						final String apiId = new String(event.getData().getData());
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						log.info(apiId + " started consumer " + consumer);
+						break;
+					}
+					case CHILD_UPDATED: {
+						final String apiId = new String(event.getData().getData());
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						// an update means another node claimed a consumer we hold locally
+						if (fConsumers.containsKey(consumer)) {
+							log.info(apiId + " claimed consumer " + consumer + " from " + fApiId);
+
+							dropClaimedConsumer(consumer);
+						}
+
+						break;
+					}
+					case CHILD_REMOVED: {
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						if (fConsumers.containsKey(consumer)) {
+							log.info("Someone wanted consumer " + consumer + " gone; removing it from the cache");
+							dropConsumer(consumer, false);
+						}
+
+						break;
+					}
+					default:
+						break;
+					}
+				}
+			});
+
+			// initialize the ZK path
+			EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
+			ensurePath.ensure(curator.getZookeeperClient());
+
+			//final long freq = fSettings.getLong(kSetting_SweepEverySeconds, kDefault_SweepEverySeconds);
+			long freq = kDefault_SweepEverySeconds;
+			String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_SweepEverySeconds);
+			if(null==strkSetting_SweepEverySeconds) strkSetting_SweepEverySeconds = kDefault_SweepEverySeconds+"";
+
+			freq = Long.parseLong(strkSetting_SweepEverySeconds);
+
+			fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
+			log.info("KafkaConsumerCache started");
+			log.info("sweeping cached clients every " + freq + " seconds");
+		} catch (ZkException e) {
+			throw new KafkaConsumerCacheException(e);
+		} catch (Exception e) {
+			// NOTE(review): ZkException is already an Exception, so the catch
+			// above is subsumed by this one -- confirm whether the distinction matters.
+			throw new KafkaConsumerCacheException(e);
+		}
+	}
+
+ /**
+ * Getting the curator oject to start the zookeeper connection estabished
+ *
+ * @param curator
+ * @return curator object
+ */
+ public static CuratorFramework getCuratorFramework(CuratorFramework curator) {
+ if (curator.getState() == CuratorFrameworkState.LATENT) {
+ curator.start();
+
+ try {
+ curator.blockUntilConnected();
+ } catch (InterruptedException e) {
+ // Ignore
+ log.error("error while setting curator framework :" + e.getMessage());
+ }
+ }
+
+ return curator;
+ }
+
+	/**
+	 * Stop the cache service: detaches the ZK listener, closes the
+	 * PathChildrenCache, stops the sweeper, and clears all cached consumers.
+	 * Safe to call even if startCache() was never invoked.
+	 */
+	public void stopCache() {
+		setStatus(Status.DISCONNECTED);
+
+		final CuratorFramework curator = ConfigurationReader.getCurator();
+
+		if (curator != null) {
+			try {
+				curator.getConnectionStateListenable().removeListener(listener);
+				// guard: curatorConsumerCache is null until startCache() runs
+				if (curatorConsumerCache != null) {
+					curatorConsumerCache.close();
+					log.info("Curator client closed");
+				}
+			} catch (ZkInterruptedException e) {
+				log.warn("Curator client close interrupted: " + e.getMessage());
+			} catch (IOException e) {
+				log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
+			}
+
+			curatorConsumerCache = null;
+		}
+
+		if (fSweepScheduler != null) {
+			fSweepScheduler.shutdownNow();
+			log.info("cache sweeper stopped");
+		}
+
+		if (fConsumers != null) {
+			fConsumers.clear();
+			fConsumers = null;
+		}
+
+		setStatus(Status.NOT_STARTED);
+
+		log.info("Consumer cache service stopped");
+	}
+
+	/**
+	 * Get a cached consumer by topic, group, and id, if it exists (and remains
+	 * valid). A cache hit touches the consumer so the sweeper keeps it alive.
+	 *
+	 * NOTE(review): the original doc claimed this method waits for other
+	 * consumer caches in the cluster to release ownership; no such wait is
+	 * visible in this method -- confirm against the factory.
+	 *
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 * @return a consumer, or null on a cache miss
+	 * @throws KafkaConsumerCacheException if the cache is not CONNECTED
+	 */
+	public KafkaConsumer getConsumerFor(String topic, String consumerGroupId, String clientId)
+			throws KafkaConsumerCacheException {
+		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+			throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
+		final KafkaConsumer kc = fConsumers.get(consumerKey);
+
+		if (kc != null) {
+			log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
+			kc.touch();
+			fMetrics.onKafkaConsumerCacheHit();
+		} else {
+			log.debug("Consumer cache miss for [" + consumerKey + "]");
+			fMetrics.onKafkaConsumerCacheMiss();
+		}
+
+		return kc;
+	}
+
+	/**
+	 * Put a consumer into the cache by topic, group and ID. Any existing entry
+	 * under the same key is replaced.
+	 *
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param consumerId
+	 * @param consumer
+	 * @throws KafkaConsumerCacheException if the cache is not CONNECTED
+	 */
+	public void putConsumerFor(String topic, String consumerGroupId, String consumerId, KafkaConsumer consumer)
+			throws KafkaConsumerCacheException {
+		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+			throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+		fConsumers.put(consumerKey, consumer);
+	}
+
+	/**
+	 * Returns a snapshot copy of all consumers currently in the cache.
+	 *
+	 * @return a new list holding the cached consumers
+	 */
+	public Collection<? extends Consumer> getConsumers() {
+		return new LinkedList<KafkaConsumer>(fConsumers.values());
+	}
+
+	/**
+	 * Drops (closes and removes) every consumer in the cache, e.g. at
+	 * shutdown. ConcurrentHashMap iteration tolerates the concurrent removals
+	 * performed inside dropConsumer().
+	 */
+	public void dropAllConsumers() {
+		for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
+			dropConsumer(entry.getKey(), true);
+		}
+
+		// consumers should be empty here
+		if (fConsumers.size() > 0) {
+			log.warn("During dropAllConsumers, the consumer map is not empty.");
+			fConsumers.clear();
+		}
+	}
+
+	/**
+	 * Drop a consumer from our cache due to a timeout, then delete its
+	 * ownership node in ZooKeeper so other API nodes see it as gone.
+	 *
+	 * @param key cache key as built by makeConsumerKey()
+	 */
+	private void dropTimedOutConsumer(String key) {
+		fMetrics.onKafkaConsumerTimeout();
+
+		if (!fConsumers.containsKey(key)) {
+			log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key);
+			return;
+		}
+
+		// First, drop this consumer from our cache
+		dropConsumer(key, true);
+
+		final CuratorFramework curator = ConfigurationReader.getCurator();
+
+		try {
+			curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
+		} catch (NoNodeException e) {
+			log.warn("A consumer was deleted from " + fApiId
+					+ "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
+		} catch (Exception e) {
+			// surface unexpected ZK failures with the stack trace instead of
+			// hiding them at debug level with only the message text
+			log.warn("Unexpected exception while deleting consumer " + key, e);
+		}
+
+		log.info("Dropped " + key + " consumer due to timeout");
+	}
+
+	/**
+	 * Drop a consumer from our cache due to another API node claiming it as
+	 * their own. The claim metric is only counted when the consumer was still
+	 * present locally.
+	 *
+	 * @param key cache key as built by makeConsumerKey()
+	 */
+	private void dropClaimedConsumer(String key) {
+		// if the consumer is still in our cache, it implies a claim.
+		if (fConsumers.containsKey(key)) {
+			fMetrics.onKafkaConsumerClaimed();
+			log.info("Consumer [" + key + "] claimed by another node.");
+		}
+
+		dropConsumer(key, false);
+	}
+
+	/**
+	 * Removes the consumer from the cache and closes its connection to the
+	 * kafka broker(s).
+	 *
+	 * @param key cache key as built by makeConsumerKey()
+	 * @param dueToTimeout true when the drop came from the expiry sweeper
+	 *            (NOTE(review): currently unused inside this method -- confirm)
+	 */
+	private void dropConsumer(String key, boolean dueToTimeout) {
+		final KafkaConsumer kc = fConsumers.remove(key);
+
+		if (kc != null) {
+			log.info("closing Kafka consumer " + key);
+			kc.close();
+		}
+	}
+
+// private final rrNvReadable fSettings;
+	/** Metrics sink for cache hit/miss/timeout/claim counters. */
+	private final MetricsSet fMetrics;
+	/** ZooKeeper base path under which consumer ownership nodes live. */
+	private final String fBaseZkPath;
+	/** Single-threaded scheduler running the periodic cache sweep. */
+	private final ScheduledExecutorService fSweepScheduler;
+	/** This API node's identifier, written into ZK ownership nodes. */
+	private final String fApiId;
+	/** Reacts to ZK connection state changes (loss/suspend/reconnect). */
+	private final ConnectionStateListener listener;
+
+	/** Local cache of live consumers, keyed by topic::group::client. */
+	private ConcurrentHashMap<String, KafkaConsumer> fConsumers;
+	/** Curator cache watching the ownership nodes; null until startCache(). */
+	private PathChildrenCache curatorConsumerCache;
+
+	/** Cache lifecycle status; volatile because it is read across threads. */
+	private volatile Status status;
+
+	/**
+	 * Re-synchronizes the local cache with ZK after a reconnect: removes any
+	 * locally cached consumers now owned by other API nodes, then marks the
+	 * cache CONNECTED again.
+	 */
+	private void handleReconnection() {
+
+		log.info("Reading current cache data from ZK and synchronizing local cache");
+
+		final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
+
+		// Remove all the consumers in this API nodes cache that now belong to
+		// other API nodes.
+		for (ChildData cachedConsumer : cacheData) {
+			final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
+			final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
+					: "undefined";
+
+			if (!fApiId.equals(owningApiId)) {
+				fConsumers.remove(consumerId);
+			}
+		}
+
+		setStatus(Status.CONNECTED);
+	}
+
+	/** Marks the cache SUSPENDED until the ZK connection is re-established. */
+	private void handleConnectionSuspended() {
+		log.info("Suspending cache until ZK connection is re-established");
+
+		setStatus(Status.SUSPENDED);
+	}
+
+	/**
+	 * Handles a lost ZK session: marks the cache DISCONNECTED, closes every
+	 * locally cached Kafka consumer, and empties the cache.
+	 */
+	private void handleConnectionLoss() {
+		log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)");
+
+		setStatus(Status.DISCONNECTED);
+
+		closeAllCachedConsumers();
+		fConsumers.clear();
+	}
+
+	/** Closes every Kafka consumer currently held in the local cache. */
+	private void closeAllCachedConsumers() {
+		for (KafkaConsumer cachedConsumer : fConsumers.values()) {
+			cachedConsumer.close();
+		}
+	}
+
+ private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) {
+ return topic + "::" + consumerGroupId + "::" + clientId;
+ }
+
+	/**
+	 * Claims ownership of a consumer by writing this API node's id to the
+	 * consumer's ZooKeeper node (creating it if needed), then backs off to
+	 * give the Kafka broker time to clean up its ZK data for this consumer.
+	 *
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param consumerId
+	 * @throws KafkaConsumerCacheException if the ZK write fails
+	 */
+	public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId)
+			throws KafkaConsumerCacheException {
+		// get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+
+		try {
+			final String consumerPath = fBaseZkPath + "/" + consumerKey;
+
+			log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
+
+			final CuratorFramework curator = ConfigurationReader.getCurator();
+
+			try {
+				curator.setData().forPath(consumerPath, fApiId.getBytes());
+			} catch (KeeperException.NoNodeException e) {
+				curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
+			}
+
+			log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
+		} catch (Exception e) {
+			log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
+			throw new KafkaConsumerCacheException(e);
+		}
+
+		log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
+
+		try {
+			// Look up the wait by the setting NAME (the class constant). The
+			// previous code shadowed that constant with a local int, so the
+			// lookup key was the string "500" instead of the property name.
+			int handoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+			final String strHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+					kSetting_ConsumerHandoverWaitMs);
+			if (strHandoverWaitMs != null)
+				handoverWaitMs = Integer.parseInt(strHandoverWaitMs);
+
+			Thread.sleep(handoverWaitMs);
+		} catch (InterruptedException e) {
+			// restore the interrupt flag rather than swallowing the interruption
+			Thread.currentThread().interrupt();
+		}
+	}
+
+	/**
+	 * One sweep pass: computes the expiry horizon from configuration (falling
+	 * back to kDefault_MustTouchEveryMs) and drops every consumer whose last
+	 * touch is older than that horizon.
+	 */
+	private void sweep() {
+		final LinkedList<String> removals = new LinkedList<String>();
+		long mustTouchEveryMs = kDefault_MustTouchEveryMs;
+		String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_TouchEveryMs);
+		//if(null!=strkSetting_TouchEveryMs) strkSetting_TouchEveryMs = kDefault_MustTouchEveryMs+"";
+		if(null!=strkSetting_TouchEveryMs)
+		{
+			mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
+		}
+
+		//final long mustTouchEveryMs = fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
+		final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
+
+		// collect expired keys first, then drop, to keep the scan simple
+		for (Entry<String, KafkaConsumer> e : fConsumers.entrySet()) {
+			final long lastTouchMs = e.getValue().getLastTouch();
+
+			log.debug("consumer " + e.getKey() + " last touched at " + lastTouchMs);
+
+			if (lastTouchMs < oldestAllowedTouchMs) {
+				log.info("consumer " + e.getKey() + " has expired");
+				removals.add(e.getKey());
+			}
+		}
+
+		for (String key : removals) {
+			dropTimedOutConsumer(key);
+		}
+	}
+
+	/**
+	 * Runnable scheduled by startCache() to run the sweep method periodically.
+	 *
+	 * @author author
+	 *
+	 */
+	private class sweeper implements Runnable {
+		/**
+		 * Runs a single sweep pass.
+		 */
+		public void run() {
+			sweep();
+		}
+	}
+
+	/**
+	 * Drops the given consumer from the local cache (not as a timeout; the ZK
+	 * ownership node is left untouched).
+	 *
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 */
+	public void dropConsumer(String topic, String consumerGroup, String clientId) {
+		dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false);
+	}
+
+	/** @return the current cache lifecycle status */
+	private Status getStatus() {
+		return this.status;
+	}
+
+	/** @param status the new cache lifecycle status */
+	private void setStatus(Status status) {
+		this.status = status;
+	}
+
+	/** Shared EELF logger for this cache. */
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
+	//private static final Logger log = LoggerFactory.getLogger(KafkaConsumerCache.class);
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java
new file mode 100644
index 0000000..90e5ce0
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java
@@ -0,0 +1,169 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import kafka.common.FailedToSendMessageException;
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
+
+import org.json.JSONException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Sends raw JSON objects into Kafka.
+ *
+ * Could improve space: BSON rather than JSON?
+ *
+ * @author author
+ *
+ */
+
+public class KafkaPublisher implements Publisher {
+	/**
+	 * Constructor: builds the Kafka producer configuration from the AJSC
+	 * property map (falling back to sensible defaults) and creates the
+	 * underlying producer.
+	 *
+	 * @param settings injected property reader; NOTE(review): no longer read
+	 *            here, kept only for constructor-signature compatibility
+	 * @throws rrNvReadable.missingReqdSetting
+	 */
+	public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
+		final Properties props = new Properties();
+
+		String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
+		// use the logger rather than System.out so the broker URL lands in the app log
+		log.info("kafkaConnUrl:- "+kafkaConnUrl);
+		if(null==kafkaConnUrl){
+			kafkaConnUrl="localhost:9092";
+		}
+		transferSetting( props, "metadata.broker.list", kafkaConnUrl);
+		transferSetting( props, "request.required.acks", "1");
+		transferSetting( props, "message.send.max.retries", "5");
+		transferSetting(props, "retry.backoff.ms", "150");
+
+		props.put("serializer.class", "kafka.serializer.StringEncoder");
+
+		fConfig = new ProducerConfig(props);
+		fProducer = new Producer<String, String>(fConfig);
+	}
+
+	/**
+	 * Publishes a single message to the given topic by delegating to
+	 * sendMessages().
+	 *
+	 * @param topic destination topic
+	 * @param msg the message to send
+	 * @throws FailedToSendMessageException
+	 * @throws IOException
+	 */
+	@Override
+	public void sendMessage(String topic, message msg) throws IOException, FailedToSendMessageException {
+		final List<message> single = new ArrayList<message>(1);
+		single.add(msg);
+		sendMessages(topic, single);
+	}
+
+	/**
+	 * Publishes a batch of pre-built keyed messages.
+	 *
+	 * @param topic destination topic; used here only in the error log, since
+	 *            each KeyedMessage carries its own topic
+	 * @param kms the keyed messages to send
+	 * @throws IOException
+	 * @throws FailedToSendMessageException re-thrown with the original cause
+	 *             when the producer fails
+	 */
+	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+		try {
+			fProducer.send(kms);
+
+		} catch (FailedToSendMessageException excp) {
+			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+			throw new FailedToSendMessageException(excp.getMessage(), excp);
+		}
+
+	}
+
+	/**
+	 * Send a set of messages. Each message's key is used as the Kafka
+	 * partition key.
+	 *
+	 * @param topic destination topic
+	 * @param msgs the messages to send
+	 * @throws FailedToSendMessageException re-thrown with the original cause
+	 * @throws IOException
+	 */
+	@Override
+	public void sendMessages(String topic, List<? extends message> msgs)
+			throws IOException, FailedToSendMessageException {
+		log.info("sending " + msgs.size() + " events to [" + topic + "]");
+
+		// wrap each message as (topic, key, payload) for the producer
+		final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
+		for (message o : msgs) {
+			final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
+			kms.add(data);
+		}
+		try {
+			fProducer.send(kms);
+
+		} catch (FailedToSendMessageException excp) {
+			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+			throw new FailedToSendMessageException(excp.getMessage(), excp);
+		}
+	}
+
+	//private final rrNvReadable fSettings;
+
+	/** Producer configuration assembled in the constructor. */
+	private ProducerConfig fConfig;
+	/** Underlying Kafka producer; created once and reused for all sends. */
+	private Producer<String, String> fProducer;
+
+	/**
+	 * Copies a kafka.* setting from the AJSC property map into the producer
+	 * properties, falling back to the supplied default when unset.
+	 *
+	 * @param props producer properties being assembled
+	 * @param key producer property name; looked up as "kafka." + key
+	 * @param defVal default value used when the property is not configured
+	 */
+	private void transferSetting(Properties props, String key, String defVal) {
+		String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
+		if (null==kafka_prop) kafka_prop=defVal;
+		//props.put(key, settings.getString("kafka." + key, defVal));
+		props.put(key, kafka_prop);
+	}
+
+	//private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
+
+	/** Shared EELF logger for this publisher. */
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java
new file mode 100644
index 0000000..b42a22b
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java
@@ -0,0 +1,160 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+/**
+ * In-memory ConsumerFactory used when messages are held in a MemoryQueue
+ * rather than in Kafka. Consumers are created per request and not cached.
+ *
+ * @author author
+ *
+ */
+public class MemoryConsumerFactory implements ConsumerFactory
+{
+	/**
+	 *
+	 * Initializing constructor
+	 * @param q the in-memory queue consumers will read from
+	 */
+	public MemoryConsumerFactory ( MemoryQueue q )
+	{
+		fQueue = q;
+	}
+
+	/**
+	 * Creates a new memory-backed consumer. Note that clientId and timeoutMs
+	 * are ignored for memory consumers.
+	 *
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 * @param timeoutMs
+	 * @return Consumer
+	 */
+	@Override
+	public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs )
+	{
+		return new MemoryConsumer ( topic, consumerGroupId );
+	}
+
+	// the queue every consumer created by this factory reads from
+	private final MemoryQueue fQueue;
+
+	/**
+	 *
+	 * Memory-backed Consumer implementation; reads from the factory's queue.
+	 *
+	 */
+	private class MemoryConsumer implements Consumer
+	{
+		/**
+		 *
+		 * Initializing MemoryConsumer constructor
+		 * @param topic
+		 * @param consumer the consumer group name
+		 *
+		 */
+		public MemoryConsumer ( String topic, String consumer )
+		{
+			fTopic = topic;
+			fConsumer = consumer;
+			fCreateMs = System.currentTimeMillis ();
+			fLastAccessMs = fCreateMs;
+		}
+
+		@Override
+		/**
+		 *
+		 * Returns the next message for this topic/consumer from the backing queue.
+		 */
+		public Message nextMessage ()
+		{
+			return fQueue.get ( fTopic, fConsumer );
+		}
+
+		private final String fTopic;
+		private final String fConsumer;
+		private final long fCreateMs;
+		// NOTE(review): never updated after construction -- confirm whether
+		// access-time tracking is intended for memory consumers
+		private long fLastAccessMs;
+
+		@Override
+		public void close() {
+			//Nothing to close/clean up.
+		}
+		/**
+		 * No-op: memory consumers do not track offsets.
+		 */
+		public void commitOffsets()
+		{
+			// ignoring this aspect
+		}
+		/**
+		 * Always 0: memory consumers do not track offsets.
+		 */
+		public long getOffset()
+		{
+			return 0;
+		}
+
+		@Override
+		/**
+		 * Returns "topic/consumerGroup" as this consumer's name.
+		 */
+		public String getName ()
+		{
+			return fTopic + "/" + fConsumer;
+		}
+
+		@Override
+		public long getCreateTimeMs ()
+		{
+			return fCreateMs;
+		}
+
+		@Override
+		public long getLastAccessMs ()
+		{
+			return fLastAccessMs;
+		}
+	}
+
+	@Override
+	public void destroyConsumer(String topic, String consumerGroupId,
+			String clientId) {
+		//No cache for memory consumers, so NOOP
+	}
+
+	@Override
+	public void dropCache ()
+	{
+		// nothing to do - there's no cache here
+	}
+
+	@Override
+	/**
+	 * @return a new, empty list; memory consumers are not tracked
+	 */
+	public Collection<? extends Consumer> getConsumers ()
+	{
+		return new ArrayList<MemoryConsumer> ();
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java
new file mode 100644
index 0000000..221e58a
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java
@@ -0,0 +1,200 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class MemoryMetaBroker implements Broker {
+ /**
+ *
+ * @param mq
+ * @param configDb
+ * @param settings
+ */
+ public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) {
+ //public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb, rrNvReadable settings) {
+ fQueue = mq;
+ fTopics = new HashMap<String, MemTopic>();
+ }
+
+ @Override
+ public List<Topic> getAllTopics() {
+ return new LinkedList<Topic>(fTopics.values());
+ }
+
+ @Override
+ public Topic getTopic(String topic) {
+ return fTopics.get(topic);
+ }
+
+ @Override
+ public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas,
+ boolean transactionEnabled) throws TopicExistsException {
+ if (getTopic(topic) != null) {
+ throw new TopicExistsException(topic);
+ }
+ fQueue.createTopic(topic);
+ fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled));
+ return getTopic(topic);
+ }
+
+ @Override
+ public void deleteTopic(String topic) {
+ fTopics.remove(topic);
+ fQueue.removeTopic(topic);
+ }
+
+ private final MemoryQueue fQueue;
+ private final HashMap<String, MemTopic> fTopics;
+
+ private static class MemTopic implements Topic {
+ /**
+ * constructor initialization
+ *
+ * @param name
+ * @param desc
+ * @param owner
+ * @param transactionEnabled
+ */
+ public MemTopic(String name, String desc, String owner, boolean transactionEnabled) {
+ fName = name;
+ fDesc = desc;
+ fOwner = owner;
+ ftransactionEnabled = transactionEnabled;
+ fReaders = null;
+ fWriters = null;
+ }
+
+ @Override
+ public String getOwner() {
+ return fOwner;
+ }
+
+ @Override
+ public NsaAcl getReaderAcl() {
+ return fReaders;
+ }
+
+ @Override
+ public NsaAcl getWriterAcl() {
+ return fWriters;
+ }
+
+ @Override
+ public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
+ if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) {
+ throw new AccessDeniedException(user == null ? "" : user.getKey());
+ }
+ }
+
+ @Override
+ public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
+ if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) {
+ throw new AccessDeniedException(user == null ? "" : user.getKey());
+ }
+ }
+
+ @Override
+ public String getName() {
+ return fName;
+ }
+
+ @Override
+ public String getDescription() {
+ return fDesc;
+ }
+
+ @Override
+ public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+ if (!fOwner.equals(asUser.getKey())) {
+ throw new AccessDeniedException("User does not own this topic " + fName);
+ }
+ if (fWriters == null) {
+ fWriters = new NsaAcl();
+ }
+ fWriters.add(publisherId);
+ }
+
+ @Override
+ public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+ if (!fOwner.equals(asUser.getKey())) {
+ throw new AccessDeniedException("User does not own this topic " + fName);
+ }
+ fWriters.remove(publisherId);
+ }
+
+ @Override
+ public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+ if (!fOwner.equals(asUser.getKey())) {
+ throw new AccessDeniedException("User does not own this topic " + fName);
+ }
+ if (fReaders == null) {
+ fReaders = new NsaAcl();
+ }
+ fReaders.add(consumerId);
+ }
+
+ @Override
+ public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+ if (!fOwner.equals(asUser.getKey())) {
+ throw new AccessDeniedException("User does not own this topic " + fName);
+ }
+ fReaders.remove(consumerId);
+ }
+
+ private final String fName;
+ private final String fDesc;
+ private final String fOwner;
+ private NsaAcl fReaders;
+ private NsaAcl fWriters;
+ private boolean ftransactionEnabled;
+
+ @Override
+ public boolean isTransactionEnabled() {
+ return ftransactionEnabled;
+ }
+
+ @Override
+ public Set<String> getOwners() {
+ final TreeSet<String> set = new TreeSet<String> ();
+ set.add ( fOwner );
+ return set;
+ }
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java
new file mode 100644
index 0000000..e6b98b3
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java
@@ -0,0 +1,207 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+
+/**
+ * When broker type is memory, then this class is doing all the topic related
+ * operations
+ *
+ * @author author
+ *
+ */
+public class MemoryQueue {
+ // map from topic to list of msgs
+ private HashMap<String, LogBuffer> fQueue;
+ private HashMap<String, HashMap<String, Integer>> fOffsets;
+
+ /**
+ * constructor storing hashMap objects in Queue and Offsets object
+ */
+ public MemoryQueue() {
+ fQueue = new HashMap<String, LogBuffer>();
+ fOffsets = new HashMap<String, HashMap<String, Integer>>();
+ }
+
+ /**
+ * method used to create topic
+ *
+ * @param topic
+ */
+ public synchronized void createTopic(String topic) {
+ LogBuffer q = fQueue.get(topic);
+ if (q == null) {
+ q = new LogBuffer(1024 * 1024);
+ fQueue.put(topic, q);
+ }
+ }
+
+ /**
+ * method used to remove topic
+ *
+ * @param topic
+ */
+ public synchronized void removeTopic(String topic) {
+ LogBuffer q = fQueue.get(topic);
+ if (q != null) {
+ fQueue.remove(topic);
+ }
+ }
+
+ /**
+ * method to write message on topic
+ *
+ * @param topic
+ * @param m
+ */
+ public synchronized void put(String topic, message m) {
+ LogBuffer q = fQueue.get(topic);
+ if (q == null) {
+ createTopic(topic);
+ q = fQueue.get(topic);
+ }
+ q.push(m.getMessage());
+ }
+
+ /**
+ * method to read consumer messages
+ *
+ * @param topic
+ * @param consumerName
+ * @return
+ */
+ public synchronized Consumer.Message get(String topic, String consumerName) {
+ final LogBuffer q = fQueue.get(topic);
+ if (q == null) {
+ return null;
+ }
+
+ HashMap<String, Integer> offsetMap = fOffsets.get(consumerName);
+ if (offsetMap == null) {
+ offsetMap = new HashMap<String, Integer>();
+ fOffsets.put(consumerName, offsetMap);
+ }
+ Integer offset = offsetMap.get(topic);
+ if (offset == null) {
+ offset = 0;
+ }
+
+ final msgInfo result = q.read(offset);
+ if (result != null && result.msg != null) {
+ offsetMap.put(topic, result.offset + 1);
+ }
+ return result;
+ }
+
+ /**
+ * static inner class used to details about consumed messages
+ *
+ * @author author
+ *
+ */
+ private static class msgInfo implements Consumer.Message {
+ /**
+ * published message which is consumed
+ */
+ public String msg;
+ /**
+ * offset associated with message
+ */
+ public int offset;
+
+ /**
+ * get offset of messages
+ */
+ @Override
+ public long getOffset() {
+ return offset;
+ }
+
+ /**
+ * get consumed message
+ */
+ @Override
+ public String getMessage() {
+ return msg;
+ }
+ }
+
+ /**
+ *
+ * @author author
+ *
+ * private LogBuffer class has synchronized push and read method
+ */
+ private class LogBuffer {
+ private int fBaseOffset;
+ private final int fMaxSize;
+ private final ArrayList<String> fList;
+
+ /**
+ * constructor initializing the offset, maxsize and list
+ *
+ * @param maxSize
+ */
+ public LogBuffer(int maxSize) {
+ fBaseOffset = 0;
+ fMaxSize = maxSize;
+ fList = new ArrayList<String>();
+ }
+
+ /**
+ * pushing message
+ *
+ * @param msg
+ */
+ public synchronized void push(String msg) {
+ fList.add(msg);
+ while (fList.size() > fMaxSize) {
+ fList.remove(0);
+ fBaseOffset++;
+ }
+ }
+
+ /**
+ * reading messages
+ *
+ * @param offset
+ * @return
+ */
+ public synchronized msgInfo read(int offset) {
+ final int actual = Math.max(0, offset - fBaseOffset);
+
+ final msgInfo mi = new msgInfo();
+ mi.msg = (actual >= fList.size()) ? null : fList.get(actual);
+ if (mi.msg == null)
+ return null;
+
+ mi.offset = actual + fBaseOffset;
+ return mi;
+ }
+
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java
new file mode 100644
index 0000000..bf94b9c
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class MemoryQueuePublisher implements Publisher {
+ /**
+ *
+ * @param q
+ * @param b
+ */
+ public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) {
+ fBroker = b;
+ fQueue = q;
+ }
+
+ /**
+ * sendBatchMessages
+ *
+ * @param topic
+ * @param kms
+ */
+ public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+ }
+
+ /**
+ *
+ * @param topic
+ * @param msg
+ * @throws IOException
+ */
+ @Override
+ public void sendMessage(String topic, message msg) throws IOException {
+ if (null == fBroker.getTopic(topic)) {
+ try {
+ fBroker.createTopic(topic, topic, null, 8, 3, false);
+ } catch (TopicExistsException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ fQueue.put(topic, msg);
+ }
+
+ @Override
+ /**
+ * @param topic
+ * @param msgs
+ * @throws IOException
+ */
+ public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+ for (message m : msgs) {
+ sendMessage(topic, m);
+ }
+ }
+
+ private final MemoryMetaBroker fBroker;
+ private final MemoryQueue fQueue;
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java
new file mode 100644
index 0000000..0414e41
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * class is used to message publishing
+ *
+ * @author author
+ *
+ */
+public class MessageDropper implements Publisher {
+ /**
+ * publish single messages
+ * param topic
+ * param msg
+ */
+ @Override
+ public void sendMessage(String topic, message msg) throws IOException {
+ }
+
+ /**
+ * publish multiple messages
+ */
+ @Override
+ public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+ }
+
+ /**
+ * publish batch messages
+ */
+ @Override
+ public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java
new file mode 100644
index 0000000..63ea3b6
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * class used for logging perspective
+ *
+ * @author author
+ *
+ */
+public class MessageLogger implements Publisher {
+ public MessageLogger() {
+ }
+
+ public void setFile(File f) throws FileNotFoundException {
+ fStream = new FileOutputStream(f, true);
+ }
+
+ /**
+ *
+ * @param topic
+ * @param msg
+ * @throws IOException
+ */
+ @Override
+ public void sendMessage(String topic, message msg) throws IOException {
+ logMsg(msg);
+ }
+
+ /**
+ * @param topic
+ * @param msgs
+ * @throws IOException
+ */
+ @Override
+ public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+ for (message m : msgs) {
+ logMsg(m);
+ }
+ }
+
+ /**
+ * @param topic
+ * @param kms
+ * @throws IOException
+ */
+ @Override
+ public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
+
+ IOException {
+ }
+
+ private FileOutputStream fStream;
+
+ /**
+ *
+ * @param msg
+ * @throws IOException
+ */
+ private void logMsg(message msg) throws IOException {
+ String key = msg.getKey();
+ if (key == null)
+ key = "<none>";
+
+ fStream.write('[');
+ fStream.write(key.getBytes());
+ fStream.write("] ".getBytes());
+ fStream.write(msg.getMessage().getBytes());
+ fStream.write('\n');
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java
new file mode 100644
index 0000000..43bc584
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.io.Serializable;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
+/**
+ *
+ * @author author
+ *
+ */
+@XmlRootElement
+public class ApiKeyBean implements Serializable {
+
+ private static final long serialVersionUID = -8219849086890567740L;
+
+ private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+ private String email;
+ private String description;
+ /**
+ * constructor
+ */
+ public ApiKeyBean() {
+ super();
+ }
+/**
+ *
+ * @param email
+ * @param description
+ */
+ public ApiKeyBean(String email, String description) {
+ super();
+ this.email = email;
+ this.description = description;
+ }
+
+ public String getEmail() {
+ return email;
+ }
+
+ public void setEmail(String email) {
+ this.email = email;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getKey() {
+ return generateKey(16);
+ }
+
+ public String getSharedSecret() {
+ return generateKey(24);
+ }
+
+ private static String generateKey ( int length ) {
+ return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length );
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java
new file mode 100644
index 0000000..f4855b9
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java
@@ -0,0 +1,227 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.util.HashMap;
+import java.util.concurrent.TimeUnit;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.metrics.impl.CdmRateTicker;
+
+/**
+ * class provide rate information
+ *
+ * @author author
+ *
+ */
+@Component
+public class DMaaPCambriaLimiter {
+ /**
+ * constructor initializes
+ *
+ * @param settings
+ * @throws missingReqdSetting
+ * @throws invalidSettingValue
+ */
+ @Autowired
+ public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings)
+ throws missingReqdSetting, invalidSettingValue {
+ fRateInfo = new HashMap<String, RateInfo>();
+ fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+ CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
+ fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
+ CambriaConstants.kDefault_RateLimitWindowLength);
+ fSleepMs = settings.getLong(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+ CambriaConstants.kDefault_SleepMsOnRateLimit);
+ }
+
+ /**
+ * static method provide the sleep time
+ *
+ * @param ratePerMinute
+ * @return
+ */
+ public static long getSleepMsForRate(double ratePerMinute) {
+ if (ratePerMinute <= 0.0)
+ return 0;
+ return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
+ }
+
+ /**
+ * Construct a rate limiter.
+ *
+ * @param maxEmptyPollsPerMinute
+ * Pass <= 0 to deactivate rate limiting.
+ * @param windowLengthMins
+ */
+ public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins) {
+ this(maxEmptyPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute));
+ }
+
+ /**
+ * Construct a rate limiter
+ *
+ * @param maxEmptyPollsPerMinute
+ * Pass <= 0 to deactivate rate limiting.
+ * @param sleepMs
+ * @param windowLengthMins
+ */
+ public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins, long sleepMs) {
+ fRateInfo = new HashMap<String, RateInfo>();
+ fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+ fWindowLengthMins = windowLengthMins;
+ fSleepMs = Math.max(0, sleepMs);
+ }
+
+ /**
+ * Tell the rate limiter about a call to a topic/group/id. If the rate is
+ * too high, this call delays its return and throws an exception.
+ *
+ * @param topic
+ * @param consumerGroup
+ * @param clientId
+ * @throws CambriaApiException
+ */
+ public void onCall(String topic, String consumerGroup, String clientId) throws CambriaApiException {
+ // do nothing if rate is configured 0 or less
+ if (fMaxEmptyPollsPerMinute <= 0) {
+ return;
+ }
+
+ // setup rate info for this tuple
+ final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
+
+ final double rate = ri.onCall();
+ log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
+
+ if (rate > fMaxEmptyPollsPerMinute) {
+ try {
+ log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+ + ".");
+ if (fSleepMs > 0) {
+ log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+ + " ms sleep, then responding in error.");
+ Thread.sleep(fSleepMs);
+ } else {
+ log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+ }
+ } catch (InterruptedException e) {
+ // ignore
+ }
+ ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests,
+ DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+ "This client is making too many requests. Please use a long poll "
+ + "setting to decrease the number of requests that result in empty responses. ");
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+ /**
+ *
+ * @param topic
+ * @param consumerGroup
+ * @param clientId
+ * @param sentCount
+ */
+ public void onSend(String topic, String consumerGroup, String clientId, long sentCount) {
+ // check for good replies
+ if (sentCount > 0) {
+ // that was a good send, reset the metric
+ getRateInfo(topic, consumerGroup, clientId).reset();
+ }
+ }
+
+ private static class RateInfo {
+ /**
+ * constructor initialzes
+ *
+ * @param label
+ * @param windowLengthMinutes
+ */
+ public RateInfo(String label, int windowLengthMinutes) {
+ fLabel = label;
+ fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
+ windowLengthMinutes, TimeUnit.MINUTES);
+ }
+
+ public String getLabel() {
+ return fLabel;
+ }
+
+ /**
+ * CdmRateTicker is reset
+ */
+ public void reset() {
+ fCallRateSinceLastMsgSend.reset();
+ }
+
+ /**
+ *
+ * @return
+ */
+ public double onCall() {
+ fCallRateSinceLastMsgSend.tick();
+ return fCallRateSinceLastMsgSend.getRate();
+ }
+
+ private final String fLabel;
+ private final CdmRateTicker fCallRateSinceLastMsgSend;
+ }
+
+ private final HashMap<String, RateInfo> fRateInfo;
+ private final double fMaxEmptyPollsPerMinute;
+ private final int fWindowLengthMins;
+ private final long fSleepMs;
+ //private static final Logger log = LoggerFactory.getLogger(DMaaPCambriaLimiter.class);
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
+ private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
+ final String key = makeKey(topic, consumerGroup, clientId);
+ RateInfo ri = fRateInfo.get(key);
+ if (ri == null) {
+ ri = new RateInfo(key, fWindowLengthMins);
+ fRateInfo.put(key, ri);
+ }
+ return ri;
+ }
+
+ private String makeKey(String topic, String group, String id) {
+ return topic + "::" + group + "::" + id;
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java
new file mode 100644
index 0000000..5f132b7
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+
+/**
+ * DMaaPContext provide and maintain all the configurations , Http request/response
+ * Session and consumer Request Time
+ * @author author
+ *
+ */
public class DMaaPContext {

	// Reader for the application configuration backing this request.
	private ConfigurationReader configReader;
	// The servlet request/response pair this context wraps.
	private HttpServletRequest request;
	private HttpServletResponse response;
	// Cached HTTP session; refreshed from the request in getSession().
	private HttpSession session;
	// Timestamp string recorded when a consumer request arrives.
	private String consumerRequestTime;
	// Monotonic counter mixed into getBatchID() to disambiguate ids generated
	// within the same second. NOTE(review): package-visible and shared across
	// all contexts; it grows without bound and is only guarded by the
	// synchronized getBatchID() — confirm no other writers exist.
	static int i=0;

	/**
	 * Generates a batch id from the current timestamp formatted as
	 * "ddMMyyyyHHmmss" (parsed as a long) plus the shared counter, which is
	 * incremented on every call. Synchronized so concurrent callers get
	 * distinct ids within the same second.
	 *
	 * @return the generated batch id, or 0 if the formatted date cannot be
	 *         parsed as a long (should not happen with this pattern)
	 */
	public synchronized static long getBatchID() {
		try{
			final long metricsSendTime = System.currentTimeMillis();
			final Date d = new Date(metricsSendTime);
			final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d);
			long dt= Long.valueOf(text)+i;
			i++;
			return dt;
		}
		catch(NumberFormatException ex){
			return 0;
		}
	}

	/** @return the wrapped servlet request */
	public HttpServletRequest getRequest() {
		return request;
	}

	/** @param request the servlet request to wrap */
	public void setRequest(HttpServletRequest request) {
		this.request = request;
	}

	/** @return the wrapped servlet response */
	public HttpServletResponse getResponse() {
		return response;
	}

	/** @param response the servlet response to wrap */
	public void setResponse(HttpServletResponse response) {
		this.response = response;
	}

	/**
	 * Returns the session of the wrapped request, refreshing the cached copy.
	 * NOTE(review): throws NullPointerException if called before setRequest().
	 *
	 * @return the current HTTP session (created if absent, per getSession())
	 */
	public HttpSession getSession() {
		this.session = request.getSession();
		return session;
	}

	/** @param session the session to cache */
	public void setSession(HttpSession session) {
		this.session = session;
	}

	/** @return the configuration reader for this context */
	public ConfigurationReader getConfigReader() {
		return configReader;
	}

	/** @param configReader the configuration reader to use */
	public void setConfigReader(ConfigurationReader configReader) {
		this.configReader = configReader;
	}

	/** @return the recorded consumer request time, or null if not set */
	public String getConsumerRequestTime() {
		return consumerRequestTime;
	}

	/** @param consumerRequestTime the consumer request time to record */
	public void setConsumerRequestTime(String consumerRequestTime) {
		this.consumerRequestTime = consumerRequestTime;
	}


}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
new file mode 100644
index 0000000..1b62ec8
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
@@ -0,0 +1,320 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import kafka.consumer.ConsumerConfig;
+import kafka.javaapi.consumer.ConsumerConnector;
+
+/**
+ * @author author
+ *
+ */
+public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
+
+ //private static final Logger log = LoggerFactory .getLogger(DMaaPKafkaConsumerFactory.class);
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
+ /**
+ * constructor initialization
+ *
+ * @param settings
+ * @param metrics
+ * @param curator
+ * @throws missingReqdSetting
+ * @throws KafkaConsumerCacheException
+ * @throws UnknownHostException
+ */
+ public DMaaPKafkaConsumerFactory(
+ @Qualifier("propertyReader") rrNvReadable settings,
+ @Qualifier("dMaaPMetricsSet") MetricsSet metrics,
+ @Qualifier("curator") CuratorFramework curator)
+ throws missingReqdSetting, KafkaConsumerCacheException,
+ UnknownHostException {
+ /*final String apiNodeId = settings.getString(
+ CambriaConstants.kSetting_ApiNodeIdentifier,
+ InetAddress.getLocalHost().getCanonicalHostName()
+ + ":"
+ + settings.getInt(CambriaConstants.kSetting_Port,
+ CambriaConstants.kDefault_Port));*/
+ String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ CambriaConstants.kSetting_ApiNodeIdentifier);
+ if (apiNodeId == null){
+
+ apiNodeId=InetAddress.getLocalHost().getCanonicalHostName()
+ + ":"
+ + settings.getInt(CambriaConstants.kSetting_Port,
+ CambriaConstants.kDefault_Port);
+ }
+
+ log.info("This Cambria API Node identifies itself as [" + apiNodeId
+ + "].");
+ final String mode = CambriaConstants.DMAAP;
+ /*fSettings = settings;
+ fZooKeeper = fSettings.getString(kSettings_KafkaZookeeper, settings
+ .getString(CambriaConstants.kSetting_ZkConfigDbServers,
+ CambriaConstants.kDefault_ZkConfigDbServers));*/
+
+ String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
+ if(null==strkSettings_KafkaZookeeper){
+ strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+ if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
+
+ }
+ fZooKeeper= strkSettings_KafkaZookeeper;
+
+ //final boolean isCacheEnabled = fSettings.getBoolean(
+ // kSetting_EnableCache, kDefault_IsCacheEnabled);
+ boolean kSetting_EnableCache= kDefault_IsCacheEnabled;
+ String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_EnableCache+"");
+ if(null!=strkSetting_EnableCache)kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
+
+ final boolean isCacheEnabled = kSetting_EnableCache;
+
+
+ fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId,
+ metrics) : null;
+ if (fCache != null) {
+ fCache.startCache(mode, curator);
+ }
+ }
+
+ @Override
+ public Consumer getConsumerFor(String topic, String consumerGroupName,
+ String consumerId, int timeoutMs) throws UnavailableException {
+ KafkaConsumer kc;
+
+ try {
+ kc = (fCache != null) ? fCache.getConsumerFor(topic,
+ consumerGroupName, consumerId) : null;
+ } catch (KafkaConsumerCacheException e) {
+ throw new UnavailableException(e);
+ }
+
+ if (kc == null) {
+
+ final InterProcessMutex ipLock = new InterProcessMutex( ConfigurationReader.getCurator(), "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
+// final InterProcessMutex fLock = new InterProcessMutex(
+// ConfigurationReader.getCurator(), "/consumerFactory/"
+// + topic + "/" + consumerGroupName + "/"
+// + consumerId);
+ boolean locked = false;
+ try {
+
+ locked = ipLock.acquire(30, TimeUnit.SECONDS);
+ if (!locked) {
+ // FIXME: this seems to cause trouble in some cases. This exception
+ // gets thrown routinely. Possibly a consumer trying multiple servers
+ // at once, producing a never-ending cycle of overlapping locks?
+ // The problem is that it throws and winds up sending a 503 to the
+ // client, which would be incorrect if the client is causing trouble
+ // by switching back and forth.
+
+ throw new UnavailableException("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + ", " + consumerGroupName + ", " + consumerId + ")");
+ }
+
+// if (!fLock.acquire(30, TimeUnit.SECONDS)) {
+// throw new UnavailableException(
+// "Could not acquire lock in order to create (topic, group, consumer) = "
+// + "(" + topic + ", " + consumerGroupName
+// + ", " + consumerId + ")");
+// }
+
+ fCache.signalOwnership(topic, consumerGroupName, consumerId);
+
+ log.info("Creating Kafka consumer for group ["
+ + consumerGroupName + "], consumer [" + consumerId
+ + "], on topic [" + topic + "].");
+
+ final String fakeGroupName = consumerGroupName + "--" + topic;
+
+ final ConsumerConfig ccc = createConsumerConfig(fakeGroupName,
+ consumerId);
+ final ConsumerConnector cc = kafka.consumer.Consumer
+ .createJavaConsumerConnector(ccc);
+ kc = new KafkaConsumer(topic, consumerGroupName, consumerId, cc);
+
+ if (fCache != null) {
+ fCache.putConsumerFor(topic, consumerGroupName, consumerId,
+ kc);
+ }
+ } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
+ log.warn("Kafka consumer couldn't connect to ZK.");
+ throw new UnavailableException("Couldn't connect to ZK.");
+ } catch (KafkaConsumerCacheException e) {
+ log.warn("Failed to cache consumer (this may have performance implications): "
+ + e.getMessage());
+ } catch (Exception e) {
+ throw new UnavailableException(
+ "Error while acquiring consumer factory lock", e);
+ } finally {
+ if ( locked )
+ {
+ try {
+ ipLock.release();
+ } catch (Exception e) {
+ throw new UnavailableException("Error while releasing consumer factory lock", e);
+ }
+ }
+ }
+ }
+
+ return kc;
+ }
+
+ @Override
+ public synchronized void destroyConsumer(String topic,
+ String consumerGroup, String clientId) {
+ if (fCache != null) {
+ fCache.dropConsumer(topic, consumerGroup, clientId);
+ }
+ }
+
+ @Override
+ public synchronized Collection<? extends Consumer> getConsumers() {
+ return fCache.getConsumers();
+ }
+
+ @Override
+ public synchronized void dropCache() {
+ fCache.dropAllConsumers();
+ }
+
+ private ConsumerConfig createConsumerConfig(String groupId,
+ String consumerId) {
+ final Properties props = new Properties();
+ props.put("zookeeper.connect", fZooKeeper);
+ props.put("group.id", groupId);
+ props.put("consumer.id", consumerId);
+ //props.put("auto.commit.enable", "false");
+ // additional settings: start with our defaults, then pull in configured
+ // overrides
+ props.putAll(KafkaInternalDefaults);
+ for (String key : KafkaConsumerKeys) {
+ transferSettingIfProvided(props, key, "kafka");
+ }
+
+ return new ConsumerConfig(props);
+ }
+
+ //private final rrNvReadable fSettings;
+ private final KafkaConsumerCache fCache;
+
+ private String fZooKeeper;
+
+ private static final String kSettings_KafkaZookeeper = "kafka.client.zookeeper";
+
+ private static final HashMap<String, String> KafkaInternalDefaults = new HashMap<String, String>();
+
+ /**
+ * putting values in hashmap like consumer timeout, zookeeper time out, etc
+ *
+ * @param setting
+ */
+ public static void populateKafkaInternalDefaultsMap() {
+ //@Qualifier("propertyReader") rrNvReadable setting) {
+ try {
+
+ HashMap<String, String> map1= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
+
+ KafkaInternalDefaults.put("consumer.timeout.ms",
+ // AJSCPropertiesMap.get(CambriaConstants.msgRtr_prop, "consumer.timeout.ms"));
+ map1.get( "consumer.timeout.ms"));
+
+ KafkaInternalDefaults.put("zookeeper.connection.timeout.ms",
+ //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.connection.timeout.ms"));
+ map1.get("zookeeper.connection.timeout.ms"));
+ KafkaInternalDefaults.put("zookeeper.session.timeout.ms",
+ //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.session.timeout.ms"));
+ map1.get("zookeeper.session.timeout.ms"));
+ KafkaInternalDefaults.put("zookeeper.sync.time.ms",
+ // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.sync.time.ms"));
+ map1.get( "zookeeper.sync.time.ms"));
+ KafkaInternalDefaults.put("auto.commit.interval.ms",
+ //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.interval.ms"));
+ map1.get( "auto.commit.interval.ms"));
+ KafkaInternalDefaults.put("fetch.message.max.bytes",
+ //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "fetch.message.max.bytes"));
+ map1.get("fetch.message.max.bytes"));
+ KafkaInternalDefaults.put("auto.commit.enable",
+ // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.enable"));
+ map1.get("auto.commit.enable"));
+ } catch (Exception e) {
+ log.error("Failed to load Kafka Internal Properties.", e);
+ }
+ }
+
+ private static final String KafkaConsumerKeys[] = { "socket.timeout.ms",
+ "socket.receive.buffer.bytes", "fetch.message.max.bytes",
+ "auto.commit.interval.ms", "queued.max.message.chunks",
+ "rebalance.max.retries", "fetch.min.bytes", "fetch.wait.max.bytes",
+ "rebalance.backoff.ms", "refresh.leader.backoff.ms",
+ "auto.offset.reset", "consumer.timeout.ms",
+ "zookeeper.session.timeout.ms", "zookeeper.connection.timeout.ms",
+ "zookeeper.sync.time.ms" };
+
+ private static String makeLongKey(String key, String prefix) {
+ return prefix + "." + key;
+ }
+
+ private void transferSettingIfProvided(Properties target, String key,
+ String prefix) {
+ String keyVal= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,makeLongKey(key, prefix));
+
+ // if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
+ if (null!=keyVal) {
+ // final String val = fSettings
+ // .getString(makeLongKey(key, prefix), "");
+ log.info("Setting [" + key + "] to " + keyVal + ".");
+ target.put(key, keyVal);
+ }
+ }
+
+ }
+
+
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
new file mode 100644
index 0000000..aad992c
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
@@ -0,0 +1,462 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.exception.ZkNoNodeException;
+//import org.apache.log4-j.Logger;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaAclUtils;
+import com.att.nsa.security.NsaApiKey;
+
+import kafka.admin.AdminUtils;
+import kafka.utils.ZKStringSerializer$;
+
+/**
+ * class performing all topic operations
+ *
+ * @author author
+ *
+ */
+
+public class DMaaPKafkaMetaBroker implements Broker {
+
+ //private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+
+
+ /**
+ * DMaaPKafkaMetaBroker constructor initializing
+ *
+ * @param settings
+ * @param zk
+ * @param configDb
+ */
+ public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings,
+ @Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) {
+ //fSettings = settings;
+ fZk = zk;
+ fCambriaConfig = configDb;
+ fBaseTopicData = configDb.parse("/topics");
+ }
+
+ @Override
+ public List<Topic> getAllTopics() throws ConfigDbException {
+ log.info("Retrieving list of all the topics.");
+ final LinkedList<Topic> result = new LinkedList<Topic>();
+ try {
+ log.info("Retrieving all topics from root: " + zkTopicsRoot);
+ final List<String> topics = fZk.getChildren(zkTopicsRoot);
+ for (String topic : topics) {
+ result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
+ }
+
+ JSONObject dataObj = new JSONObject();
+ dataObj.put("topics", new JSONObject());
+
+ for (String topic : topics) {
+ dataObj.getJSONObject("topics").put(topic, new JSONObject());
+ }
+ } catch (ZkNoNodeException excp) {
+ // very fresh kafka doesn't have any topics or a topics node
+ log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp);
+ }
+ return result;
+ }
+
+ @Override
+ public Topic getTopic(String topic) throws ConfigDbException {
+ if (fZk.exists(zkTopicsRoot + "/" + topic)) {
+ return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic);
+ }
+ // else: no such topic in kafka
+ return null;
+ }
+
+ /**
+ * static method get KafkaTopic object
+ *
+ * @param db
+ * @param base
+ * @param topic
+ * @return
+ * @throws ConfigDbException
+ */
+ public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException {
+ return new KafkaTopic(topic, db, base);
+ }
+
+ /**
+ * creating topic
+ */
+ @Override
+ public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
+ boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
+ log.info("Creating topic: " + topic);
+ try {
+ log.info("Check if topic [" + topic + "] exist.");
+ // first check for existence "our way"
+ final Topic t = getTopic(topic);
+ if (t != null) {
+ log.info("Could not create topic [" + topic + "]. Topic Already exists.");
+ throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists.");
+ }
+ } catch (ConfigDbException e1) {
+ log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1);
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+ "Couldn't check topic data in config db.");
+ }
+
+ // we only allow 3 replicas. (If we don't test this, we get weird
+ // results from the cluster,
+ // so explicit test and fail.)
+ if (replicas < 1 || replicas > 3) {
+ log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3.");
+ throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+ "The replica count must be between 1 and 3.");
+ }
+ if (partitions < 1) {
+ log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1.");
+ throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1.");
+ }
+
+ // create via kafka
+ try {
+ ZkClient zkClient = null;
+ try {
+ log.info("Loading zookeeper client for creating topic.");
+ // FIXME: use of this scala module$ thing is a goofy hack to
+ // make Kafka aware of the
+ // topic creation. (Otherwise, the topic is only partially
+ // created in ZK.)
+ zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+ ZKStringSerializer$.MODULE$);
+
+ log.info("Zookeeper client loaded successfully. Creating topic.");
+ AdminUtils.createTopic(zkClient, topic, partitions, replicas, new Properties());
+ } catch (kafka.common.TopicExistsException e) {
+ log.error("Topic [" + topic + "] could not be created. " + e.getMessage(), e);
+ throw new TopicExistsException(topic);
+ } catch (ZkNoNodeException e) {
+ log.error("Topic [" + topic + "] could not be created. The Kafka cluster is not setup.", e);
+ // Kafka throws this when the server isn't running (and perhaps
+ // hasn't ever run)
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+ "The Kafka cluster is not setup.");
+ } catch (kafka.admin.AdminOperationException e) {
+ // Kafka throws this when the server isn't running (and perhaps
+ // hasn't ever run)
+ log.error("The Kafka cluster can't handle your request. Talk to the administrators: " + e.getMessage(),
+ e);
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+ "The Kafka cluster can't handle your request. Talk to the administrators.");
+ } finally {
+ log.info("Closing zookeeper connection.");
+ if (zkClient != null)
+ zkClient.close();
+ }
+
+ log.info("Creating topic entry for topic: " + topic);
+ // underlying Kafka topic created. now setup our API info
+ return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
+ } catch (ConfigDbException excp) {
+ log.error("Failed to create topic data. Talk to the administrators: " + excp.getMessage(), excp);
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+ "Failed to create topic data. Talk to the administrators.");
+ }
+ }
+
+ @Override
+ public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException {
+ log.info("Deleting topic: " + topic);
+ ZkClient zkClient = null;
+ try {
+ log.info("Loading zookeeper client for topic deletion.");
+ // FIXME: use of this scala module$ thing is a goofy hack to make
+ // Kafka aware of the
+ // topic creation. (Otherwise, the topic is only partially created
+ // in ZK.)
+ zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+ ZKStringSerializer$.MODULE$);
+
+ log.info("Zookeeper client loaded successfully. Deleting topic.");
+ AdminUtils.deleteTopic(zkClient, topic);
+ } catch (kafka.common.TopicExistsException e) {
+ log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
+ throw new TopicExistsException(topic);
+ } catch (ZkNoNodeException e) {
+ log.error("Failed to delete topic [" + topic + "]. The Kafka cluster is not setup." + e.getMessage(), e);
+ // Kafka throws this when the server isn't running (and perhaps
+ // hasn't ever run)
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, "The Kafka cluster is not setup.");
+ } catch (kafka.admin.AdminOperationException e) {
+ // Kafka throws this when the server isn't running (and perhaps
+ // hasn't ever run)
+ log.error("The Kafka cluster can't handle your request. Talk to the administrators." + e.getMessage(), e);
+ throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+ "The Kafka cluster can't handle your request. Talk to the administrators.");
+ } finally {
+ log.info("Closing zookeeper connection.");
+ if (zkClient != null)
+ zkClient.close();
+ }
+
+ // throw new UnsupportedOperationException ( "We can't programmatically
+ // delete Kafka topics yet." );
+ }
+
+ //private final rrNvReadable fSettings;
+ private final ZkClient fZk;
+ private final ConfigDb fCambriaConfig;
+ private final ConfigPath fBaseTopicData;
+
+ private static final String zkTopicsRoot = "/brokers/topics";
+ private static final JSONObject kEmptyAcl = new JSONObject();
+
+ /**
+ * method Providing KafkaTopic Object associated with owner and
+ * transactionenabled or not
+ *
+ * @param name
+ * @param desc
+ * @param owner
+ * @param transactionEnabled
+ * @return
+ * @throws ConfigDbException
+ */
+ public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled)
+ throws ConfigDbException {
+ return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled);
+ }
+
+ /**
+ * static method giving kafka topic object
+ *
+ * @param db
+ * @param basePath
+ * @param name
+ * @param desc
+ * @param owner
+ * @param transactionEnabled
+ * @return
+ * @throws ConfigDbException
+ */
+ public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner,
+ boolean transactionEnabled) throws ConfigDbException {
+ final JSONObject o = new JSONObject();
+ o.put("owner", owner);
+ o.put("description", desc);
+ o.put("txenabled", transactionEnabled);
+ db.store(basePath.getChild(name), o.toString());
+ return new KafkaTopic(name, db, basePath);
+ }
+
+ /**
+ * class performing all user opearation like user is eligible to read,
+ * write. permitting a user to write and read,
+ *
+ * @author author
+ *
+ */
+ public static class KafkaTopic implements Topic {
+ /**
+ * constructor initializes
+ *
+ * @param name
+ * @param configdb
+ * @param baseTopic
+ * @throws ConfigDbException
+ */
+ public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException {
+ fName = name;
+ fConfigDb = configdb;
+ fBaseTopicData = baseTopic;
+
+ String data = fConfigDb.load(fBaseTopicData.getChild(fName));
+ if (data == null) {
+ data = "{}";
+ }
+
+ final JSONObject o = new JSONObject(data);
+ fOwner = o.optString("owner", "");
+ fDesc = o.optString("description", "");
+ fTransactionEnabled = o.optBoolean("txenabled", false);// default
+ // value is
+ // false
+ // if this topic has an owner, it needs both read/write ACLs. If there's no
+ // owner (or it's empty), null is okay -- this is for existing or implicitly
+ // created topics.
+ JSONObject readers = o.optJSONObject ( "readers" );
+ if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
+ fReaders = fromJson ( readers );
+
+ JSONObject writers = o.optJSONObject ( "writers" );
+ if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
+ fWriters = fromJson ( writers );
+ }
+ private NsaAcl fromJson(JSONObject o) {
+ NsaAcl acl = new NsaAcl();
+ if (o != null) {
+ JSONArray a = o.optJSONArray("allowed");
+ if (a != null) {
+ for (int i = 0; i < a.length(); ++i) {
+ String user = a.getString(i);
+ acl.add(user);
+ }
+ }
+ }
+ return acl;
+ }
+ @Override
+ public String getName() {
+ return fName;
+ }
+
+ @Override
+ public String getOwner() {
+ return fOwner;
+ }
+
+ @Override
+ public String getDescription() {
+ return fDesc;
+ }
+
+ @Override
+ public NsaAcl getReaderAcl() {
+ return fReaders;
+ }
+
+ @Override
+ public NsaAcl getWriterAcl() {
+ return fWriters;
+ }
+
+ @Override
+ public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
+ NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user );
+ }
+
+ @Override
+ public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
+ NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user );
+ }
+
+ @Override
+ public void permitWritesFromUser(String pubId, NsaApiKey asUser)
+ throws ConfigDbException, AccessDeniedException {
+ updateAcl(asUser, false, true, pubId);
+ }
+
+ @Override
+ public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException {
+ updateAcl(asUser, false, false, pubId);
+ }
+
+ @Override
+ public void permitReadsByUser(String consumerId, NsaApiKey asUser)
+ throws ConfigDbException, AccessDeniedException {
+ updateAcl(asUser, true, true, consumerId);
+ }
+
+ @Override
+ public void denyReadsByUser(String consumerId, NsaApiKey asUser)
+ throws ConfigDbException, AccessDeniedException {
+ updateAcl(asUser, true, false, consumerId);
+ }
+
+ private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key)
+ throws ConfigDbException, AccessDeniedException{
+ try
+ {
+ final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add );
+
+ // we have to assume we have current data, or load it again. for the expected use
+ // case, assuming we can overwrite the data is fine.
+ final JSONObject o = new JSONObject ();
+ o.put ( "owner", fOwner );
+ o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) );
+ o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) );
+ fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () );
+
+ log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName );
+
+ }
+ catch ( ConfigDbException x )
+ {
+ throw x;
+ }
+ catch ( AccessDeniedException x )
+ {
+ throw x;
+ }
+
+ }
+
+ private JSONObject safeSerialize(NsaAcl acl) {
+ return acl == null ? null : acl.serialize();
+ }
+
+ private final String fName;
+ private final ConfigDb fConfigDb;
+ private final ConfigPath fBaseTopicData;
+ private final String fOwner;
+ private final String fDesc;
+ private final NsaAcl fReaders;
+ private final NsaAcl fWriters;
+ private boolean fTransactionEnabled;
+
+ public boolean isTransactionEnabled() {
+ return fTransactionEnabled;
+ }
+
+ @Override
+ public Set<String> getOwners() {
+ final TreeSet<String> owners = new TreeSet<String> ();
+ owners.add ( fOwner );
+ return owners;
+ }
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java
new file mode 100644
index 0000000..f98eeee
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java
@@ -0,0 +1,233 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.apiServer.metrics.cambria.DMaaPMetricsSender;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiVersionInfo;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.impl.CdmConstant;
+import com.att.nsa.metrics.impl.CdmCounter;
+import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl;
+import com.att.nsa.metrics.impl.CdmMovingAverage;
+import com.att.nsa.metrics.impl.CdmRateTicker;
+import com.att.nsa.metrics.impl.CdmSimpleMetric;
+import com.att.nsa.metrics.impl.CdmStringConstant;
+import com.att.nsa.metrics.impl.CdmTimeSince;
+
+/**
+ * Registry of Cambria runtime metrics: totals and events-per-second rates for
+ * receives and sends, Kafka consumer cache counters, a send-per-receive
+ * fan-out ratio, and lazily-registered per-path usage/duration metrics. Also
+ * owns the scheduler used to emit the metrics periodically to the metrics
+ * topic.
+ *
+ * @author author
+ */
+public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
+
+	private final CdmStringConstant fVersion;
+	private final CdmConstant fStartTime;
+	private final CdmTimeSince fUpTime;
+
+	private final CdmCounter fRecvTotal;
+	private final CdmRateTicker fRecvEpsInstant;
+	private final CdmRateTicker fRecvEpsShort;
+	private final CdmRateTicker fRecvEpsLong;
+
+	private final CdmCounter fSendTotal;
+	private final CdmRateTicker fSendEpsInstant;
+	private final CdmRateTicker fSendEpsShort;
+	private final CdmRateTicker fSendEpsLong;
+
+	private final CdmCounter fKafkaConsumerCacheMiss;
+	private final CdmCounter fKafkaConsumerCacheHit;
+
+	private final CdmCounter fKafkaConsumerClaimed;
+	private final CdmCounter fKafkaConsumerTimeout;
+
+	private final CdmSimpleMetric fFanOutRatio;
+
+	// per-path metrics; entries are created and registered on first use.
+	// NOTE(review): plain HashMaps -- assumes onRouteComplete is not called
+	// concurrently; confirm before relying on this from multiple threads
+	private final HashMap<String, CdmRateTicker> fPathUseRates;
+	private final HashMap<String, CdmMovingAverage> fPathAvgs;
+
+	private final ScheduledExecutorService fScheduler;
+
+	/**
+	 * Creates every metric and registers it with the underlying registry.
+	 *
+	 * @param cs settings reader; currently unused, retained for injection compatibility
+	 */
+	public DMaaPMetricsSet(rrNvReadable cs) {
+
+		fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
+		super.putItem("version", fVersion);
+
+		final long startTime = System.currentTimeMillis();
+		final Date d = new Date(startTime);
+		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+		fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text);
+		super.putItem("startTime", fStartTime);
+
+		fUpTime = new CdmTimeSince("seconds since start");
+		super.putItem("upTime", fUpTime);
+
+		fRecvTotal = new CdmCounter("Total events received since start");
+		super.putItem("recvTotalEvents", fRecvTotal);
+
+		fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+		super.putItem("recvEpsInstant", fRecvEpsInstant);
+
+		fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+		super.putItem("recvEpsShort", fRecvEpsShort);
+
+		fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+		super.putItem("recvEpsLong", fRecvEpsLong);
+
+		fSendTotal = new CdmCounter("Total events sent since start");
+		super.putItem("sendTotalEvents", fSendTotal);
+
+		fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+		super.putItem("sendEpsInstant", fSendEpsInstant);
+
+		fSendEpsShort = new CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+		super.putItem("sendEpsShort", fSendEpsShort);
+
+		fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+		super.putItem("sendEpsLong", fSendEpsLong);
+
+		fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses");
+		super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss);
+
+		fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits");
+		super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit);
+
+		fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed");
+		super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed);
+
+		fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout");
+		super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout);
+
+		// fan-out = sends per receive; computed on demand from the two counters
+		fFanOutRatio = new CdmSimpleMetric() {
+			@Override
+			public String getRawValueString() {
+				return getRawValue().toString();
+			}
+
+			@Override
+			public Number getRawValue() {
+				final double s = fSendTotal.getValue();
+				final double r = fRecvTotal.getValue();
+				// guard against division by zero before any event is received
+				return r == 0.0 ? 0.0 : s / r;
+			}
+
+			@Override
+			public String summarize() {
+				return getRawValueString() + " sends per recv";
+			}
+
+		};
+		super.putItem("fanOut", fFanOutRatio);
+
+		// these are added to the metrics catalog as they're discovered
+		fPathUseRates = new HashMap<String, CdmRateTicker>();
+		fPathAvgs = new HashMap<String, CdmMovingAverage>();
+
+		fScheduler = Executors.newScheduledThreadPool(1);
+	}
+
+	/**
+	 * Starts periodic reporting of this metric set to the
+	 * "cambria.apinode.metrics.dmaap" topic.
+	 */
+	@Override
+	public void setupCambriaSender() {
+		DMaaPMetricsSender.sendPeriodically(fScheduler, this, "cambria.apinode.metrics.dmaap");
+	}
+
+	/**
+	 * Records one completed call on a route: ticks the path's call-rate ticker
+	 * and its moving-average duration, creating and registering both metrics
+	 * the first time the path is seen.
+	 *
+	 * @param name the route path
+	 * @param durationMs how long the call took, in milliseconds
+	 */
+	@Override
+	public void onRouteComplete(String name, long durationMs) {
+		CdmRateTicker ticker = fPathUseRates.get(name);
+		if (ticker == null) {
+			ticker = new CdmRateTicker("calls/min on path " + name, 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS);
+			fPathUseRates.put(name, ticker);
+			super.putItem("pathUse_" + name, ticker);
+		}
+		ticker.tick();
+
+		CdmMovingAverage durs = fPathAvgs.get(name);
+		if (durs == null) {
+			durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES);
+			fPathAvgs.put(name, durs);
+			super.putItem("pathDurationMs_" + name, durs);
+		}
+		durs.tick(durationMs);
+	}
+
+	/**
+	 * Records published (received) events: bumps the receive total and all
+	 * three receive-rate tickers. Non-positive amounts are ignored.
+	 *
+	 * @param amount number of events received
+	 */
+	@Override
+	public void publishTick(int amount) {
+		if (amount > 0) {
+			fRecvTotal.bumpBy(amount);
+			fRecvEpsInstant.tick(amount);
+			fRecvEpsShort.tick(amount);
+			fRecvEpsLong.tick(amount);
+		}
+	}
+
+	/**
+	 * Records consumed (sent) events: bumps the send total and all three
+	 * send-rate tickers. Non-positive amounts are ignored.
+	 *
+	 * @param amount number of events sent
+	 */
+	@Override
+	public void consumeTick(int amount) {
+		if (amount > 0) {
+			fSendTotal.bumpBy(amount);
+			fSendEpsInstant.tick(amount);
+			fSendEpsShort.tick(amount);
+			fSendEpsLong.tick(amount);
+		}
+	}
+
+	/** Counts one Kafka consumer cache miss. */
+	@Override
+	public void onKafkaConsumerCacheMiss() {
+		fKafkaConsumerCacheMiss.bump();
+	}
+
+	/** Counts one Kafka consumer cache hit. */
+	@Override
+	public void onKafkaConsumerCacheHit() {
+		fKafkaConsumerCacheHit.bump();
+	}
+
+	/** Counts one claimed Kafka consumer. */
+	@Override
+	public void onKafkaConsumerClaimed() {
+		fKafkaConsumerClaimed.bump();
+	}
+
+	/** Counts one timed-out Kafka consumer. */
+	@Override
+	public void onKafkaConsumerTimeout() {
+		fKafkaConsumerTimeout.bump();
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java
new file mode 100644
index 0000000..2826289
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java
@@ -0,0 +1,139 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.security.Key;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+//import org.apache.log4-j.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.EncryptingLayer;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.EncryptingApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import com.att.nsa.util.rrConvertor;
+
+/**
+ * Builds the NSA api key database used by Cambria -- encrypting it when both
+ * an AES key and IV are configured -- and holds the zookeeper config db bean
+ * it is backed by.
+ *
+ * @author author
+ */
+public class DMaaPNsaApiDb {
+
+	private DMaaPZkConfigDb cdb;
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class);
+
+	/**
+	 * Constructor; the settings reader is currently unused but retained for
+	 * injection compatibility.
+	 *
+	 * @param settings property reader (unused)
+	 * @param cdb zookeeper-backed config db
+	 */
+	@Autowired
+	public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) {
+		this.setCdb(cdb);
+	}
+
+	/**
+	 * Builds the api key db. When neither "cambria.secureConfig.key" nor
+	 * "cambria.secureConfig.iv" is configured, an unencrypted db is returned;
+	 * when both are configured, an encrypting db is returned; when only one
+	 * is set, the configuration is considered invalid.
+	 *
+	 * @param settings property reader (unused here; values are read from AJSCPropertiesMap)
+	 * @param cdb backing config db
+	 * @return the api key db, possibly encrypting
+	 * @throws ConfigDbException on config db access failure
+	 * @throws missingReqdSetting when only one of key/iv is configured
+	 */
+	public static NsaApiDb<NsaSimpleApiKey> buildApiKeyDb(
+			rrNvReadable settings, ConfigDb cdb) throws ConfigDbException,
+			missingReqdSetting {
+		// Cambria uses an encrypted api key db when both settings are present
+		final String keyBase64 = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "cambria.secureConfig.key");
+		final String initVectorBase64 = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "cambria.secureConfig.iv");
+
+		// if neither value was provided, don't encrypt api key db
+		if (keyBase64 == null && initVectorBase64 == null) {
+			log.info("This server is configured to use an unencrypted API key database. See the settings documentation.");
+			return new BaseNsaApiDbImpl<NsaSimpleApiKey>(cdb,
+					new NsaSimpleApiKeyFactory());
+		} else if (keyBase64 == null) {
+			// neither or both, otherwise something's goofed
+			throw new missingReqdSetting("cambria.secureConfig.key");
+		} else if (initVectorBase64 == null) {
+			// neither or both, otherwise something's goofed
+			throw new missingReqdSetting("cambria.secureConfig.iv");
+		} else {
+			log.info("This server is configured to use an encrypted API key database.");
+			final Key key = EncryptingLayer.readSecretKey(keyBase64);
+			final byte[] iv = rrConvertor.base64Decode(initVectorBase64);
+			return new EncryptingApiDbImpl<NsaSimpleApiKey>(cdb,
+					new NsaSimpleApiKeyFactory(), key, iv);
+		}
+	}
+
+	/**
+	 * @return the zookeeper config db
+	 */
+	public DMaaPZkConfigDb getCdb() {
+		return cdb;
+	}
+
+	/**
+	 * @param cdb the zookeeper config db to use
+	 */
+	public void setCdb(DMaaPZkConfigDb cdb) {
+		this.cdb = cdb;
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java
new file mode 100644
index 0000000..45af1f6
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Created for Zookeeper client which will read configuration and settings parameter
+ * @author author
+ *
+ */
+public class DMaaPZkClient extends ZkClient {
+
+	/**
+	 * Connects to zookeeper using the connection string supplied by
+	 * ConfigurationReader.
+	 *
+	 * @param settings property reader; NOTE(review): not read here -- the
+	 *        connection string comes from ConfigurationReader; presumably the
+	 *        parameter exists for Spring qualifier wiring, confirm
+	 */
+	public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) {
+		super(ConfigurationReader.getMainZookeeperConnectionString());
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java
new file mode 100644
index 0000000..db59224
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.configs.confimpl.ZkConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+//import com.att.nsa.configs.confimpl.ZkConfigDb;
+/**
+ * Zookeeper-backed config db bean. The connection string and config root come
+ * from ConfigurationReader rather than from the injected beans.
+ *
+ * @author author
+ */
+public class DMaaPZkConfigDb extends ZkConfigDb {
+	/**
+	 * Builds the config db from ConfigurationReader's zookeeper connection
+	 * string and config db root.
+	 *
+	 * @param zk zookeeper client bean; unused here, retained for injection compatibility
+	 * @param settings property reader; unused here, retained for injection compatibility
+	 */
+	public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk,
+			@Qualifier("propertyReader") rrNvReadable settings) {
+		super(ConfigurationReader.getMainZookeeperConnectionString(), ConfigurationReader.getMainZookeeperConnectionSRoot());
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java
new file mode 100644
index 0000000..f28e9ed
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java
@@ -0,0 +1,214 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.util.Date;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+
+/**
+ * Bean carrying the per-message details written to the transaction log:
+ * publisher/subscriber identities, batch/sequence identifiers, timestamps and
+ * sizes. Builds the composite transaction id and the formatted publisher log
+ * line.
+ *
+ * @author author
+ */
+
+public class LogDetails {
+
+	private String publisherId;
+	private String topicId;
+	private String subscriberGroupId;
+	private String subscriberId;
+	private String publisherIp;
+	private String messageBatchId;
+	private String messageSequence;
+	private String messageTimestamp;
+	private String consumeTimestamp;
+	private String transactionIdTs;
+	private String serverIp;
+
+	private long messageLengthInBytes;
+	private long totalMessageCount;
+
+	private boolean transactionEnabled;
+	/**
+	 * This is for transaction enabled logging details
+	 *
+	 */
+	public LogDetails() {
+		super();
+	}
+
+	/**
+	 * Builds the composite transaction id:
+	 * transactionIdTs, publisherIp, messageBatchId and messageSequence joined
+	 * by CambriaConstants.TRANSACTION_ID_SEPARATOR.
+	 *
+	 * @return the composed transaction id
+	 */
+	public String getTransactionId() {
+		// chained appends; string-valued fields that are null render as "null",
+		// matching the previous concatenation behavior
+		return new StringBuilder()
+				.append(transactionIdTs)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(publisherIp)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(messageBatchId)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(messageSequence)
+				.toString();
+	}
+
+	public String getPublisherId() {
+		return publisherId;
+	}
+
+	public void setPublisherId(String publisherId) {
+		this.publisherId = publisherId;
+	}
+
+	public String getTopicId() {
+		return topicId;
+	}
+
+	public void setTopicId(String topicId) {
+		this.topicId = topicId;
+	}
+
+	public String getSubscriberGroupId() {
+		return subscriberGroupId;
+	}
+
+	public void setSubscriberGroupId(String subscriberGroupId) {
+		this.subscriberGroupId = subscriberGroupId;
+	}
+
+	public String getSubscriberId() {
+		return subscriberId;
+	}
+
+	public void setSubscriberId(String subscriberId) {
+		this.subscriberId = subscriberId;
+	}
+
+	public String getPublisherIp() {
+		return publisherIp;
+	}
+
+	public void setPublisherIp(String publisherIp) {
+		this.publisherIp = publisherIp;
+	}
+
+	public String getMessageBatchId() {
+		return messageBatchId;
+	}
+
+	/**
+	 * Sets the batch id from its numeric form, formatting it via
+	 * Utils.getFromattedBatchSequenceId.
+	 *
+	 * @param messageBatchId numeric batch sequence id
+	 */
+	public void setMessageBatchId(Long messageBatchId) {
+		this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId);
+	}
+
+	public String getMessageSequence() {
+		return messageSequence;
+	}
+
+	public void setMessageSequence(String messageSequence) {
+		this.messageSequence = messageSequence;
+	}
+
+	public String getMessageTimestamp() {
+		return messageTimestamp;
+	}
+
+	public void setMessageTimestamp(String messageTimestamp) {
+		this.messageTimestamp = messageTimestamp;
+	}
+
+	/**
+	 * @return the current time, formatted via Utils.getFormattedDate --
+	 *         computed at call time, not stored
+	 */
+	public String getPublishTimestamp() {
+		return Utils.getFormattedDate(new Date());
+	}
+
+	public String getConsumeTimestamp() {
+		return consumeTimestamp;
+	}
+
+	public void setConsumeTimestamp(String consumeTimestamp) {
+		this.consumeTimestamp = consumeTimestamp;
+	}
+
+	public long getMessageLengthInBytes() {
+		return messageLengthInBytes;
+	}
+
+	public void setMessageLengthInBytes(long messageLengthInBytes) {
+		this.messageLengthInBytes = messageLengthInBytes;
+	}
+
+	public long getTotalMessageCount() {
+		return totalMessageCount;
+	}
+
+	public void setTotalMessageCount(long totalMessageCount) {
+		this.totalMessageCount = totalMessageCount;
+	}
+
+	public boolean isTransactionEnabled() {
+		return transactionEnabled;
+	}
+
+	public void setTransactionEnabled(boolean transactionEnabled) {
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	public String getTransactionIdTs() {
+		return transactionIdTs;
+	}
+
+	public void setTransactionIdTs(String transactionIdTs) {
+		this.transactionIdTs = transactionIdTs;
+	}
+
+	/**
+	 * Formats the publisher-side log line containing all publish-related
+	 * fields of this bean.
+	 *
+	 * @return the formatted log line
+	 */
+	public String getPublisherLogDetails() {
+		// chain appends instead of concatenating inside append() so the
+		// StringBuilder actually avoids intermediate String allocations;
+		// the resulting text is unchanged
+		final StringBuilder buffer = new StringBuilder();
+		buffer.append("[publisherId=").append(publisherId);
+		buffer.append(", topicId=").append(topicId);
+		buffer.append(", messageTimestamp=").append(messageTimestamp);
+		buffer.append(", publisherIp=").append(publisherIp);
+		buffer.append(", messageBatchId=").append(messageBatchId);
+		buffer.append(", messageSequence=").append(messageSequence);
+		buffer.append(", messageLengthInBytes=").append(messageLengthInBytes);
+		buffer.append(", transactionEnabled=").append(transactionEnabled);
+		buffer.append(", transactionId=").append(getTransactionId());
+		buffer.append(", publishTimestamp=").append(getPublishTimestamp());
+		buffer.append(", serverIp=").append(getServerIp()).append("]");
+		return buffer.toString();
+	}
+
+	public String getServerIp() {
+		return serverIp;
+	}
+
+	public void setServerIp(String serverIp) {
+		this.serverIp = serverIp;
+	}
+
+	public void setMessageBatchId(String messageBatchId) {
+		this.messageBatchId = messageBatchId;
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java
new file mode 100644
index 0000000..9ff8a32
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java
@@ -0,0 +1,155 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
+
+import java.io.Serializable;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * Serializable bean describing a topic: name, description, partition and
+ * replication counts, and whether transactions are enabled.
+ *
+ * @author author
+ *
+ */
+@XmlRootElement
+public class TopicBean implements Serializable {
+
+	private static final long serialVersionUID = -8620390377775457949L;
+	private String topicName;
+	private String topicDescription;
+
+	private int partitionCount = 1; //default value
+	private int replicationCount = 1; //default value
+
+	private boolean transactionEnabled;
+
+	/**
+	 * constructor
+	 */
+	public TopicBean() {
+		super();
+	}
+
+	/**
+	 * constructor initialization with topic details name, description,
+	 * partition, replication, transaction
+	 *
+	 * @param topicName
+	 * @param topicDescription
+	 * @param partitionCount
+	 * @param replicationCount
+	 * @param transactionEnabled
+	 */
+	public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount,
+			boolean transactionEnabled) {
+		super();
+		this.topicName = topicName;
+		this.topicDescription = topicDescription;
+		this.partitionCount = partitionCount;
+		this.replicationCount = replicationCount;
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	/**
+	 * @return
+	 * 		returns topic name which is of String type
+	 */
+	public String getTopicName() {
+		return topicName;
+	}
+
+	/**
+	 * @param topicName
+	 * 		set topic name
+	 */
+	public void setTopicName(String topicName) {
+		this.topicName = topicName;
+	}
+
+
+	/**
+	 * @return
+	 * 		returns partition count which is of int type
+	 */
+	public int getPartitionCount() {
+		return partitionCount;
+	}
+
+	/**
+	 * @param partitionCount
+	 * 		set partition Count
+	 */
+	public void setPartitionCount(int partitionCount) {
+		this.partitionCount = partitionCount;
+	}
+
+	/**
+	 * @return
+	 * 		returns replication count which is of int type
+	 */
+	public int getReplicationCount() {
+		return replicationCount;
+	}
+
+	/**
+	 * @param replicationCount
+	 * 		set replication count which is of int type
+	 */
+	public void setReplicationCount(int replicationCount) {
+		this.replicationCount = replicationCount;
+	}
+
+	/**
+	 * @return
+	 * 		returns boolean value which indicates whether transaction is Enabled
+	 */
+	public boolean isTransactionEnabled() {
+		return transactionEnabled;
+	}
+
+	/**
+	 * @param transactionEnabled
+	 * 		sets boolean value which indicates whether transaction is Enabled
+	 */
+	public void setTransactionEnabled(boolean transactionEnabled) {
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	/**
+	 *
+	 * @return returns description which is of String type
+	 */
+	public String getTopicDescription() {
+		return topicDescription;
+	}
+	/**
+	 *
+	 * @param topicDescription
+	 * 		set description which is of String type
+	 */
+	public void setTopicDescription(String topicDescription) {
+		this.topicDescription = topicDescription;
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java
new file mode 100644
index 0000000..98d0766
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java
@@ -0,0 +1,125 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants;
+
+import org.apache.coyote.http11.Http11NioProtocol;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+
+/**
+ * This is the constant files for all the property or parameters.
+ * @author author
+ *
+ */
+public interface CambriaConstants {
+
+ String CAMBRIA = "Cambria";
+ String DMAAP = "DMaaP";
+
+ String kDefault_ZkRoot = "/fe3c/cambria";
+
+ String kSetting_ZkConfigDbRoot = "config.zk.root";
+ String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
+String msgRtr_prop="MsgRtrApi.properties";
+ String kBrokerType = "broker.type";
+
+ /**
+ * value to use to signal kafka broker type.
+ */
+ String kBrokerType_Kafka = "kafka";
+ String kBrokerType_Memory = "memory";
+ String kSetting_AdminSecret = "authentication.adminSecret";
+
+ String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier";
+
+ /**
+ * value to use to signal max empty poll per minute
+ */
+ String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+ double kDefault_MaxEmptyPollsPerMinute = 10.0;
+
+ String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
+ long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
+
+ String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
+ int kDefault_RateLimitWindowLength = 5;
+
+ String kConfig = "c";
+
+ String kSetting_Port = "cambria.service.port";
+ /**
+ * value to use to signal default port
+ */
+ int kDefault_Port = 3904;
+
+ String kSetting_MaxThreads = "tomcat.maxthreads";
+ int kDefault_MaxThreads = -1;
+
+
+// String kSetting_TomcatProtocolClass = "tomcat.protocolClass";
+ //String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName ();
+
+ String kSetting_ZkConfigDbServers = "config.zk.servers";
+
+ /**
+ * value to indicate localhost port number
+ */
+ String kDefault_ZkConfigDbServers = "localhost:2181";
+
+ /**
+ * value to use to signal Session time out
+ */
+ String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout";
+ int kDefault_ZkSessionTimeoutMs = 20 * 1000;
+
+ /**
+ * value to use to signal connection time out
+ */
+ String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout";
+ int kDefault_ZkConnectionTimeoutMs = 5 * 1000;
+
+ String TRANSACTION_ID_SEPARATOR = "::";
+
+ /**
+ * value to use to signal there's no timeout on the consumer request.
+ */
+ public static final int kNoTimeout = 10000;
+
+ /**
+ * value to use to signal no limit in the number of messages returned.
+ */
+ public static final int kNoLimit = 0;
+
+ /**
+ * value to use to signal that the caller wants the next set of events
+ */
+ public static final int kNextOffset = -1;
+
+ /**
+ * value to use to signal there's no filter on the response stream.
+ */
+ public static final String kNoFilter = "";
+
+ //Added for Metric publish
+ public static final int kStdCambriaServicePort = 3904;
+ public static final String kBasePath = "/events/";
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java
new file mode 100644
index 0000000..190714f
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+
+public class DMaaPAccessDeniedException extends CambriaApiException{
+
+
+
+ public DMaaPAccessDeniedException(ErrorResponse errRes) {
+ super(errRes);
+
+ }
+
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1L;
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
new file mode 100644
index 0000000..10d127f
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Exception Mapper class to handle
+ * CambriaApiException
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
+
+private ErrorResponse errRes;
+
+//private static final Logger LOGGER = Logger.getLogger(DMaaPCambriaExceptionMapper.class);
+private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
+
+ @Autowired
+ private DMaaPErrorMessages msgs;
+
+ public DMaaPCambriaExceptionMapper() {
+ super();
+ LOGGER.info("Cambria Exception Mapper Created..");
+ }
+
+ @Override
+ public Response toResponse(CambriaApiException ex) {
+
+ LOGGER.info("Reached Cambria Exception Mapper..");
+
+ /**
+ * Cambria Generic Exception
+ */
+ if(ex instanceof CambriaApiException)
+ {
+
+ errRes = ex.getErrRes();
+ if(errRes!=null) {
+
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+ else
+ {
+ return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+
+
+ }
+ else
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
+ return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
+ }
+
+ }
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java
new file mode 100644
index 0000000..5ef2493
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java
@@ -0,0 +1,239 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
/**
 * Spring bean carrying the externalized error-message texts used by the
 * Message Router REST layer. Each field is injected via {@code @Value} from
 * the application's property source; the property key appears in the
 * annotation above each field.
 *
 * @author author
 */
@Component
public class DMaaPErrorMessages {

	// ----- generic HTTP / availability messages -----

	@Value("${resource.not.found}")
	private String notFound;

	@Value("${server.unavailable}")
	private String serverUnav;

	@Value("${http.method.not.allowed}")
	private String methodNotAllowed;

	@Value("${incorrect.request.json}")
	private String badRequest;

	@Value("${network.time.out}")
	private String nwTimeout;

	// ----- topic-related messages -----

	@Value("${get.topic.failure}")
	private String topicsfailure;

	@Value("${not.permitted.access.1}")
	private String notPermitted1;

	@Value("${not.permitted.access.2}")
	private String notPermitted2;

	@Value("${get.topic.details.failure}")
	private String topicDetailsFail;

	@Value("${create.topic.failure}")
	private String createTopicFail;

	@Value("${delete.topic.failure}")
	private String deleteTopicFail;

	@Value("${incorrect.json}")
	private String incorrectJson;

	// ----- publish / consume messages -----

	@Value("${consume.msg.error}")
	private String consumeMsgError;

	@Value("${publish.msg.error}")
	private String publishMsgError;

	@Value("${publish.msg.count}")
	private String publishMsgCount;

	// ----- authentication / size-limit messages -----

	@Value("${authentication.failure}")
	private String authFailure;

	@Value("${msg_size_exceeds}")
	private String msgSizeExceeds;

	@Value("${topic.not.exist}")
	private String topicNotExist;

	// Plain JavaBean accessors; the setters allow the bean to be populated
	// without field injection (e.g. in tests).

	public String getMsgSizeExceeds() {
		return msgSizeExceeds;
	}

	public void setMsgSizeExceeds(String msgSizeExceeds) {
		this.msgSizeExceeds = msgSizeExceeds;
	}

	public String getNotFound() {
		return notFound;
	}

	public void setNotFound(String notFound) {
		this.notFound = notFound;
	}

	public String getServerUnav() {
		return serverUnav;
	}

	public void setServerUnav(String serverUnav) {
		this.serverUnav = serverUnav;
	}

	public String getMethodNotAllowed() {
		return methodNotAllowed;
	}

	public void setMethodNotAllowed(String methodNotAllowed) {
		this.methodNotAllowed = methodNotAllowed;
	}

	public String getBadRequest() {
		return badRequest;
	}

	public void setBadRequest(String badRequest) {
		this.badRequest = badRequest;
	}

	public String getNwTimeout() {
		return nwTimeout;
	}

	public void setNwTimeout(String nwTimeout) {
		this.nwTimeout = nwTimeout;
	}

	public String getNotPermitted1() {
		return notPermitted1;
	}

	public void setNotPermitted1(String notPermitted1) {
		this.notPermitted1 = notPermitted1;
	}

	public String getNotPermitted2() {
		return notPermitted2;
	}

	public void setNotPermitted2(String notPermitted2) {
		this.notPermitted2 = notPermitted2;
	}

	public String getTopicsfailure() {
		return topicsfailure;
	}

	public void setTopicsfailure(String topicsfailure) {
		this.topicsfailure = topicsfailure;
	}

	public String getTopicDetailsFail() {
		return topicDetailsFail;
	}

	public void setTopicDetailsFail(String topicDetailsFail) {
		this.topicDetailsFail = topicDetailsFail;
	}

	public String getCreateTopicFail() {
		return createTopicFail;
	}

	public void setCreateTopicFail(String createTopicFail) {
		this.createTopicFail = createTopicFail;
	}

	public String getIncorrectJson() {
		return incorrectJson;
	}

	public void setIncorrectJson(String incorrectJson) {
		this.incorrectJson = incorrectJson;
	}

	public String getDeleteTopicFail() {
		return deleteTopicFail;
	}

	public void setDeleteTopicFail(String deleteTopicFail) {
		this.deleteTopicFail = deleteTopicFail;
	}

	public String getConsumeMsgError() {
		return consumeMsgError;
	}

	public void setConsumeMsgError(String consumeMsgError) {
		this.consumeMsgError = consumeMsgError;
	}

	public String getPublishMsgError() {
		return publishMsgError;
	}

	public void setPublishMsgError(String publishMsgError) {
		this.publishMsgError = publishMsgError;
	}

	public String getPublishMsgCount() {
		return publishMsgCount;
	}

	public String getAuthFailure() {
		return authFailure;
	}

	public void setAuthFailure(String authFailure) {
		this.authFailure = authFailure;
	}

	public void setPublishMsgCount(String publishMsgCount) {
		this.publishMsgCount = publishMsgCount;
	}

	public String getTopicNotExist() {
		return topicNotExist;
	}

	public void setTopicNotExist(String topicNotExist) {
		this.topicNotExist = topicNotExist;
	}

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java
new file mode 100644
index 0000000..1302686
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+
/**
 * Application-level error response codes for MR (Message Router). The
 * numeric value of each constant is sent to clients alongside the HTTP
 * status, so values must stay stable.
 *
 * @author author
 */
public enum DMaaPResponseCode {

	// ----- generic -----
	RESOURCE_NOT_FOUND(3001),
	SERVER_UNAVAILABLE(3002),
	METHOD_NOT_ALLOWED(3003),
	GENERIC_INTERNAL_ERROR(1004),

	// ----- AAF (authentication / authorization) -----
	INVALID_CREDENTIALS(4001),
	ACCESS_NOT_PERMITTED(4002),
	UNABLE_TO_AUTHORIZE(4003),

	// ----- publish and subscribe -----
	MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001),
	UNABLE_TO_PUBLISH(5002),
	INCORRECT_BATCHING_FORMAT(5003),
	MSG_SIZE_EXCEEDS_MSG_LIMIT(5004),
	INCORRECT_JSON(5005),
	CONN_TIMEOUT(5006),
	PARTIAL_PUBLISH_MSGS(5007),
	CONSUME_MSG_ERROR(5008),
	PUBLISH_MSG_ERROR(5009),
	RETRIEVE_TRANSACTIONS(5010),
	RETRIEVE_TRANSACTIONS_DETAILS(5011),
	TOO_MANY_REQUESTS(5012),

	RATE_LIMIT_EXCEED(301),

	// ----- topics -----
	GET_TOPICS_FAIL(6001),
	GET_TOPICS_DETAILS_FAIL(6002),
	CREATE_TOPIC_FAIL(6003),
	DELETE_TOPIC_FAIL(6004),
	GET_PUBLISHERS_BY_TOPIC(6005),
	GET_CONSUMERS_BY_TOPIC(6006),
	PERMIT_PUBLISHER_FOR_TOPIC(6007),
	REVOKE_PUBLISHER_FOR_TOPIC(6008),
	PERMIT_CONSUMER_FOR_TOPIC(6009),
	REVOKE_CONSUMER_FOR_TOPIC(6010),
	GET_CONSUMER_CACHE(6011),
	DROP_CONSUMER_CACHE(6012),
	GET_METRICS_ERROR(6013),
	GET_BLACKLIST(6014),
	ADD_BLACKLIST(6015),
	REMOVE_BLACKLIST(6016),
	TOPIC_NOT_IN_AAF(6017);

	/** Numeric application error code reported to clients. */
	private final int responseCode;

	DMaaPResponseCode(final int code) {
		responseCode = code;
	}

	/**
	 * @return the numeric application error code
	 */
	public int getResponseCode() {
		return responseCode;
	}

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java
new file mode 100644
index 0000000..f526eb5
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java
@@ -0,0 +1,137 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.InternalServerErrorException;
+import javax.ws.rs.NotAllowedException;
+import javax.ws.rs.NotAuthorizedException;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.ServiceUnavailableException;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+//import org.apache.log-4j.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Exception Mapper class to handle
+ * Jersey Exceptions
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
+
+ //private static final Logger LOGGER = Logger
+ // .getLogger(DMaaPWebExceptionMapper.class);
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
+ private ErrorResponse errRes;
+
+ @Autowired
+ private DMaaPErrorMessages msgs;
+
+ public DMaaPWebExceptionMapper() {
+ super();
+ LOGGER.info("WebException Mapper Created..");
+ }
+
+ @Override
+ public Response toResponse(WebApplicationException ex) {
+
+ LOGGER.info("Reached WebException Mapper");
+
+ /**
+ * Resource Not Found
+ */
+ if(ex instanceof NotFoundException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
+
+ LOGGER.info(errRes.toString());
+
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+
+ }
+
+ if(ex instanceof InternalServerErrorException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+
+ LOGGER.info(errRes.toString());
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+
+ }
+
+ if(ex instanceof NotAuthorizedException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
+
+ LOGGER.info(errRes.toString());
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+
+ if(ex instanceof BadRequestException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
+
+ LOGGER.info(errRes.toString());
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+ if(ex instanceof NotAllowedException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed());
+
+ LOGGER.info(errRes.toString());
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+
+ if(ex instanceof ServiceUnavailableException)
+ {
+ errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+
+ LOGGER.info(errRes.toString());
+ return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+
+
+ return Response.serverError().build();
+ }
+
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java
new file mode 100644
index 0000000..3bc5364
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java
@@ -0,0 +1,135 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
+import org.json.JSONObject;
+/**
+ * Represents the Error Response Object
+ * that is rendered as a JSON object when
+ * an exception or error occurs on MR Rest Service.
+ * @author author
+ *
+ */
+//@XmlRootElement
+public class ErrorResponse {
+
+ private int httpStatusCode;
+ private int mrErrorCode;
+ private String errorMessage;
+ private String helpURL;
+ private String statusTs;
+ private String topic;
+ private String publisherId;
+ private String publisherIp;
+ private String subscriberId;
+ private String subscriberIp;
+
+
+ public ErrorResponse(int httpStatusCode, int mrErrorCode,
+ String errorMessage, String helpURL, String statusTs, String topic,
+ String publisherId, String publisherIp, String subscriberId,
+ String subscriberIp) {
+ super();
+ this.httpStatusCode = httpStatusCode;
+ this.mrErrorCode = mrErrorCode;
+ this.errorMessage = errorMessage;
+ this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
+ this.statusTs = statusTs;
+ this.topic = topic;
+ this.publisherId = publisherId;
+ this.publisherIp = publisherIp;
+ this.subscriberId = subscriberId;
+ this.subscriberIp = subscriberIp;
+ }
+
+ public ErrorResponse(int httpStatusCode, int mrErrorCode,
+ String errorMessage) {
+ super();
+ this.httpStatusCode = httpStatusCode;
+ this.mrErrorCode = mrErrorCode;
+ this.errorMessage = errorMessage;
+ this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
+
+ }
+
+ public int getHttpStatusCode() {
+ return httpStatusCode;
+ }
+
+ public void setHttpStatusCode(int httpStatusCode) {
+ this.httpStatusCode = httpStatusCode;
+ }
+
+ public int getMrErrorCode() {
+ return mrErrorCode;
+ }
+
+
+ public void setMrErrorCode(int mrErrorCode) {
+ this.mrErrorCode = mrErrorCode;
+ }
+
+
+ public String getErrorMessage() {
+ return errorMessage;
+ }
+
+ public void setErrorMessage(String errorMessage) {
+ this.errorMessage = errorMessage;
+ }
+
+ public String getHelpURL() {
+ return helpURL;
+ }
+
+ public void setHelpURL(String helpURL) {
+ this.helpURL = helpURL;
+ }
+
+ @Override
+ public String toString() {
+ return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode
+ + "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\""
+ + errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\""
+ + ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\""
+ + ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\""
+ + ", \"subscriberIp\":\""+subscriberIp+"\"}";
+ }
+
+ public String getErrMapperStr1() {
+ return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage="
+ + errorMessage + ", helpURL=" + helpURL + "]";
+ }
+
+
+
+ public JSONObject getErrMapperStr() {
+ JSONObject o = new JSONObject();
+ o.put("status", getHttpStatusCode());
+ o.put("mrstatus", getMrErrorCode());
+ o.put("message", getErrorMessage());
+ o.put("helpURL", getHelpURL());
+ return o;
+ }
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java
new file mode 100644
index 0000000..35c0b27
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints
+ * @author author
+ *
+ */
+public class CambriaServletContextListener implements ServletContextListener {
+
+ DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+// private static Logger log = Logger.getLogger(CambriaServletContextListener.class);
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
+
+
+ @Override
+
+ /**
+ * contextDestroyed() loads unpublished end points
+ * @param arg0
+ */
+ public void contextDestroyed(ServletContextEvent arg0) {
+ log.info("CambriaServletContextListener contextDestroyed");
+
+ loader.unPublishEndPoints();
+ }
+
+ @Override
+ /**
+ * contextInitialized() loads published end points
+ * @param arg0
+ */
+ public void contextInitialized(ServletContextEvent arg0) {
+ log.info("CambriaServletContextListener contextInitialized");
+ loader.publishEndPoints();
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java
new file mode 100644
index 0000000..9332aeb
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java
@@ -0,0 +1,124 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl.EventsServiceImpl;
+
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.api.DME2Manager;
+import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class DME2EndPointLoader {
+
+ private String latitude;
+ private String longitude;
+ private String version;
+ private String serviceName;
+ private String env;
+ private String routeOffer;
+ private String hostName;
+ private String port;
+ private String contextPath;
+ private String protocol;
+ private String serviceURL;
+ private static DME2EndPointLoader loader = new DME2EndPointLoader();
+// private static final Logger LOG = LoggerFactory.getLogger(EventsServiceImpl.class);
+ private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+ private DME2EndPointLoader() {
+ }
+
+ public static DME2EndPointLoader getInstance() {
+ return loader;
+ }
+
+ /**
+ * publishing endpoints
+ */
+ public void publishEndPoints() {
+
+ try {
+ InputStream input = this.getClass().getResourceAsStream("/endpoint.properties");
+ Properties props = new Properties();
+ props.load(input);
+
+ latitude = props.getProperty("Latitude");
+ longitude = props.getProperty("Longitude");
+ version = props.getProperty("Version");
+ serviceName = props.getProperty("ServiceName");
+ env = props.getProperty("Environment");
+ routeOffer = props.getProperty("RouteOffer");
+ hostName = props.getProperty("HostName");
+ port = props.getProperty("Port");
+ contextPath = props.getProperty("ContextPath");
+ protocol = props.getProperty("Protocol");
+
+ System.setProperty("AFT_LATITUDE", latitude);
+ System.setProperty("AFT_LONGITUDE", longitude);
+ System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+ serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/"
+ + "routeOffer=" + routeOffer;
+
+ DME2Manager manager = new DME2Manager("testEndpointPublish", props);
+ manager.setClientCredentials("sh301n", "");
+ DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+ // Publish API takes service name, context path, hostname, port and
+ // protocol as args
+ svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol);
+
+ } catch (IOException | DME2Exception e) {
+ LOG.error("Failed due to :" + e);
+ }
+
+ }
+/**
+ * unpublishing endpoints
+ */
+ public void unPublishEndPoints() {
+
+ DME2Manager manager;
+ try {
+ System.setProperty("AFT_LATITUDE", latitude);
+ System.setProperty("AFT_LONGITUDE", longitude);
+ System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+ manager = DME2Manager.getDefaultInstance();
+ DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+ svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port));
+ } catch (DME2Exception e) {
+ LOG.error("Failed due to DME2Exception" + e);
+ }
+
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java
new file mode 100644
index 0000000..9634cc2
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker;
+
+import java.util.List;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
/**
 * A broker interface to manage metadata around topics, etc.
 *
 * @author author
 *
 */
public interface Broker {
	/**
	 * Thrown when an attempt is made to create a topic that already exists.
	 *
	 * @author author
	 *
	 */
	public class TopicExistsException extends Exception {
		/**
		 * Builds the message from the conflicting topic name.
		 *
		 * @param topicName name of the topic that already exists
		 */
		public TopicExistsException(String topicName) {
			super("Topic " + topicName + " exists.");
		}

		private static final long serialVersionUID = 1L;
	}

	/**
	 * Get all topics in the underlying broker.
	 *
	 * @return the list of known topics
	 * @throws ConfigDbException if the config store cannot be read
	 */
	List<Topic> getAllTopics() throws ConfigDbException;

	/**
	 * Get a specific topic from the underlying broker.
	 *
	 * @param topic the topic name
	 * @return a topic, or null
	 * @throws ConfigDbException if the config store cannot be read
	 */
	Topic getTopic(String topic) throws ConfigDbException;

	/**
	 * Create a topic.
	 *
	 * @param topic the topic name
	 * @param description human-readable description of the topic
	 * @param ownerApiKey API key of the topic owner
	 * @param partitions number of partitions for the topic
	 * @param replicas replication factor for the topic
	 * @param transactionEnabled whether transactions are enabled on the topic
	 * @return the newly created topic
	 * @throws TopicExistsException if a topic with this name already exists
	 * @throws CambriaApiException if the broker rejects the creation
	 */
	Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
			boolean transactionEnabled) throws TopicExistsException, CambriaApiException;

	/**
	 * Delete a topic by name.
	 *
	 * @param topic the topic name
	 * @throws AccessDeniedException if the caller may not delete the topic
	 * @throws CambriaApiException if the broker rejects the deletion
	 * @throws TopicExistsException if the topic state conflicts with the request
	 */
	void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java
new file mode 100644
index 0000000..f38a4a6
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource;
/**
 * This is the interface for a topic and all topic-related operations:
 * get topic name, owner, description, transactionEnabled etc.
 * ACL-based read/write access control comes from {@link ReadWriteSecuredResource},
 * including the {@code AccessDeniedException} type thrown below.
 * @author author
 *
 */
public interface Topic extends ReadWriteSecuredResource
{
	// (A local AccessDeniedException class that used to live here, commented
	// out, has been removed; the inherited one from ReadWriteSecuredResource
	// is the type used by the check/permit/deny methods below.)

	/**
	 * Get this topic's name.
	 * @return the topic name
	 */
	String getName ();

	/**
	 * Get the API key of the owner of this topic.
	 * @return the owner's API key
	 */
	String getOwner ();

	/**
	 * Get a description of the topic, as set by the owner at creation time.
	 * @return the topic description
	 */
	String getDescription ();

	/**
	 * Whether the topic is transaction enabled.
	 * @return boolean true/false
	 */
	boolean isTransactionEnabled();

	/**
	 * Get the ACL for reading on this topic. Can be null.
	 * @return the reader ACL, or null
	 */
	NsaAcl getReaderAcl ();

	/**
	 * Get the ACL for writing on this topic. Can be null.
	 * @return the writer ACL, or null
	 */
	NsaAcl getWriterAcl ();

	/**
	 * Check if this user can read the topic. Throw otherwise. Note that
	 * user may be null.
	 * @param user the caller's API key, possibly null
	 * @throws AccessDeniedException if the user may not read
	 */
	void checkUserRead ( NsaApiKey user ) throws AccessDeniedException;

	/**
	 * Check if this user can write to the topic. Throw otherwise. Note
	 * that user may be null.
	 * @param user the caller's API key, possibly null
	 * @throws AccessDeniedException if the user may not write
	 */
	void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException;

	/**
	 * Allow the given user to publish.
	 * @param publisherId id to grant write access to
	 * @param asUser the caller making the change
	 * @throws AccessDeniedException if the caller may not change the ACL
	 * @throws ConfigDbException if the config store cannot be updated
	 */
	void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;

	/**
	 * Deny the given user from publishing.
	 * @param publisherId id to revoke write access from
	 * @param asUser the caller making the change
	 * @throws AccessDeniedException if the caller may not change the ACL
	 * @throws ConfigDbException if the config store cannot be updated
	 */
	void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;

	/**
	 * Allow the given user to read the topic.
	 * @param consumerId id to grant read access to
	 * @param asUser the caller making the change
	 * @throws AccessDeniedException if the caller may not change the ACL
	 * @throws ConfigDbException if the config store cannot be updated
	 */
	void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;

	/**
	 * Deny the given user from reading the topic.
	 * @param consumerId id to revoke read access from
	 * @param asUser the caller making the change
	 * @throws AccessDeniedException if the caller may not change the ACL
	 * @throws ConfigDbException if the config store cannot be updated
	 */
	void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
new file mode 100644
index 0000000..de6044a
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
/**
 * A Cambria batching publisher is a publisher with additional functionality
 * for managing delayed sends.
 *
 * @author author
 *
 */
public interface CambriaBatchingPublisher extends CambriaPublisher
{
	/**
	 * Get the number of messages that have not yet been sent.
	 * @return the number of pending messages
	 */
	int getPendingMessageCount ();

	/**
	 * Close this publisher, sending any remaining messages.
	 * @param timeout an amount of time to wait for unsent messages to be sent
	 * @param timeoutUnits the time unit for the timeout arg
	 * @return a list of any unsent messages after the timeout
	 * @throws IOException if sending fails
	 * @throws InterruptedException if the wait is interrupted
	 */
	List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java
new file mode 100644
index 0000000..f5ac924
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+//import org.slf4j.Logger;
+
+//
+import com.att.eelf.configuration.EELFLogger;
+//import com.att.eelf.configuration.EELFManager;
+
/**
 * Base interface for Cambria clients: logging, credential handling, and
 * connection close.
 *
 * @author author
 *
 */
public interface CambriaClient {
	/**
	 * An exception at the Cambria layer. This is used when the HTTP transport
	 * layer returns a success code but the transaction is not completed as
	 * expected.
	 */
	public class CambriaApiException extends Exception {
		/**
		 * Create an exception with a message only.
		 * @param msg the failure description
		 */
		public CambriaApiException(String msg) {
			super(msg);
		}

		/**
		 * Create an exception with a message and an underlying cause.
		 * @param msg the failure description
		 * @param t the underlying cause
		 */
		public CambriaApiException(String msg, Throwable t) {
			super(msg, t);
		}

		private static final long serialVersionUID = 1L;
	}

	/**
	 * Optionally set the Logger to use.
	 *
	 * @param log the logger this client should write to
	 */
	void logTo(EELFLogger log);

	/**
	 * Set the API credentials for this client connection. Subsequent calls will
	 * include authentication headers.
	 *
	 * @param apiKey the API key
	 * @param apiSecret the API secret
	 */
	void setApiCredentials(String apiKey, String apiSecret);

	/**
	 * Remove API credentials, if any, on this connection. Subsequent calls will
	 * not include authentication headers.
	 */
	void clearApiCredentials();

	/**
	 * Close this connection. Some client interfaces have additional close
	 * capability.
	 */
	void close();
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java
new file mode 100644
index 0000000..ad2613f
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+
/**
 * This interface provides the fetch mechanism for a consumer.
 * @author author
 *
 */
public interface CambriaConsumer extends CambriaClient
{
	/**
	 * Fetch a set of messages. The consumer's timeout and message limit are
	 * used if set in the constructor call.
	 *
	 * @return a set of messages
	 * @throws IOException if there's a problem connecting to the server
	 */
	Iterable<String> fetch () throws IOException;

	/**
	 * Fetch a set of messages with an explicit timeout and limit for this call. These values
	 * override any set in the constructor call.
	 *
	 * @param timeoutMs The amount of time in milliseconds that the server should keep the connection
	 * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side).
	 * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit.
	 * @return a set of messages
	 * @throws IOException if there's a problem connecting to the server
	 */
	Iterable<String> fetch ( int timeoutMs, int limit ) throws IOException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java
new file mode 100644
index 0000000..9b3ac12
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+import java.util.Collection;
+
/**
 * A Cambria publishing interface.
 *
 * @author author
 *
 */
public interface CambriaPublisher extends CambriaClient {
	/**
	 * A simple message container pairing a partition key with a message body.
	 * Immutable once constructed.
	 */
	public static class message {
		/**
		 * Build a message for the given partition. A null partition is
		 * normalized to the empty string; a null message is rejected.
		 *
		 * @param partition the partition key, may be null
		 * @param msg the message body, must not be null
		 * @throws IllegalArgumentException if msg is null
		 */
		public message(String partition, String msg) {
			fPartition = partition == null ? "" : partition;
			fMsg = msg;
			if (fMsg == null) {
				throw new IllegalArgumentException("Can't send a null message.");
			}
		}

		/**
		 * Copy constructor.
		 *
		 * @param msg the message to copy
		 */
		public message(message msg) {
			this(msg.fPartition, msg.fMsg);
		}

		/**
		 * The partition key; never null (empty string when unspecified).
		 */
		public final String fPartition;
		/**
		 * The message body; never null.
		 */
		public final String fMsg;
	}

	/**
	 * Send the given message using the given partition.
	 *
	 * @param partition the partition key
	 * @param msg the message body
	 * @return the number of pending messages
	 * @throws IOException if sending fails
	 */
	int send(String partition, String msg) throws IOException;

	/**
	 * Send the given message using its partition.
	 *
	 * @param msg the message to send
	 * @return the number of pending messages
	 * @throws IOException if sending fails
	 */
	int send(message msg) throws IOException;

	/**
	 * Send the given messages using their partitions.
	 *
	 * @param msgs the messages to send
	 * @return the number of pending messages
	 * @throws IOException if sending fails
	 */
	int send(Collection<message> msgs) throws IOException;

	/**
	 * Close this publisher. It's an error to call send() after close()
	 */
	void close();
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
new file mode 100644
index 0000000..066a2f3
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
@@ -0,0 +1,146 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.http.HttpHost;
+/**
+ *
+ * @author author
+ *
+ */
+public class CambriaPublisherUtility
+{
+ public static final String kBasePath = "/events/";
+ public static final int kStdCambriaServicePort = 3904;
+/**
+ *
+ * Translates a string into <code>application/x-www-form-urlencoded</code>
+ * format using a specific encoding scheme.
+ * @param s
+ * @return
+ *
+ */
+ public static String escape ( String s )
+ {
+ try
+ {
+ return URLEncoder.encode ( s, "UTF-8");
+ }
+ catch ( UnsupportedEncodingException e )
+ {
+ throw new RuntimeException ( e );
+ }
+ }
+/**
+ *
+ * building url
+ * @param rawTopic
+ * @return
+ */
+ public static String makeUrl ( String rawTopic )
+ {
+ final String cleanTopic = escape ( rawTopic );
+
+ final StringBuffer url = new StringBuffer().
+ append ( CambriaPublisherUtility.kBasePath ).
+ append ( cleanTopic );
+ return url.toString ();
+ }
+/**
+ *
+ * building consumerUrl
+ * @param topic
+ * @param rawConsumerGroup
+ * @param rawConsumerId
+ * @return
+ */
+ public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
+ {
+ final String cleanConsumerGroup = escape ( rawConsumerGroup );
+ final String cleanConsumerId = escape ( rawConsumerId );
+ return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
+ }
+
+ /**
+ * Create a list of HttpHosts from an input list of strings. Input strings have
+ * host[:port] as format. If the port section is not provided, the default port is used.
+ *
+ * @param hosts
+ * @return a list of hosts
+ */
+ public static List<HttpHost> createHostsList(Collection<String> hosts)
+ {
+ final ArrayList<HttpHost> convertedHosts = new ArrayList<HttpHost> ();
+ for ( String host : hosts )
+ {
+ if ( host.length () == 0 ) continue;
+ convertedHosts.add ( hostForString ( host ) );
+ }
+ return convertedHosts;
+ }
+
+ /**
+ * Return an HttpHost from an input string. Input string has
+ * host[:port] as format. If the port section is not provided, the default port is used.
+ *
+ * @param hosts
+ * @return a list of hosts
+ * if host.length<1 throws IllegalArgumentException
+ *
+ */
+ public static HttpHost hostForString ( String host )
+ {
+ if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." );
+
+ String hostPart = host;
+ int port = kStdCambriaServicePort;
+
+ final int colon = host.indexOf ( ':' );
+ if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." );
+ if ( colon > 0 )
+ {
+ hostPart = host.substring ( 0, colon ).trim();
+
+ final String portPart = host.substring ( colon + 1 ).trim();
+ if ( portPart.length () > 0 )
+ {
+ try
+ {
+ port = Integer.parseInt ( portPart );
+ }
+ catch ( NumberFormatException x )
+ {
+ throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x );
+ }
+ }
+ // else: use default port on "foo:"
+ }
+
+ return new HttpHost ( hostPart, port );
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
new file mode 100644
index 0000000..1f32511
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
@@ -0,0 +1,423 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
+
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.TreeSet;
+import java.util.UUID;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+
+/**
+ * A factory for Cambria clients.<br/>
+ * <br/>
+ * Use caution selecting a consumer creator factory. If the call doesn't accept
+ * a consumer group name, then it creates a consumer that is not restartable.
+ * That is, if you stop your process and start it again, your client will NOT
+ * receive any missed messages on the topic. If you need to ensure receipt of
+ * missed messages, then you must use a consumer that's created with a group
+ * name and ID. (If you create multiple consumer processes using the same group,
+ * load is split across them. Be sure to use a different ID for each instance.)<br/>
+ * <br/>
+ * Publishers
+ *
+ * @author author
+ */
+public class DMaaPCambriaClientFactory {
+ /**
+ * Create a consumer instance with the default timeout and no limit on
+ * messages returned. This consumer operates as an independent consumer
+ * (i.e., not in a group) and is NOT re-startable across sessions.
+ *
+ * @param hostList
+ * A comma separated list of hosts to use to connect to Cambria.
+ * You can include port numbers (3904 is the default).
+ *
+ * @param topic
+ * The topic to consume
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(String hostList, String topic) {
+ return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
+ topic);
+ }
+
+ /**
+ * Create a consumer instance with the default timeout and no limit on
+ * messages returned. This consumer operates as an independent consumer
+ * (i.e., not in a group) and is NOT re-startable across sessions.
+ *
+ * @param hostSet
+ * The host used in the URL to Cambria. Entries can be
+ * "host:port".
+ * @param topic
+ * The topic to consume
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(Collection<String> hostSet,
+ String topic) {
+ return createConsumer(hostSet, topic, null);
+ }
+
+ /**
+ * Create a consumer instance with server-side filtering, the default
+ * timeout, and no limit on messages returned. This consumer operates as an
+ * independent consumer (i.e., not in a group) and is NOT re-startable
+ * across sessions.
+ *
+ * @param hostSet
+ * The host used in the URL to Cambria. Entries can be
+ * "host:port".
+ * @param topic
+ * The topic to consume
+ * @param filter
+ * a filter to use on the server side
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(Collection<String> hostSet,
+ String topic, String filter) {
+ return createConsumer(hostSet, topic, UUID.randomUUID().toString(),
+ "0", -1, -1, filter, null, null);
+ }
+
+ /**
+ * Create a consumer instance with the default timeout, and no limit on
+ * messages returned. This consumer can operate in a logical group and is
+ * re-startable across sessions when you use the same group and ID on
+ * restart.
+ *
+ * @param hostSet
+ * The host used in the URL to Cambria. Entries can be
+ * "host:port".
+ * @param topic
+ * The topic to consume
+ * @param consumerGroup
+ * The name of the consumer group this consumer is part of
+ * @param consumerId
+ * The unique id of this consume in its group
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(Collection<String> hostSet,
+ final String topic, final String consumerGroup,
+ final String consumerId) {
+ return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1);
+ }
+
+ /**
+ * Create a consumer instance with the default timeout, and no limit on
+ * messages returned. This consumer can operate in a logical group and is
+ * re-startable across sessions when you use the same group and ID on
+ * restart.
+ *
+ * @param hostSet
+ * The host used in the URL to Cambria. Entries can be
+ * "host:port".
+ * @param topic
+ * The topic to consume
+ * @param consumerGroup
+ * The name of the consumer group this consumer is part of
+ * @param consumerId
+ * The unique id of this consume in its group
+ * @param timeoutMs
+ * The amount of time in milliseconds that the server should keep
+ * the connection open while waiting for message traffic. Use -1
+ * for default timeout.
+ * @param limit
+ * A limit on the number of messages returned in a single call.
+ * Use -1 for no limit.
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(Collection<String> hostSet,
+ final String topic, final String consumerGroup,
+ final String consumerId, int timeoutMs, int limit) {
+ return createConsumer(hostSet, topic, consumerGroup, consumerId,
+ timeoutMs, limit, null, null, null);
+ }
+
+ /**
+ * Create a consumer instance with the default timeout, and no limit on
+ * messages returned. This consumer can operate in a logical group and is
+ * re-startable across sessions when you use the same group and ID on
+ * restart. This consumer also uses server-side filtering.
+ *
+ * @param hostList
+ * A comma separated list of hosts to use to connect to Cambria.
+ * You can include port numbers (3904 is the default).
+ * @param topic
+ * The topic to consume
+ * @param consumerGroup
+ * The name of the consumer group this consumer is part of
+ * @param consumerId
+ * The unique id of this consume in its group
+ * @param timeoutMs
+ * The amount of time in milliseconds that the server should keep
+ * the connection open while waiting for message traffic. Use -1
+ * for default timeout.
+ * @param limit
+ * A limit on the number of messages returned in a single call.
+ * Use -1 for no limit.
+ * @param filter
+ * A Highland Park filter expression using only built-in filter
+ * components. Use null for "no filter".
+ * @param apiKey
+ * key associated with a user
+ * @param apiSecret
+ * of a user
+ *
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(String hostList,
+ final String topic, final String consumerGroup,
+ final String consumerId, int timeoutMs, int limit, String filter,
+ String apiKey, String apiSecret) {
+ return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
+ topic, consumerGroup, consumerId, timeoutMs, limit, filter,
+ apiKey, apiSecret);
+ }
+
+ /**
+ * Create a consumer instance with the default timeout, and no limit on
+ * messages returned. This consumer can operate in a logical group and is
+ * re-startable across sessions when you use the same group and ID on
+ * restart. This consumer also uses server-side filtering.
+ *
+ * @param hostSet
+ * The host used in the URL to Cambria. Entries can be
+ * "host:port".
+ * @param topic
+ * The topic to consume
+ * @param consumerGroup
+ * The name of the consumer group this consumer is part of
+ * @param consumerId
+ * The unique id of this consume in its group
+ * @param timeoutMs
+ * The amount of time in milliseconds that the server should keep
+ * the connection open while waiting for message traffic. Use -1
+ * for default timeout.
+ * @param limit
+ * A limit on the number of messages returned in a single call.
+ * Use -1 for no limit.
+ * @param filter
+ * A Highland Park filter expression using only built-in filter
+ * components. Use null for "no filter".
+ * @param apiKey
+ * key associated with a user
+ * @param apiSecret
+ * of a user
+ * @return a consumer
+ */
+ public static CambriaConsumer createConsumer(Collection<String> hostSet,
+ final String topic, final String consumerGroup,
+ final String consumerId, int timeoutMs, int limit, String filter,
+ String apiKey, String apiSecret) {
+ if (sfMock != null)
+ return sfMock;
+ try {
+ return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
+ consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /*************************************************************************/
+ /*************************************************************************/
+ /*************************************************************************/
+
+ /**
+ * Create a publisher that sends each message (or group of messages)
+ * immediately. Most applications should favor higher latency for much
+ * higher message throughput and the "simple publisher" is not a good
+ * choice.
+ *
+ * @param hostlist
+ * The host used in the URL to Cambria. Can be "host:port", can
+ * be multiple comma-separated entries.
+ * @param topic
+ * The topic on which to publish messages.
+ * @return a publisher
+ */
+ public static CambriaBatchingPublisher createSimplePublisher(
+ String hostlist, String topic) {
+ return createBatchingPublisher(hostlist, topic, 1, 1);
+ }
+
+ /**
+ * Create a publisher that batches messages. Be sure to close the publisher
+ * to send the last batch and ensure a clean shutdown. Message payloads are
+ * not compressed.
+ *
+ * @param hostlist
+ * The host used in the URL to Cambria. Can be "host:port", can
+ * be multiple comma-separated entries.
+ * @param topic
+ * The topic on which to publish messages.
+ * @param maxBatchSize
+ * The largest set of messages to batch
+ * @param maxAgeMs
+ * The maximum age of a message waiting in a batch
+ *
+ * @return a publisher
+ */
+ public static CambriaBatchingPublisher createBatchingPublisher(
+ String hostlist, String topic, int maxBatchSize, long maxAgeMs) {
+ return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs,
+ false);
+ }
+
+ /**
+ * Create a publisher that batches messages. Be sure to close the publisher
+ * to send the last batch and ensure a clean shutdown.
+ *
+ * @param hostlist
+ * The host used in the URL to Cambria. Can be "host:port", can
+ * be multiple comma-separated entries.
+ * @param topic
+ * The topic on which to publish messages.
+ * @param maxBatchSize
+ * The largest set of messages to batch
+ * @param maxAgeMs
+ * The maximum age of a message waiting in a batch
+ * @param compress
+ * use gzip compression
+ *
+ * @return a publisher
+ */
+ public static CambriaBatchingPublisher createBatchingPublisher(
+ String hostlist, String topic, int maxBatchSize, long maxAgeMs,
+ boolean compress) {
+ return createBatchingPublisher(
+ DMaaPCambriaConsumerImpl.stringToList(hostlist), topic,
+ maxBatchSize, maxAgeMs, compress);
+ }
+
+ /**
+ * Create a publisher that batches messages. Be sure to close the publisher
+ * to send the last batch and ensure a clean shutdown.
+ *
+ * @param hostSet
+ * A set of hosts to be used in the URL to Cambria. Can be
+ * "host:port". Use multiple entries to enable failover.
+ * @param topic
+ * The topic on which to publish messages.
+ * @param maxBatchSize
+ * The largest set of messages to batch
+ * @param maxAgeMs
+ * The maximum age of a message waiting in a batch
+ * @param compress
+ * use gzip compression
+ *
+ * @return a publisher
+ */
+ public static CambriaBatchingPublisher createBatchingPublisher(
+ String[] hostSet, String topic, int maxBatchSize, long maxAgeMs,
+ boolean compress) {
+ final TreeSet<String> hosts = new TreeSet<String>();
+ for (String hp : hostSet) {
+ hosts.add(hp);
+ }
+ return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs,
+ compress);
+ }
+
+ /**
+ * Create a publisher that batches messages. Be sure to close the publisher
+ * to send the last batch and ensure a clean shutdown.
+ *
+ * @param hostSet
+ * A set of hosts to be used in the URL to Cambria. Can be
+ * "host:port". Use multiple entries to enable failover.
+ * @param topic
+ * The topic on which to publish messages.
+ * @param maxBatchSize
+ * The largest set of messages to batch
+ * @param maxAgeMs
+ * The maximum age of a message waiting in a batch
+ * @param compress
+ * use gzip compression
+ *
+ * @return a publisher
+ */
+ public static CambriaBatchingPublisher createBatchingPublisher(
+ Collection<String> hostSet, String topic, int maxBatchSize,
+ long maxAgeMs, boolean compress) {
+ return new DMaaPCambriaSimplerBatchPublisher.Builder()
+ .againstUrls(hostSet).onTopic(topic)
+ .batchTo(maxBatchSize, maxAgeMs).compress(compress).build();
+ }
+
+ /**
+ * Create an identity manager client to work with API keys.
+ *
+ * @param hostSet
+ * A set of hosts to be used in the URL to Cambria. Can be
+ * "host:port". Use multiple entries to enable failover.
+ * @param apiKey
+ * Your API key
+ * @param apiSecret
+ * Your API secret
+ * @return an identity manager
+ */
+ /*
+ * public static CambriaIdentityManager createIdentityManager (
+ * Collection<String> hostSet, String apiKey, String apiSecret ) { final
+ * CambriaIdentityManager cim = new CambriaMetaClient ( hostSet );
+ * cim.setApiCredentials ( apiKey, apiSecret ); return cim; }
+ */
+
+ /**
+ * Create a topic manager for working with topics.
+ *
+ * @param hostSet
+ * A set of hosts to be used in the URL to Cambria. Can be
+ * "host:port". Use multiple entries to enable failover.
+ * @param apiKey
+ * Your API key
+ * @param apiSecret
+ * Your API secret
+ * @return a topic manager
+ */
+ /*
+ * public static CambriaTopicManager createTopicManager ( Collection<String>
+ * hostSet, String apiKey, String apiSecret ) { final CambriaMetaClient tmi
+ * = new CambriaMetaClient ( hostSet ); tmi.setApiCredentials ( apiKey,
+ * apiSecret ); return tmi; }
+ */
+
+ /**
+ * Inject a consumer. Used to support unit tests.
+ *
+ * @param cc
+ */
+ public static void $testInject(CambriaConsumer cc) {
+ sfMock = cc;
+ }
+
+ private static CambriaConsumer sfMock = null;
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
new file mode 100644
index 0000000..397e818
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
@@ -0,0 +1,99 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
+
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.apiClient.http.CacheUse;
+import com.att.nsa.apiClient.http.HttpClient;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class CambriaBaseClient extends HttpClient implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaClient
+{
+ protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
+ {
+ this ( hosts, null );
+ }
+
+ protected CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
+ {
+// super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
+// CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );
+ super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
+
+ //fLog = LoggerFactory.getLogger ( this.getClass().getName () );
+ fLog = EELFManager.getInstance().getLogger(this.getClass().getName());
+ //( this.getClass().getName () );
+ }
+
+ @Override
+ public void close ()
+ {
+ }
+
+ protected Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
+ {
+ if ( a == null ) return null;
+
+ final TreeSet<String> set = new TreeSet<String> ();
+ for ( int i=0; i<a.length (); i++ )
+ {
+ set.add ( a.getString ( i ));
+ }
+ return set;
+ }
+ /**
+ * @param log
+ */
+ public void logTo ( EELFLogger log )
+ {
+ fLog = log;
+
+ //replaceLogger ( log );
+ }
+
+ protected EELFLogger getLog ()
+ {
+ return fLog;
+ }
+
+ private EELFLogger fLog;
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java
new file mode 100644
index 0000000..e7531c7
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
+
+/**
+ *
+ * This class maintains the system clocks
+ * @author author
+ *
+ */
/**
 * Provides the system clock behind a replaceable singleton so tests can
 * substitute a controlled time source.
 *
 * @author author
 */
public class Clock
{
	// the active clock; lazily created, replaceable via register()
	private static Clock sfClock = null;

	/**
	 * Get the active clock instance, creating the default system clock on
	 * first use.
	 *
	 * @return the active clock
	 */
	public static synchronized Clock getIt ()
	{
		if ( sfClock == null )
		{
			sfClock = new Clock ();
		}
		return sfClock;
	}

	/**
	 * Get the system's current time in milliseconds, as reported by the
	 * active clock.
	 *
	 * @return the current time in ms
	 */
	public static long now ()
	{
		return getIt ().nowImpl ();
	}

	/**
	 * Current time in ms for this clock implementation; subclasses override
	 * to control time in tests.
	 *
	 * @return current time in ms
	 */
	protected long nowImpl ()
	{
		return System.currentTimeMillis ();
	}

	/** Only subclasses and the singleton accessor create instances. */
	protected Clock ()
	{
	}

	/** Install a replacement clock (used by tests). */
	protected static synchronized void register ( Clock testClock )
	{
		sfClock = testClock;
	}
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
new file mode 100644
index 0000000..332c8b1
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
@@ -0,0 +1,170 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URLEncoder;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+import jline.internal.Log;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+import com.att.nsa.apiClient.http.HttpException;
+import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
+ implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaConsumer {
+ private final String fTopic;
+ private final String fGroup;
+ private final String fId;
+ private final int fTimeoutMs;
+ private final int fLimit;
+ private final String fFilter;
+
+ /**
+ *
+ * @param hostPart
+ * @param topic
+ * @param consumerGroup
+ * @param consumerId
+ * @param timeoutMs
+ * @param limit
+ * @param filter
+ * @param apiKey
+ * @param apiSecret
+ * @throws MalformedURLException
+ */
+ public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
+ final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
+ super(hostPart, topic + "::" + consumerGroup + "::" + consumerId);
+
+ fTopic = topic;
+ fGroup = consumerGroup;
+ fId = consumerId;
+ fTimeoutMs = timeoutMs;
+ fLimit = limit;
+ fFilter = filter;
+
+ setApiCredentials(apiKey, apiSecret);
+ }
+
+ /**
+ * method converts String to list
+ *
+ * @param str
+ * @return
+ */
+ public static List<String> stringToList(String str) {
+ final LinkedList<String> set = new LinkedList<String>();
+ if (str != null) {
+ final String[] parts = str.trim().split(",");
+ for (String part : parts) {
+ final String trimmed = part.trim();
+ if (trimmed.length() > 0) {
+ set.add(trimmed);
+ }
+ }
+ }
+ return set;
+ }
+
+ @Override
+ public Iterable<String> fetch() throws IOException {
+ // fetch with the timeout and limit set in constructor
+ return fetch(fTimeoutMs, fLimit);
+ }
+
+ @Override
+ public Iterable<String> fetch(int timeoutMs, int limit) throws IOException {
+ final LinkedList<String> msgs = new LinkedList<String>();
+
+ final String urlPath = createUrlPath(timeoutMs, limit);
+
+ getLog().info("UEB GET " + urlPath);
+ try {
+ final JSONObject o = get(urlPath);
+
+ if (o != null) {
+ final JSONArray a = o.getJSONArray("result");
+ if (a != null) {
+ for (int i = 0; i < a.length(); i++) {
+ msgs.add(a.getString(i));
+ }
+ }
+ }
+ } catch (HttpObjectNotFoundException e) {
+ // this can happen if the topic is not yet created. ignore.
+ Log.error("Failed due to topic is not yet created" + e);
+ } catch (JSONException e) {
+ // unexpected response
+ reportProblemWithResponse();
+ Log.error("Failed due to jsonException", e);
+ } catch (HttpException e) {
+ throw new IOException(e);
+ }
+
+ return msgs;
+ }
+
+ protected String createUrlPath(int timeoutMs, int limit) {
+ final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
+ final StringBuilder adds = new StringBuilder();
+ if (timeoutMs > -1) {
+ adds.append("timeout=").append(timeoutMs);
+ }
+
+ if (limit > -1) {
+ if (adds.length() > 0) {
+ adds.append("&");
+ }
+ adds.append("limit=").append(limit);
+ }
+ if (fFilter != null && fFilter.length() > 0) {
+ try {
+ if (adds.length() > 0) {
+ adds.append("&");
+ }
+ adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8"));
+ } catch (UnsupportedEncodingException e) {
+ Log.error("Failed due to UnsupportedEncodingException" + e);
+ }
+ }
+ if (adds.length() > 0) {
+ url.append("?").append(adds.toString());
+ }
+ return url.toString();
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
new file mode 100644
index 0000000..2b9bad4
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
@@ -0,0 +1,430 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.GZIPOutputStream;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+
+/**
+ *
+ * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages
+ * in batch
+ *
+ * @author author
+ *
+ */
+public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
+ implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaBatchingPublisher {
+ /**
+ *
+ * static inner class initializes with urls, topic,batchSize
+ *
+ * @author author
+ *
+ */
+ public static class Builder {
+ public Builder() {
+ }
+
+ /**
+ * constructor initialize with url
+ *
+ * @param baseUrls
+ * @return
+ *
+ */
+ public Builder againstUrls(Collection<String> baseUrls) {
+ fUrls = baseUrls;
+ return this;
+ }
+
+ /**
+ * constructor initializes with topics
+ *
+ * @param topic
+ * @return
+ *
+ */
+ public Builder onTopic(String topic) {
+ fTopic = topic;
+ return this;
+ }
+
+ /**
+ * constructor initilazes with batch size and batch time
+ *
+ * @param maxBatchSize
+ * @param maxBatchAgeMs
+ * @return
+ *
+ */
+ public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) {
+ fMaxBatchSize = maxBatchSize;
+ fMaxBatchAgeMs = maxBatchAgeMs;
+ return this;
+ }
+
+ /**
+ * constructor initializes with compress
+ *
+ * @param compress
+ * @return
+ */
+ public Builder compress(boolean compress) {
+ fCompress = compress;
+ return this;
+ }
+
+ /**
+ * method returns DMaaPCambriaSimplerBatchPublisher object
+ *
+ * @return
+ */
+ public DMaaPCambriaSimplerBatchPublisher build() {
+ try {
+ return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private Collection<String> fUrls;
+ private String fTopic;
+ private int fMaxBatchSize = 100;
+ private long fMaxBatchAgeMs = 1000;
+ private boolean fCompress = false;
+ };
+
+ /**
+ *
+ * @param partition
+ * @param msg
+ */
+ @Override
+ public int send(String partition, String msg) {
+ return send(new message(partition, msg));
+ }
+
+ /**
+ * @param msg
+ */
+ @Override
+ public int send(message msg) {
+ final LinkedList<message> list = new LinkedList<message>();
+ list.add(msg);
+ return send(list);
+ }
+
+ /**
+ * @param msgs
+ */
+ @Override
+ public synchronized int send(Collection<message> msgs) {
+ if (fClosed) {
+ throw new IllegalStateException("The publisher was closed.");
+ }
+
+ for (message userMsg : msgs) {
+ fPending.add(new TimestampedMessage(userMsg));
+ }
+ return getPendingMessageCount();
+ }
+
	/**
	 * Get the number of messages queued but not yet delivered.
	 *
	 * @return the pending message count
	 */
	@Override
	public synchronized int getPendingMessageCount() {
		return fPending.size();
	}
+
+ /**
+ * Flushes pending messages and shuts the publisher down, waiting
+ * indefinitely for the queue to drain. Failures are logged as possible
+ * message loss rather than rethrown, preserving the Closeable-style
+ * no-throw contract.
+ */
+ @Override
+ public void close() {
+ try {
+ final List<message> remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
+ if (remains.size() > 0) {
+ getLog().warn("Closing publisher with " + remains.size() + " messages unsent. "
+ + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
+ }
+ } catch (InterruptedException e) {
+ getLog().warn("Possible message loss. " + e.getMessage(), e);
+ // restore the interrupt flag so callers up the stack can observe it
+ Thread.currentThread().interrupt();
+ } catch (IOException e) {
+ getLog().warn("Possible message loss. " + e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Stops the background sender, then keeps force-flushing until the queue
+ * is empty or the timeout elapses; whatever remains queued is drained and
+ * returned to the caller so no message is silently dropped.
+ *
+ * @param time maximum time to wait for the queue to drain
+ * @param unit unit of time
+ * @return the messages still unsent when the timeout expired (empty on a
+ *         clean drain)
+ */
+ @Override
+ public List<message> close(long time, TimeUnit unit) throws IOException, InterruptedException {
+ synchronized (this) {
+ fClosed = true;
+
+ // stop the background sender
+ fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
+ fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
+ fExec.shutdown();
+ }
+
+ final long now = Clock.now();
+ final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit);
+ final long timeoutAtMs = now + waitInMs;
+
+ // poll outside the lock so send(true) can synchronize on its own
+ while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) {
+ send(true);
+ Thread.sleep(250);
+ }
+ // synchronizing the current object
+ synchronized (this) {
+ final LinkedList<message> result = new LinkedList<message>();
+ fPending.drainTo(result);
+ return result;
+ }
+ }
+
+ /**
+ * Possibly send a batch to the cambria server. This is called by the
+ * background thread and the close() method.
+ *
+ * @param force when true, send regardless of the batch-size/age thresholds
+ */
+ private synchronized void send(boolean force) {
+ if (force || shouldSendNow()) {
+ if (!sendBatch()) {
+ getLog().warn("Send failed, " + fPending.size() + " message to send.");
+
+ // note the time for back-off
+ fDontSendUntilMs = sfWaitAfterError + Clock.now();
+ }
+ }
+ }
+
+ /**
+ * Decides whether the pending queue should be flushed: either the batch
+ * size threshold is reached, or the oldest queued message has aged past
+ * fMaxBatchAgeMs — unless we are still inside the post-error back-off
+ * window (fDontSendUntilMs).
+ *
+ * @return true if a batch should be sent now
+ */
+ private synchronized boolean shouldSendNow() {
+ boolean shouldSend = false;
+ if (fPending.size() > 0) {
+ final long nowMs = Clock.now();
+
+ shouldSend = (fPending.size() >= fMaxBatchSize);
+ if (!shouldSend) {
+ final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs;
+ shouldSend = sendAtMs <= nowMs;
+ }
+
+ // however, wait after an error
+ shouldSend = shouldSend && nowMs >= fDontSendUntilMs;
+ }
+ return shouldSend;
+ }
+
+ /**
+ * Attempts to POST the entire pending queue to the local Cambria/MR
+ * endpoint as a single "application/cambria" batch. On success the queue
+ * is cleared; on failure the messages stay queued and false is returned
+ * so the caller can apply its error back-off.
+ *
+ * @return true if the batch was accepted for delivery (or was empty)
+ */
+ private synchronized boolean sendBatch() {
+ // it's possible for this call to be made with an empty list. in this
+ // case, just return.
+ if (fPending.size() < 1) {
+ return true;
+ }
+
+ final long nowMs = Clock.now();
+ final String url = CambriaPublisherUtility.makeUrl(fTopic);
+
+ getLog().info("sending " + fPending.size() + " msgs to " + url + ". Oldest: "
+ + (nowMs - fPending.peek().timestamp) + " ms");
+
+ Client client = null;
+ try {
+ // Frame each message as <partLen>.<msgLen>.<partition><msg>\n.
+ // FIXME: the lengths are char counts while the payload is written via
+ // getBytes() (platform charset); these disagree for non-ASCII text.
+ final ByteArrayOutputStream baseStream = new ByteArrayOutputStream();
+ OutputStream os = baseStream;
+ if (fCompress) {
+ os = new GZIPOutputStream(baseStream);
+ }
+ for (TimestampedMessage m : fPending) {
+ os.write(("" + m.fPartition.length()).getBytes());
+ os.write('.');
+ os.write(("" + m.fMsg.length()).getBytes());
+ os.write('.');
+ os.write(m.fPartition.getBytes());
+ os.write(m.fMsg.getBytes());
+ os.write('\n');
+ }
+ os.close();
+
+ // POST the batch to the standard local service port
+ client = ClientBuilder.newClient();
+ WebTarget target = client
+ .target("http://localhost:" + CambriaConstants.kStdCambriaServicePort);
+ target = target.path("/events/" + fTopic);
+ getLog().info("url : " + target.getUri().toString());
+
+ Entity<byte[]> data = Entity.entity(baseStream.toByteArray(), "application/cambria");
+
+ Response response = target.request().post(data);
+ try {
+ getLog().info("Response received :: " + response.getStatus());
+ getLog().info("Response received :: " + response.toString());
+ } finally {
+ // release the connection held by the response
+ response.close();
+ }
+
+ fPending.clear();
+ return true;
+ } catch (IllegalArgumentException x) {
+ getLog().warn(x.getMessage(), x);
+ } catch (IOException x) {
+ getLog().warn(x.getMessage(), x);
+ } finally {
+ if (client != null) {
+ // JAX-RS clients hold pooled connections; always release them to
+ // avoid leaking one per 50 ms scheduler tick after failures
+ client.close();
+ }
+ }
+ return false;
+ }
+
+ private final String fTopic; // destination topic
+ private final int fMaxBatchSize; // flush threshold: message count
+ private final long fMaxBatchAgeMs; // flush threshold: oldest-message age
+ private final boolean fCompress; // gzip batch payloads when true
+ private boolean fClosed; // guarded by 'this'
+
+ private final LinkedBlockingQueue<TimestampedMessage> fPending;
+ private long fDontSendUntilMs; // back-off deadline after a send failure
+ private final ScheduledThreadPoolExecutor fExec;
+
+ // how long to back off after a failed batch send, in ms
+ private static final long sfWaitAfterError = 1000;
+
+ /**
+ * Creates the publisher and starts a single-threaded scheduler that
+ * attempts a (non-forced) batch send every 50 ms.
+ *
+ * @param hosts Cambria/MR hosts, passed to the base connector
+ * @param topic destination topic (required, non-empty)
+ * @param maxBatchSize flush threshold, in messages
+ * @param maxBatchAgeMs flush threshold, oldest-message age in ms
+ * @param compress true to gzip batch payloads
+ * @throws MalformedURLException if a host entry is not a valid URL
+ */
+ private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
+ long maxBatchAgeMs, boolean compress) throws MalformedURLException {
+
+ super(hosts);
+
+ if (topic == null || topic.length() < 1) {
+ throw new IllegalArgumentException("A topic must be provided.");
+ }
+
+ fClosed = false;
+ fTopic = topic;
+ fMaxBatchSize = maxBatchSize;
+ fMaxBatchAgeMs = maxBatchAgeMs;
+ fCompress = compress;
+
+ fPending = new LinkedBlockingQueue<TimestampedMessage>();
+ fDontSendUntilMs = 0;
+
+ // background sender: initial delay 100 ms, then every 50 ms
+ fExec = new ScheduledThreadPoolExecutor(1);
+ fExec.scheduleAtFixedRate(new Runnable() {
+ @Override
+ public void run() {
+ send(false);
+ }
+ }, 100, 50, TimeUnit.MILLISECONDS);
+ }
+
+ /**
+ * A queued message plus the wall-clock time it entered the queue, used to
+ * enforce the max-batch-age flush rule.
+ *
+ * @author author
+ *
+ */
+ private static class TimestampedMessage extends message {
+ /**
+ * time (Clock.now()) at which this message entered the queue
+ */
+ public final long timestamp;
+
+ /**
+ * Wraps an existing message, stamping it with the current time.
+ *
+ * @param m the message to wrap
+ *
+ */
+ public TimestampedMessage(message m) {
+ super(m);
+ timestamp = Clock.now();
+ }
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java
new file mode 100644
index 0000000..9eb1691
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java
@@ -0,0 +1,115 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.GZIPInputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaJsonStreamReader;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaRawStreamReader;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaStreamReader;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaTextStreamReader;
+
+import com.att.nsa.apiServer.streams.ChunkedInputStream;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+
+/**
+ * An inbound event set.
+ *
+ * @author author
+ */
+public class CambriaEventSet {
+ // media-type-specific parser chosen in the constructor
+ private final reader fReader;
+
+ /**
+ * Chooses a stream reader for the request body based on media type:
+ * "application/json" (chunking not supported), "application/cambria",
+ * "application/cambria-zip" (gzipped cambria framing), "text/plain"
+ * (one message per line), or anything else as a single raw message.
+ *
+ * @param mediaType request content type
+ * @param originalStream request body stream
+ * @param chunked true if the body uses chunked transfer encoding
+ * @param defPartition default partition for messages that carry none
+ * @throws CambriaApiException on an unusable media-type/stream combination
+ */
+ public CambriaEventSet(String mediaType, InputStream originalStream,
+ boolean chunked, String defPartition) throws CambriaApiException {
+ InputStream is = originalStream;
+ if (chunked) {
+ is = new ChunkedInputStream(originalStream);
+ }
+
+ if (("application/json").equals(mediaType)) {
+ if (chunked) {
+ throw new CambriaApiException(
+ HttpServletResponse.SC_BAD_REQUEST,
+ "The JSON stream reader doesn't support chunking.");
+ }
+ fReader = new CambriaJsonStreamReader(is, defPartition);
+ } else if (("application/cambria").equals(mediaType)) {
+ fReader = new CambriaStreamReader(is);
+ } else if (("application/cambria-zip").equals(mediaType)) {
+ try {
+ is = new GZIPInputStream(is);
+ } catch (IOException e) {
+ throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+ "Couldn't read compressed format: " + e);
+ }
+ fReader = new CambriaStreamReader(is);
+ } else if (("text/plain").equals(mediaType)) {
+ fReader = new CambriaTextStreamReader(is, defPartition);
+ } else {
+ fReader = new CambriaRawStreamReader(is, defPartition);
+ }
+ }
+
+ /**
+ * Get the next message from this event set. Returns null when the end of
+ * stream is reached. Will block until a message arrives (or the stream is
+ * closed/broken).
+ *
+ * @return a message, or null
+ * @throws IOException
+ * @throws CambriaApiException
+ */
+ public message next() throws IOException, CambriaApiException {
+ return fReader.next();
+ }
+
+ /**
+ * Pull-style source of parsed messages; implemented once per media type.
+ *
+ * @author author
+ *
+ */
+ public interface reader {
+ /**
+ * @return the next parsed message, or null at end of stream
+ * @throws IOException on a broken input stream
+ * @throws CambriaApiException on malformed input
+ */
+ message next() throws IOException, CambriaApiException;
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java
new file mode 100644
index 0000000..9fbc7f7
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java
@@ -0,0 +1,516 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer.Message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder.StreamWriter;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+
+
+/**
+ * class used to write the consumed messages
+ *
+ * @author author
+ *
+ */
+public class CambriaOutboundEventStream implements StreamWriter {
+ private static final int kTopLimit = 1024 * 4;
+
+ /**
+ *
+ * static innerclass it takes all the input parameter for kafka consumer
+ * like limit, timeout, meta, pretty
+ *
+ * @author author
+ *
+ */
+ public static class Builder {
+
+ // Required
+ private final Consumer fConsumer;
+
+ // Optional; defaults mean "no limit, no timeout, no filter, compact output"
+ private int fLimit;
+ private int fTimeoutMs;
+ private String fTopicFilter;
+ private boolean fPretty;
+ private boolean fWithMeta;
+
+ /**
+ * Creates a builder around the consumer whose messages will be streamed.
+ *
+ * @param c the backend consumer to read from
+ */
+ public Builder(Consumer c) {
+ this.fConsumer = c;
+
+ // BUGFIX: the original assigned kNoTimeout to fLimit and kNoLimit to
+ // fTimeoutMs (constants swapped); use the matching constant for each.
+ fLimit = CambriaConstants.kNoLimit;
+ fTimeoutMs = CambriaConstants.kNoTimeout;
+ fTopicFilter = CambriaConstants.kNoFilter;
+ fPretty = false;
+ fWithMeta = false;
+ }
+
+ /**
+ * Caps the number of messages streamed to the client.
+ *
+ * @param l
+ * only l no of messages will be consumed
+ * @return this builder, for chaining
+ */
+ public Builder limit(int l) {
+ this.fLimit = l;
+ return this;
+ }
+
+ /**
+ * Sets how long to wait when no message is immediately available.
+ *
+ * @param t
+ * if there is no message to consume, them DMaaP will wait
+ * for t time
+ * @return this builder, for chaining
+ */
+ public Builder timeout(int t) {
+ this.fTimeoutMs = t;
+ return this;
+ }
+
+ /**
+ * Sets a server-side message filter expression.
+ *
+ * @param f
+ * filter
+ * @return this builder, for chaining
+ */
+ public Builder filter(String f) {
+ this.fTopicFilter = f;
+ return this;
+ }
+
+ /**
+ * Emits a newline after each message when enabled.
+ *
+ * @param p
+ * messages print in new line
+ * @return this builder, for chaining
+ */
+ public Builder pretty(boolean p) {
+ fPretty = p;
+ return this;
+ }
+
+ /**
+ * Wraps each message in a JSON object carrying its offset when enabled.
+ *
+ * @param withMeta,
+ * along with messages offset will print
+ * @return this builder, for chaining
+ */
+ public Builder withMeta(boolean withMeta) {
+ fWithMeta = withMeta;
+ return this;
+ }
+
+ /**
+ * Builds the event stream from the accumulated settings.
+ *
+ * @return a new CambriaOutboundEventStream
+ * @throws CambriaApiException if the configuration is rejected
+ */
+ public CambriaOutboundEventStream build() throws CambriaApiException {
+ return new CambriaOutboundEventStream(this);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ /**
+ * Copies configuration from the builder. The commented-out section below
+ * is the old HP alarm-filter wiring, retained for reference.
+ *
+ * @param builder
+ * @throws CambriaApiException
+ *
+ */
+ private CambriaOutboundEventStream(Builder builder) throws CambriaApiException {
+ fConsumer = builder.fConsumer;
+ fLimit = builder.fLimit;
+ fTimeoutMs = builder.fTimeoutMs;
+ //fSettings = builder.fSettings;
+ fSent = 0;
+ fPretty = builder.fPretty;
+ fWithMeta = builder.fWithMeta;
+
+// if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
+// fHpAlarmFilter = null;
+// fHppe = null;
+// } else {
+// try {
+// final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
+// HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
+// fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
+// final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
+// fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
+// } catch (HpReaderException e) {
+// // JSON was okay, but the filter engine says it's bogus
+// throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+// "Couldn't create filter: " + e.getMessage());
+// } catch (JSONException e) {
+// // user sent a bogus JSON object
+// throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+// "Couldn't parse JSON: " + e.getMessage());
+// }
+// }
+ }
+
+ /**
+ * Callbacks used by forEachMessage(): onWait while polling for messages,
+ * onMessage for each message that passes the filter.
+ *
+ */
+ public interface operation {
+ /**
+ * Called between polls when no message is available; typically sleeps.
+ * @throws IOException
+ */
+ void onWait() throws IOException;
+/**
+ * Called once per delivered message with its running index.
+ * @param count zero-based count of messages already delivered
+ * @param msg the consumed message
+ * @throws IOException
+ */
+ void onMessage(int count, Message msg) throws IOException;
+ }
+
+ /**
+ *
+ * @return the number of messages written by the last write() call
+ */
+ public int getSentCount() {
+ return fSent;
+ }
+
+ @Override
+ /**
+ * Streams consumed messages to the client as a JSON array, optionally
+ * wrapping each entry with its offset (fWithMeta) and newline-separating
+ * entries (fPretty). When transactions are enabled each raw message is
+ * itself a JSON wrapper {"message":..., "transactionId":...} that is
+ * unwrapped here; the last transactionId seen is echoed back as a
+ * response header.
+ *
+ * @param os the response output stream
+ * throws IOException
+ */
+ public void write(final OutputStream os) throws IOException {
+ //final boolean transactionEnabled = topic.isTransactionEnabled();
+ //final boolean transactionEnabled = isTransEnabled();
+ final boolean transactionEnabled = istransEnable;
+ os.write('[');
+
+ fSent = forEachMessage(new operation() {
+ @Override
+ public void onMessage(int count, Message msg) throws IOException, JSONException {
+
+ String message = "";
+ JSONObject jsonMessage = null;
+ if (transactionEnabled) {
+ // unwrap the transaction envelope to get the user payload
+ jsonMessage = new JSONObject(msg.getMessage());
+ message = jsonMessage.getString("message");
+ }
+
+ // comma-separate entries after the first
+ if (count > 0) {
+ os.write(',');
+ }
+
+ if (fWithMeta) {
+ final JSONObject entry = new JSONObject();
+ entry.put("offset", msg.getOffset());
+ entry.put("message", message);
+ os.write(entry.toString().getBytes());
+ } else {
+ //os.write(message.getBytes());
+ // emit the payload as a JSON string value
+ String jsonString = "";
+ if(transactionEnabled){
+ jsonString= JSONObject.valueToString(message);
+ }else{
+ jsonString = JSONObject.valueToString (msg.getMessage());
+ }
+ os.write ( jsonString.getBytes () );
+ }
+
+ if (fPretty) {
+ os.write('\n');
+ }
+
+
+ // skip consumer-side transaction logging for the internal metrics topic
+ String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+ if (null==metricTopicname)
+ metricTopicname="msgrtr.apinode.metrics.dmaap";
+
+ if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
+ if (transactionEnabled) {
+ final String transactionId = jsonMessage.getString("transactionId");
+ responseTransactionId = transactionId;
+
+ StringBuilder consumerInfo = new StringBuilder();
+ if (null != dmaapContext && null != dmaapContext.getRequest()) {
+ final HttpServletRequest request = dmaapContext.getRequest();
+ consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
+ consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
+ consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
+ consumerInfo.append(
+ "consumerGroup= \"" + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
+ consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+ }
+
+ log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transactionId
+ + "\",messageLength= \"" + message.length() + "\",topic= \"" + topic.getName() + "\"]");
+ }
+ }
+
+ }
+
+ @Override
+ /**
+ *
+ * It makes thread to wait
+ * @throws IOException
+ */
+ public void onWait() throws IOException {
+ os.flush(); // likely totally unnecessary for a network socket
+ try {
+ // FIXME: would be good to wait/signal
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+ }
+ });
+
+ //if (null != dmaapContext && isTransactionEnabled()) {
+ if (null != dmaapContext && istransEnable) {
+
+ dmaapContext.getResponse().setHeader("transactionId",
+ Utils.getResponseTransactionId(responseTransactionId));
+ }
+
+ os.write(']');
+ os.flush();
+
+ // whether to close the response stream is configurable
+ boolean close_out_stream = true;
+ String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"close.output.stream");
+ if(null!=strclose_out_stream)close_out_stream=Boolean.parseBoolean(strclose_out_stream);
+
+ //if (fSettings.getBoolean("close.output.stream", true)) {
+ if (close_out_stream) {
+ os.close();
+ }
+ }
+
+ /**
+ *
+ * @param requestURI
+ * @return
+ */
+ private String getConsumerGroupFromRequest(String requestURI) {
+ if (null != requestURI && !requestURI.isEmpty()) {
+
+ String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7);
+
+ int startIndex = consumerDetails.indexOf("/") + 1;
+ int endIndex = consumerDetails.lastIndexOf("/");
+ return consumerDetails.substring(startIndex, endIndex);
+ }
+ return null;
+ }
+/**
+ * Polls the consumer until at least one matching message is delivered or
+ * the timeout expires, invoking op.onMessage per match and op.onWait
+ * between empty polls. At most fLimit (or kTopLimit when fLimit is 0)
+ * messages are delivered.
+ *
+ * @param op callbacks for message delivery and idle waits
+ * @return the number of messages delivered
+ * @throws IOException
+ * @throws JSONException
+ */
+ public int forEachMessage(operation op) throws IOException, JSONException {
+ final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
+
+ int count = 0;
+ boolean firstPing = true;
+
+ final long startMs = System.currentTimeMillis();
+ final long timeoutMs = fTimeoutMs + startMs;
+
+ // keep polling until something was delivered or the deadline passes
+ while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
+ if (!firstPing) {
+ op.onWait();
+ }
+ firstPing = false;
+
+ Consumer.Message msg = null;
+ while (count < effectiveLimit && (msg = fConsumer.nextMessage()) != null) {
+
+
+ String message = "";
+ // if (topic.isTransactionEnabled() || true) {
+ if (istransEnable) {
+ // As part of DMaaP changes we are wrapping the original
+ // message into a json object
+ // and then this json object is further wrapped into message
+ // object before publishing,
+ // so extracting the original message from the message
+ // object for matching with filter.
+ final JSONObject jsonMessage = new JSONObject(msg.getMessage());
+ message = jsonMessage.getString("message");
+ } else {
+ message = msg.getMessage();
+ }
+
+ // If filters are enabled/set, message should be in JSON format
+ // for filters to work for
+ // otherwise filter will automatically ignore message in
+ // non-json format.
+ if (filterMatches(message)) {
+ op.onMessage(count, msg);
+ count++;
+ }
+ }
+ }
+
+ return count;
+ }
+
+ /**
+ *
+ * Checks whether filter is initialized
+ */
+// private boolean isFilterInitialized() {
+// return (fHpAlarmFilter != null && fHppe != null);
+// }
+
+ /**
+ * Always returns true today: the HP filter engine is disabled (see the
+ * commented-out code); kept so callers retain a single filtering hook.
+ *
+ * @param msg candidate message body
+ * @return true if the message should be delivered
+ */
+ private boolean filterMatches(String msg) {
+ boolean result = true;
+// if (isFilterInitialized()) {
+// try {
+// final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
+// result = fHpAlarmFilter.matches(fHppe, e);
+// } catch (JSONException x) {
+// // the msg may not be JSON
+// result = false;
+// log.error("Failed due to " + x.getMessage());
+// } catch (Exception x) {
+// log.error("Error using filter: " + x.getMessage(), x);
+// }
+// }
+
+ return result;
+ }
+
+ /** @return the per-request DMaaP context, if set */
+ public DMaaPContext getDmaapContext() {
+ return dmaapContext;
+ }
+
+ /** @param dmaapContext the per-request DMaaP context */
+ public void setDmaapContext(DMaaPContext dmaapContext) {
+ this.dmaapContext = dmaapContext;
+ }
+
+ /** @return the topic being consumed */
+ public Topic getTopic() {
+ return topic;
+ }
+
+ /** @param topic the topic being consumed */
+ public void setTopic(Topic topic) {
+ this.topic = topic;
+ }
+
+ /** @param aaftopic true when the topic is AAF-protected */
+ public void setTopicStyle(boolean aaftopic) {
+ this.isAAFTopic = aaftopic;
+ }
+
+ /** @param transEnable true to unwrap transaction envelopes in write() */
+ public void setTransEnabled ( boolean transEnable) {
+ this.istransEnable = transEnable;
+ }
+
+ /*private boolean isTransactionEnabled() {
+ //return topic.isTransactionEnabled();
+ return true; // let metrics creates for all the topics
+ }*/
+
+ // True when the "transidUEBtopicreqd" property is "true" or the topic is
+ // AAF-protected. NOTE(review): appears unused — write() reads the
+ // istransEnable field set via setTransEnabled() instead; confirm before
+ // removing.
+ private boolean isTransEnabled() {
+ String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+ boolean istransidreqd=false;
+ if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) || isAAFTopic){
+ istransidreqd = true;
+ }
+
+ return istransidreqd;
+
+ }
+
+ private final Consumer fConsumer; // backend message source
+ private final int fLimit; // max messages per request (0 = default cap)
+ private final int fTimeoutMs; // how long to poll when queue is empty
+ //private final rrNvReadable fSettings;
+ private final boolean fPretty; // newline after each message
+ private final boolean fWithMeta; // wrap messages with their offset
+ private int fSent; // messages written by the last write()
+// private final HpAlarmFilter<HpJsonEvent> fHpAlarmFilter;
+// private final HpProcessingEngine<HpJsonEvent> fHppe;
+ private DMaaPContext dmaapContext; // per-request context, set by caller
+ private String responseTransactionId; // last transactionId seen in write()
+ private Topic topic;
+ private boolean isAAFTopic = false;
+ private boolean istransEnable = false;
+
+
+ //private static final Logger log = Logger.getLogger(CambriaOutboundEventStream.class);
+
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
new file mode 100644
index 0000000..5aefe2d
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
@@ -0,0 +1,171 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
+
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ *
+ * @author author
+ *
+ */
+public class CambriaJsonStreamReader implements reader {
+ private final JSONTokener fTokens; // tokenizer over the request body
+ private final boolean fIsList; // true when the body is a JSON array
+ private long fCount; // number of objects read so far
+ private final String fDefPart; // default partition when none in message
+ // optional per-message field naming the partition key
+ public static final String kKeyField = "cambria.partition";
+
+ /**
+ * Reads the first token to decide between a single object ('{') and an
+ * array of objects ('['); anything else is rejected with a 400.
+ *
+ * @param is request body
+ * @param defPart default partition key
+ * @throws CambriaApiException on malformed JSON
+ */
+ public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException {
+ try {
+ fTokens = new JSONTokener(is);
+ fCount = 0;
+ fDefPart = defPart;
+
+ final int c = fTokens.next();
+ if (c == '[') {
+ fIsList = true;
+ } else if (c == '{') {
+ // push the '{' back so the JSONObject constructor sees it
+ fTokens.back();
+ fIsList = false;
+ } else {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
+ }
+ } catch (JSONException e) {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+ }
+ }
+
+ /**
+ * Parses and returns the next JSON object from the stream, or null at end
+ * of input. In list mode, a ',' or bare newline (char 10) between objects
+ * is tolerated; a ']' or trailing newline ends the stream.
+ *
+ * @return the next message, or null at end of stream
+ * @throws CambriaApiException on malformed JSON (mapped to 400)
+ */
+ @Override
+ public message next() throws CambriaApiException {
+ try {
+ if (!fTokens.more()) {
+ return null;
+ }
+
+ final int c = fTokens.next();
+
+ /*if (c ==','){
+ fCloseCount++;
+ System.out.println("fCloseCount=" + fCloseCount +" fCount "+fCount);
+ }*/
+ if (fIsList) {
+ // end of array, or a newline after at least one object
+ if (c == ']' || (fCount > 0 && c == 10))
+ return null;
+
+
+ // between objects only ',' or newline (10) are tolerated
+ if (fCount > 0 && c != ',' && c!= 10) {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+ "Expected ',' or closing ']' after last object.");
+ }
+
+ // before the first object: '{', newline, or space (32) allowed
+ if (fCount == 0 && c != '{' && c!= 10 && c!=32) {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object.");
+ }
+ } else if (fCount != 0 || c != '{') {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object.");
+ }
+
+ if (c == '{') {
+ fTokens.back();
+ }
+ final JSONObject o = new JSONObject(fTokens);
+ fCount++;
+ return new msg(o);
+ } catch (JSONException e) {
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+
+ }
+ }
+
+ /**
+ * A parsed JSON message: the partition comes from the "cambria.partition"
+ * field, the builder default, or (when both are null) the current time.
+ */
+ private class msg implements message {
+ private final String fKey; // partition key
+ private String fMsg; // serialized JSON payload
+ private LogDetails logDetails;
+ private boolean transactionEnabled;
+
+ /**
+ * Captures the partition key and the object's serialized form.
+ *
+ * @param o the parsed JSON object
+ */
+ //public msg(JSONObject o){}
+
+
+ public msg(JSONObject o) {
+ String key = o.optString(kKeyField, fDefPart);
+ if (key == null) {
+ // no partition anywhere: fall back to a time-based key
+ key = "" + System.currentTimeMillis();
+ }
+ fKey = key;
+
+ fMsg = o.toString().trim();
+
+ }
+
+ @Override
+ public String getKey() {
+ return fKey;
+ }
+
+ @Override
+ public String getMessage() {
+ return fMsg;
+ }
+
+ @Override
+ public boolean isTransactionEnabled() {
+ return transactionEnabled;
+ }
+
+ @Override
+ public void setTransactionEnabled(boolean transactionEnabled) {
+ this.transactionEnabled = transactionEnabled;
+ }
+
+ @Override
+ public void setLogDetails(LogDetails logDetails) {
+ this.logDetails = logDetails;
+ }
+
+ @Override
+ public LogDetails getLogDetails() {
+ return logDetails;
+ }
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
new file mode 100644
index 0000000..f0ec225
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
@@ -0,0 +1,142 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
+
+import com.att.nsa.util.StreamTools;
+
+/**
+ *
+ * This stream reader reads raw bytes creating a single message.
+ * @author author
+ *
+ */
+public class CambriaRawStreamReader implements reader
+{
+	/**
+	 * Constructs a reader that consumes the entire input stream as one message.
+	 * @param is the raw input stream to read
+	 * @param defPart the default partition key, or null to key by current time
+	 * @throws CambriaApiException
+	 */
+	public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException
+	{
+		fStream = is;
+		fDefPart = defPart;
+		fClosed = false;
+	}
+
+	@Override
+	/**
+	 * Reads all remaining bytes from the stream and returns them as a single
+	 * message. Every call after the first returns null, since the stream is
+	 * fully consumed by the first read.
+	 * @return the single raw message, or null once the stream has been read
+	 * @throws CambriaApiException if reading the stream fails
+	 */
+	public message next () throws CambriaApiException
+	{
+		if ( fClosed ) return null;
+
+		try
+		{
+			final byte[] rawBytes = StreamTools.readBytes ( fStream );
+			fClosed = true;
+			return new message ()
+			{
+				private LogDetails logDetails;
+				private boolean transactionEnabled;
+
+				/**
+				 * returns boolean value which
+				 * indicates whether transaction is enabled
+				 */
+				public boolean isTransactionEnabled() {
+					return transactionEnabled;
+				}
+
+				/**
+				 * sets boolean value which
+				 * indicates whether transaction is enabled
+				 */
+				public void setTransactionEnabled(boolean transactionEnabled) {
+					this.transactionEnabled = transactionEnabled;
+				}
+
+				@Override
+				/**
+				 * @return the partition key. It checks whether fDefPart is null:
+				 * if so, it returns System.currentTimeMillis() as a string, else
+				 * it returns the fDefPart value.
+				 */
+				public String getKey ()
+				{
+					return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
+				}
+
+				@Override
+				/**
+				 * returns the message in String type object
+				 */
+				public String getMessage ()
+				{
+					// NOTE(review): decodes with the platform default charset —
+					// presumably payloads are UTF-8; confirm and consider an
+					// explicit charset.
+					return new String ( rawBytes );
+				}
+
+				/**
+				 * set log details in logDetails variable
+				 */
+				@Override
+				public void setLogDetails(LogDetails logDetails) {
+					this.logDetails = logDetails;
+				}
+
+				@Override
+				/**
+				 * get the log details
+				 */
+				public LogDetails getLogDetails() {
+					return this.logDetails;
+				}
+			};
+		}
+		catch ( IOException e )
+		{
+			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
+		}
+	}
+
+	private final InputStream fStream;   // source stream; consumed entirely on first next()
+	private final String fDefPart;       // default partition key, may be null
+	private boolean fClosed;             // true once the stream has been read
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
new file mode 100644
index 0000000..bff6398
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
@@ -0,0 +1,229 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ * Read an optionally chunked stream in the Cambria app format. This format
+ * allows for speedier server-side message parsing than pure JSON. It's looks
+ * like:<br/>
+ * <br/>
+ * &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;<br/>
+ * <br/>
+ * Whitespace before/after each entry is ignored, so messages can be delivered
+ * with newlines between them, or not.
+ *
+ * @author author
+ *
+ */
+public class CambriaStreamReader implements reader {
+	/**
+	 * Constructs a reader over a stream in the Cambria app format.
+	 *
+	 * @param senderStream the input stream to parse
+	 * @throws CambriaApiException
+	 */
+	public CambriaStreamReader(InputStream senderStream) throws CambriaApiException {
+		fStream = senderStream;
+	}
+
+	@Override
+	/**
+	 * Reads the next &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;
+	 * entry from the stream.
+	 *
+	 * @return the next message, or null at end of stream
+	 * @throws IOException on a read failure
+	 * @throws CambriaApiException on a malformed entry
+	 */
+	public message next() throws IOException, CambriaApiException {
+		final int keyLen = readLength();
+		if (keyLen == -1)
+			return null;
+
+		final int msgLen = readLength();
+		final String keyPart = readString(keyLen);
+		final String msgPart = readString(msgLen);
+
+		return new msg(keyPart, msgPart);
+	}
+
+	/**
+	 * A single parsed message: partition key, payload, and transaction metadata.
+	 */
+	private static class msg implements message {
+		/**
+		 * Builds a message from a key and payload.
+		 *
+		 * @param key the partition key; if empty, the current time is used
+		 * @param msg the message payload
+		 */
+		public msg(String key, String msg) {
+			// if no key, use the current time. This allows the message to be
+			// delivered
+			// in any order without forcing it into a single partition as empty
+			// string would.
+			if (key.length() < 1) {
+				key = "" + System.currentTimeMillis();
+			}
+
+			fKey = key;
+			fMsg = msg;
+		}
+
+		@Override
+		/**
+		 * @return the partition key
+		 */
+		public String getKey() {
+			return fKey;
+		}
+
+		@Override
+		/**
+		 * returns the message in String type object
+		 */
+		public String getMessage() {
+			return fMsg;
+		}
+
+		private final String fKey;
+		private final String fMsg;
+		private LogDetails logDetails;
+		private boolean transactionEnabled;
+
+		/**
+		 * returns boolean value which
+		 * indicates whether transaction is enabled
+		 */
+		public boolean isTransactionEnabled() {
+			return transactionEnabled;
+		}
+
+		/**
+		 * sets boolean value which
+		 * indicates whether transaction is enabled
+		 */
+		public void setTransactionEnabled(boolean transactionEnabled) {
+			this.transactionEnabled = transactionEnabled;
+		}
+
+		@Override
+		/**
+		 * set log details in logDetails variable
+		 */
+		public void setLogDetails(LogDetails logDetails) {
+			this.logDetails = logDetails;
+		}
+
+		@Override
+		/**
+		 * get the log details
+		 */
+		public LogDetails getLogDetails() {
+			return this.logDetails;
+		}
+
+	}
+
+	private final InputStream fStream;
+
+	// Maximum accepted value for a length prefix.
+	// This limit is here to prevent the server from spinning on a long string of numbers
+	// that is delivered with 'application/cambria' as the format. The limit needs to be
+	// large enough to support the max message length (currently 1MB, the default Kafka
+	// limit)
+	private static final int kMaxCambriaLength = 4*1000*1024;
+
+
+	/**
+	 * Reads a decimal length prefix terminated by '.'. Leading whitespace is
+	 * ignored, so entries may be separated by newlines.
+	 *
+	 * @return the parsed length, or -1 at end of stream
+	 * @throws IOException on a read failure
+	 * @throws CambriaApiException if the length is malformed or too large
+	 */
+	private int readLength() throws IOException, CambriaApiException {
+		// always ignore leading whitespace
+		int c = fStream.read();
+		while (Character.isWhitespace(c)) {
+			c = fStream.read();
+		}
+
+		if (c == -1) {
+			return -1;
+		}
+
+		int result = 0;
+		while (Character.isDigit(c)) {
+			result = (result * 10) + (c - '0');
+			if (result > kMaxCambriaLength) {
+				// NOTE(review): error message is misleading here — the actual
+				// problem is the declared length exceeding kMaxCambriaLength.
+				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
+			}
+			c = fStream.read();
+		}
+
+		if (c != '.') {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
+		}
+
+		return result;
+	}
+
+	/**
+	 * Reads exactly len bytes from the stream into a String. Gives up if the
+	 * read is still incomplete roughly 30 seconds after it started (the
+	 * timeout is only checked after each read returns).
+	 *
+	 * @param len the number of bytes to read
+	 * @return the bytes read, decoded with the platform default charset
+	 * @throws IOException on a read failure
+	 * @throws CambriaApiException if the stream ends (or times out) before len bytes arrive
+	 */
+	private String readString(int len) throws IOException, CambriaApiException {
+		final byte[] buffer = new byte[len];
+
+		final long startMs = System.currentTimeMillis();
+		final long timeoutMs = startMs + 30000; // FIXME configurable
+
+		int readTotal = 0;
+		while (readTotal < len) {
+			final int read = fStream.read(buffer, readTotal, len - readTotal);
+			if (read == -1 || System.currentTimeMillis() > timeoutMs) {
+				// EOF
+				break;
+			}
+			readTotal += read;
+		}
+
+		if (readTotal < len) {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+					"End of stream while reading " + len + " bytes");
+		}
+
+		return new String(buffer);
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
new file mode 100644
index 0000000..ce2cffd
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
@@ -0,0 +1,140 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ * This stream reader just pulls single lines. It uses the default partition if provided. If
+ * not, the key is the current time, which does not guarantee ordering.
+ *
+ * @author author
+ *
+ */
+public class CambriaTextStreamReader implements reader
+{
+	/**
+	 * Constructs a line-oriented reader: each input line becomes one message.
+	 * @param is the input stream to read lines from
+	 * @param defPart the default partition key, or null to key by current time
+	 * @throws CambriaApiException
+	 */
+	public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException
+	{
+		fReader = new BufferedReader ( new InputStreamReader ( is ) );
+		fDefPart = defPart;
+	}
+
+	@Override
+	/**
+	 * Reads the next line from the stream and returns it as a message.
+	 * @return the next message, or null at end of stream
+	 * @throws CambriaApiException if reading the stream fails
+	 */
+	public message next () throws CambriaApiException
+	{
+		try
+		{
+			final String line = fReader.readLine ();
+			if ( line == null ) return null;
+
+			return new message ()
+			{
+				private LogDetails logDetails;
+				private boolean transactionEnabled;
+
+				/**
+				 * returns boolean value which
+				 * indicates whether transaction is enabled
+				 * @return
+				 */
+				public boolean isTransactionEnabled() {
+					return transactionEnabled;
+				}
+
+				/**
+				 * sets boolean value which
+				 * indicates whether transaction is enabled
+				 */
+				public void setTransactionEnabled(boolean transactionEnabled) {
+					this.transactionEnabled = transactionEnabled;
+				}
+
+				@Override
+				/**
+				 * @return the partition key. It checks whether fDefPart is null:
+				 * if so, it returns System.currentTimeMillis() as a string, else
+				 * it returns the fDefPart value.
+				 */
+				public String getKey ()
+				{
+					return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
+				}
+
+				@Override
+				/**
+				 * returns the message in String type object
+				 * @return
+				 */
+				public String getMessage ()
+				{
+					return line;
+				}
+
+				@Override
+				/**
+				 * set log details in logDetails variable
+				 */
+				public void setLogDetails(LogDetails logDetails) {
+					this.logDetails = logDetails;
+				}
+
+				@Override
+				/**
+				 * get the log details
+				 */
+				public LogDetails getLogDetails() {
+					return this.logDetails;
+				}
+			};
+		}
+		catch ( IOException e )
+		{
+			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
+		}
+	}
+
+	private final BufferedReader fReader;   // line source; uses platform default charset
+	private final String fDefPart;          // default partition key, may be null
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java
new file mode 100644
index 0000000..9593b51
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java
@@ -0,0 +1,39 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+
+
+
+
+/**
+ *
+ * @author author
+ *
+ */
+public interface DMaaPAAFAuthenticator {
+	/**
+	 * Checks whether the request's user holds the given AAF role.
+	 * @param req the inbound request
+	 * @param role the AAF role to check
+	 * @return true if the user is in the role
+	 */
+	boolean aafAuthentication( HttpServletRequest req , String role);
+
+	/**
+	 * Builds the AAF permission string for a topic and action.
+	 * @param permission the topic name the permission applies to
+	 * @param action the action part of the permission (e.g. pub, sub, view)
+	 * @return the AAF permission string
+	 * @throws CambriaApiException if the permission cannot be constructed
+	 */
+	String aafPermissionString(String permission, String action) throws CambriaApiException;
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
new file mode 100644
index 0000000..223e8c5
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
+
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.http.HttpStatus;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+
+
+/**
+ *
+ * @author author
+ *
+ */
+public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
+
+	/**
+	 * Checks whether the authenticated user of the request holds the given role.
+	 * @param req the inbound request
+	 * @param role the AAF role to check
+	 * @return true if the user is in the role, false otherwise
+	 */
+	@Override
+	public boolean aafAuthentication(HttpServletRequest req, String role) {
+		boolean auth = false;
+		if(req.isUserInRole(role))
+		{
+
+			auth = true;
+		}
+		return auth;
+	}
+
+	/**
+	 * Builds the AAF permission string
+	 * "&lt;namespace&gt;.mr.topic|:topic.&lt;topicName&gt;|&lt;action&gt;".
+	 * The namespace is derived from the topic name when it looks fully
+	 * qualified (contains '.' and a "com.att" or "org" substring); otherwise it
+	 * falls back to the configured "defaultNSforUEB" property, and finally to
+	 * "com.att.dmaap.mr.ueb".
+	 * @param topicName the topic the permission applies to
+	 * @param action the action part of the permission (e.g. pub, sub, view)
+	 * @return the AAF permission string
+	 * @throws CambriaApiException declared by the interface; not thrown here
+	 */
+	@Override
+	public String aafPermissionString(String topicName, String action) throws CambriaApiException {
+
+
+		String permission = "";
+		String nameSpace ="";
+		if(topicName.contains(".") && (topicName.contains("com.att")||topicName.contains("org"))) {
+			// namespace is everything before the last '.' of the topic name
+			nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
+		}
+		else {
+			nameSpace = null;
+			nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB");
+
+			// final fallback when no default namespace is configured
+			if(null==nameSpace)nameSpace="com.att.dmaap.mr.ueb";
+
+		}
+
+		permission = nameSpace+".mr.topic|:topic."+topicName+"|"+action;
+		return permission;
+
+	}
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java
new file mode 100644
index 0000000..b0c87af
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java
@@ -0,0 +1,62 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
+import com.att.nsa.security.NsaApiKey;
+
+
+/**
+ * An interface for authenticating an inbound request.
+ * @author author
+ *
+ * @param <K> NsaApiKey
+ */
+public interface DMaaPAuthenticator<K extends NsaApiKey> {
+
+	/**
+	 * Qualify a request as possibly using the authentication method that this class implements.
+	 * @param req
+	 * @return true if the request might be authenticated by this class
+	 */
+	boolean qualify ( HttpServletRequest req );
+
+	/**
+	 * Check for a request being authentic. If it is, return the API key. If not, return null.
+	 * @param req An inbound web request
+	 * @return the API key for an authentic request, or null
+	 */
+	K isAuthentic ( HttpServletRequest req );
+
+	/**
+	 * Check for a ctx being authentic. If it is, return the API key. If not, return null.
+	 * @param ctx
+	 * @return the API key for an authenticated request, or null
+	 */
+	K authenticate ( DMaaPContext ctx );
+
+	/**
+	 * Register an additional authenticator to be consulted for requests.
+	 * @param a the authenticator to add
+	 */
+	void addAuthenticator(DMaaPAuthenticator<K> a);
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java
new file mode 100644
index 0000000..d1d5019
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java
@@ -0,0 +1,136 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
+
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.NsaAuthenticator;
+import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+/**
+ *
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+	// the chain of authenticators consulted, in order, by authenticate()
+	private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
+
+
+
+	// Setting timeout to a large value for testing purpose.
+	// private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10; //
+	// 10 minutes
+	// NOTE(review): this window is 1000*60*10^5 ms (~69 days) — looks like
+	// leftover test configuration; confirm before shipping.
+	private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10;
+
+	/**
+	 * Construct the security manager against an API key database
+	 *
+	 * @param db
+	 *            the API key db
+	 */
+	public DMaaPAuthenticatorImpl(NsaApiDb<K> db) {
+		this(db, kDefaultRequestTimeWindow);
+	}
+
+
+
+
+	/**
+	 * Construct the security manager against an API key database with a
+	 * specific request time window size
+	 *
+	 * @param db
+	 *            the API key db
+	 * @param authTimeWindowMs
+	 *            the size of the time window for request authentication
+	 */
+	public DMaaPAuthenticatorImpl(NsaApiDb<K> db, long authTimeWindowMs) {
+		fAuthenticators = new LinkedList<DMaaPAuthenticator<K>>();
+
+		fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, authTimeWindowMs));
+	}
+
+	/**
+	 * Authenticate a user's request. Each registered authenticator is tried in
+	 * order; the first one that qualifies the request and finds it authentic
+	 * wins. This method returns the API key if the user is authentic, null
+	 * otherwise.
+	 *
+	 * @param ctx
+	 * @return an api key record, or null
+	 */
+	public K authenticate(DMaaPContext ctx) {
+		final HttpServletRequest req = ctx.getRequest();
+		for (DMaaPAuthenticator<K> a : fAuthenticators) {
+			if (a.qualify(req)) {
+				final K k = a.isAuthentic(req);
+				if (k != null)
+					return k;
+			}
+			// else: this request doesn't look right to the authenticator
+		}
+		return null;
+	}
+
+	/**
+	 * Get the user associated with the incoming request, or null if the user is
+	 * not authenticated.
+	 *
+	 * @param ctx
+	 * @return the authenticated user's API key, or null
+	 */
+	public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) {
+		final DMaaPAuthenticator<NsaSimpleApiKey> m = ctx.getConfigReader().getfSecurityManager();
+		return m.authenticate(ctx);
+	}
+
+	/**
+	 * This manager itself never qualifies a request directly; it only
+	 * delegates to the registered authenticators, so this returns false.
+	 * @param req
+	 * @return false
+	 */
+	public boolean qualify(HttpServletRequest req) {
+		return false;
+	}
+
+	/**
+	 * This manager itself never authenticates a request directly; use
+	 * authenticate(ctx) instead, so this returns null.
+	 * @param req
+	 * @return null
+	 */
+	public K isAuthentic(HttpServletRequest req) {
+		return null;
+	}
+
+	/**
+	 * Appends an authenticator to the chain consulted by authenticate().
+	 * @param a the authenticator to add
+	 */
+	public void addAuthenticator ( DMaaPAuthenticator<K> a )
+	{
+		this.fAuthenticators.add(a);
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
new file mode 100644
index 0000000..dcf98f8
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
+
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.authenticators.MechIdAuthenticator;
+//import com.att.nsa.security.db.NsaApiDb;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * An authenticator for AT&T MechIds.
+ *
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPMechIdAuthenticator <K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+	/**
+	 * This is not yet implemented; by default it returns false.
+	 * @param req HttpServletRequest
+	 * @return false
+	 */
+	public boolean qualify (HttpServletRequest req) {
+		// we haven't implemented anything here yet, so there's no qualifying request
+		return false;
+	}
+
+	/**
+	 * This method would authenticate the mech id; not yet implemented. It only
+	 * logs the attempt with the caller's remote address.
+	 * @param req
+	 * @return null always
+	 */
+	public K isAuthentic (HttpServletRequest req) {
+		final String remoteAddr = req.getRemoteAddr();
+		authLog ( "MechId auth is not yet implemented.", remoteAddr );
+		return null;
+	}
+
+	// logs an authentication event tagged with the caller's remote address
+	private static void authLog ( String msg, String remoteAddr )
+	{
+		log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
+	}
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
+
+	/**
+	 * Currently not yet implemented; returns null.
+	 * @param ctx DMaaP context
+	 * @return null always
+	 */
+	@Override
+	public K authenticate(DMaaPContext ctx) {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	/**
+	 * Not yet implemented; additional authenticators are ignored.
+	 * @param a the authenticator to add (unused)
+	 */
+	@Override
+	public void addAuthenticator(DMaaPAuthenticator<K> a) {
+		// TODO Auto-generated method stub
+
+	}
+
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
new file mode 100644
index 0000000..8026ab5
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
@@ -0,0 +1,293 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.db.NsaApiDb;
+
+/**
+ * This authenticator handles an AWS-like authentication, originally used by the
+ * Cambria server (the API server for UEB).
+ *
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPOriginalUebAuthenticator<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+ /**
+ * constructor initialization
+ *
+ * @param db
+ * @param requestTimeWindowMs
+ */
+ public DMaaPOriginalUebAuthenticator(NsaApiDb<K> db, long requestTimeWindowMs) {
+ fDb = db;
+ fRequestTimeWindowMs = requestTimeWindowMs;
+ //fAuthenticators = new LinkedList<DMaaPAuthenticator<K>>();
+
+ //fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, requestTimeWindowMs));
+
+ }
+
+ @Override
+ public boolean qualify(HttpServletRequest req) {
+ // accept anything that comes in with X-(Cambria)Auth in the header
+ final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+ return xAuth != null;
+ }
+
+ /**
+ * method for authentication
+ *
+ * @param req
+ * @return
+ */
+ public K isAuthentic(HttpServletRequest req) {
+ final String remoteAddr = req.getRemoteAddr();
+ // Cambria originally used "Cambria..." headers, but as the API key
+ // system is now more
+ // general, we take either form.
+ final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+ final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" });
+
+ final String httpDate = req.getHeader("Date");
+
+ final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" });
+ return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce);
+ }
+
+ /**
+ * Authenticate a user's request. This method returns the API key if the
+ * user is authentic, null otherwise.
+ *
+ * @param remoteAddr
+ * @param xAuth
+ * @param xDate
+ * @param httpDate
+ * @param nonce
+ * @return an api key record, or null
+ */
+ public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) {
+ if (xAuth == null) {
+ authLog("No X-Auth header on request", remoteAddr);
+ return null;
+ }
+
+ final String[] xAuthParts = xAuth.split(":");
+ if (xAuthParts.length != 2) {
+ authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+ return null;
+ }
+
+
+ // get the api key and signature
+ final String clientApiKey = xAuthParts[0];
+ final String clientApiHash = xAuthParts[1];
+ if (clientApiKey.length() == 0 || clientApiHash.length() == 0) {
+ authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+ return null;
+ }
+ // if the user provided X-Date, use that. Otherwise, go for Date
+ final String dateString = xDate != null ? xDate : httpDate;
+ final Date clientDate = getClientDate(dateString);
+ if (clientDate == null) {
+ authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr);
+ return null;
+ }
+ // check the time range
+ final long nowMs = System.currentTimeMillis();
+ final long diffMs = Math.abs(nowMs - clientDate.getTime());
+ if (diffMs > fRequestTimeWindowMs) {
+ authLog("Client date is not in acceptable range of server date. Client:" + clientDate.getTime()
+ + ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr);
+ return null;
+ }
+ K apiRecord;
+ try {
+ apiRecord = fDb.loadApiKey(clientApiKey);
+ if (apiRecord == null) {
+ authLog("No such API key " + clientApiKey, remoteAddr);
+ return null;
+ }
+ } catch (ConfigDbException e) {
+ authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr);
+ return null;
+ }
+ // make the signed content
+ final StringBuilder sb = new StringBuilder();
+ sb.append(dateString);
+ if (nonce != null) {
+ sb.append(":");
+ sb.append(nonce);
+ }
+ final String signedContent = sb.toString();
+ // now check the signed date string
+ final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret());
+ if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) {
+ authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".",
+ remoteAddr);
+ return null;
+ }
+ authLog("authenticated " + apiRecord.getKey(), remoteAddr);
+ return apiRecord;
+ }
+
+ /**
+ * Get the first value of the first existing header from the headers list
+ *
+ * @param req
+ * @param headers
+ * @return a header value, or null if none exist
+ */
+ private static String getFirstHeader(HttpServletRequest req, String[] headers) {
+ for (String header : headers) {
+ final String result = req.getHeader(header);
+ if (result != null)
+ return result;
+ }
+ return null;
+ }
+
+ /**
+ * Parse the date string into a Date using one of the supported date
+ * formats.
+ *
+ * @param dateHeader
+ * @return a date, or null
+ */
+ private static Date getClientDate(String dateString) {
+ if (dateString == null) {
+ return null;
+ }
+
+ // parse the date
+ Date result = null;
+ for (String dateFormat : kDateFormats) {
+ final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US);
+ if (!dateFormat.contains("z") && !dateFormat.contains("Z")) {
+ parser.setTimeZone(TIMEZONE_GMT);
+ }
+
+ try {
+ result = parser.parse(dateString);
+ break;
+ } catch (ParseException e) {
+ // presumably wrong format
+ }
+ }
+ return result;
+ }
+
+ private static void authLog(String msg, String remoteAddr) {
+ log.info("AUTH-LOG(" + remoteAddr + "): " + msg);
+ }
+
+ private final NsaApiDb<K> fDb;
+ private final long fRequestTimeWindowMs;
+
+ private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT");
+
+ private static final String kDateFormats[] =
+ {
+ // W3C date format (RFC 3339).
+ "yyyy-MM-dd'T'HH:mm:ssz",
+ "yyyy-MM-dd'T'HH:mm:ssXXX", // as of Java 7, reqd to handle colon in TZ offset
+
+ // Preferred HTTP date format (RFC 1123).
+ "EEE, dd MMM yyyy HH:mm:ss zzz",
+
+ // simple unix command line 'date' format
+ "EEE MMM dd HH:mm:ss z yyyy",
+
+ // Common date format (RFC 822).
+ "EEE, dd MMM yy HH:mm:ss z",
+ "EEE, dd MMM yy HH:mm z",
+ "dd MMM yy HH:mm:ss z",
+ "dd MMM yy HH:mm z",
+
+ // Obsoleted HTTP date format (ANSI C asctime() format).
+ "EEE MMM dd HH:mm:ss yyyy",
+
+ // Obsoleted HTTP date format (RFC 1036).
+ "EEEE, dd-MMM-yy HH:mm:ss zzz",
+ };
+
+ /*private static final String kDateFormats[] = {
+ // W3C date format (RFC 3339).
+ "yyyy-MM-dd'T'HH:mm:ssz",
+
+ // Preferred HTTP date format (RFC 1123).
+ "EEE, dd MMM yyyy HH:mm:ss zzz",
+
+ // simple unix command line 'date' format
+ "EEE MMM dd HH:mm:ss z yyyy",
+
+ // Common date format (RFC 822).
+ "EEE, dd MMM yy HH:mm:ss z", "EEE, dd MMM yy HH:mm z", "dd MMM yy HH:mm:ss z", "dd MMM yy HH:mm z",
+
+ // Obsoleted HTTP date format (ANSI C asctime() format).
+ "EEE MMM dd HH:mm:ss yyyy",
+
+ // Obsoleted HTTP date format (RFC 1036).
+ "EEEE, dd-MMM-yy HH:mm:ss zzz", }; */
+ // logger declaration
+ //private static final Logger log = Logger.getLogger(DMaaPOriginalUebAuthenticator.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class);
+ @Override
+// public K authenticate(DMaaPContext ctx) {
+ // TODO Auto-generated method stub
+ //return null;
+ //}
+
+ public K authenticate(DMaaPContext ctx) {
+
+ /*final HttpServletRequest req = ctx.getRequest();
+ for (DMaaPAuthenticator<K> a : fAuthenticators) {
+ if (a.qualify(req)) {
+ final K k = a.isAuthentic(req);
+ if (k != null)
+ return k;
+ }
+ // else: this request doesn't look right to the authenticator
+ }*/
+ return null;
+ }
+
+
+ public void addAuthenticator ( DMaaPAuthenticator<K> a )
+ {
+ //this.fAuthenticators.add(a);
+ }
+ //private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java
new file mode 100644
index 0000000..b4a7282
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java
@@ -0,0 +1,83 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.json.JSONException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
/**
 * Administrative operations for the Cambria/DMaaP API server: consumer-cache
 * inspection and maintenance, plus IP blacklist management. Implementations
 * write their results directly to the response held by the DMaaPContext.
 *
 * @author author
 *
 */
public interface AdminService {
	/**
	 * Writes the current contents of the consumer cache to the response.
	 *
	 * @param dMaaPContext request/response context
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not an authorized admin
	 */
	void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException;

	/**
	 * Drops (clears) the consumer cache.
	 *
	 * @param dMaaPContext request/response context
	 * @throws JSONException if the response JSON cannot be built
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not an authorized admin
	 */
	void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException;


	/**
	 * Writes the list of blacklisted IPs to the response.
	 *
	 * @param dMaaPContext request/response context
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not an authorized admin
	 */
	void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException;

	/**
	 * Adds an IP to the blacklist.
	 *
	 * @param dMaaPContext request/response context
	 * @param ip the IP address to blacklist
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if the blacklist cannot be persisted
	 * @throws AccessDeniedException if the caller is not an authorized admin
	 */
	void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;

	/**
	 * Removes an IP from the blacklist.
	 *
	 * @param dMaaPContext request/response context
	 * @param ip the IP address to remove from the blacklist
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if the blacklist cannot be persisted
	 * @throws AccessDeniedException if the caller is not an authorized admin
	 */
	void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java
new file mode 100644
index 0000000..bb3685d
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java
@@ -0,0 +1,106 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.ApiKeyBean;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+
/**
 * Declares all the methods of the interface that is mainly used for API-key
 * management and authentication purposes. Implementations write their results
 * to the response held by the DMaaPContext.
 *
 *
 */

public interface ApiKeysService {
	/**
	 * This method declaration for getting all ApiKeys that have been generated
	 * on the server; the list is written to the response.
	 *
	 * @param dmaapContext request/response context
	 * @throws ConfigDbException if the key store cannot be read
	 * @throws IOException if the response cannot be written
	 */

	public void getAllApiKeys(DMaaPContext dmaapContext)
			throws ConfigDbException, IOException;

	/**
	 * Getting information about a specific ApiKey; the details are written to
	 * the response.
	 *
	 * @param dmaapContext request/response context
	 * @param apikey the key identifier to look up
	 * @throws ConfigDbException if the key store cannot be read
	 * @throws IOException if the response cannot be written
	 */

	public void getApiKey(DMaaPContext dmaapContext, String apikey)
			throws ConfigDbException, IOException;

	/**
	 * This method is used to create a particular ApiKey.
	 *
	 * @param dmaapContext request/response context
	 * @param nsaApiKey requested key attributes (e.g. email, description)
	 * @throws KeyExistsException if a key with the same identifier already exists
	 * @throws ConfigDbException if the key cannot be persisted
	 * @throws IOException if the response cannot be written
	 */

	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
			throws KeyExistsException, ConfigDbException, IOException;

	/**
	 * This method is used to update an ApiKey that was already generated on
	 * the server.
	 *
	 * @param dmaapContext request/response context
	 * @param apikey the key identifier to update
	 * @param nsaApiKey the new key attributes
	 * @throws ConfigDbException if the key cannot be persisted
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller does not own the key
	 * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException
	 */
	public void updateApiKey(DMaaPContext dmaapContext, String apikey,
			ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException
			;

	/**
	 * This method is used to delete a specific ApiKey.
	 *
	 * @param dmaapContext request/response context
	 * @param apikey the key identifier to delete
	 * @throws ConfigDbException if the key cannot be removed from the store
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller does not own the key
	 */

	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
			throws ConfigDbException, IOException,AccessDeniedException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java
new file mode 100644
index 0000000..526d185
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java
@@ -0,0 +1,76 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
/**
 * Service for consuming events from and publishing events to DMaaP topics.
 * Implementations stream results through the response held by the context.
 *
 * @author author
 *
 */
public interface EventsService {
	/**
	 * Consumes events from a topic on behalf of the given consumer group and
	 * client, writing them to the response in the context.
	 *
	 * @param ctx request/response context
	 * @param topic the topic to consume from
	 * @param consumerGroup the consumer group the client belongs to
	 * @param clientId the client's identifier within the group
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws TopicExistsException if the topic does not exist
	 * @throws AccessDeniedException if the client may not read the topic
	 * @throws UnavailableException if a consumer instance cannot be obtained
	 * @throws CambriaApiException on an API-level failure
	 * @throws IOException if the response cannot be written
	 */
	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
			throws ConfigDbException, TopicExistsException,UnavailableException,
			CambriaApiException, IOException,AccessDeniedException;

	/**
	 * Publishes the events read from the given stream to a topic.
	 *
	 * @param ctx request/response context
	 * @param topic the topic to publish to
	 * @param msg the request body containing the message payload(s)
	 * @param defaultPartition partition to use when a message specifies none
	 * @param requestTime receive timestamp, used for transaction logging
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws AccessDeniedException if the client may not write to the topic
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 * @throws IOException if the payload cannot be read or the response written
	 * @throws missingReqdSetting if a required server setting is absent
	 */
	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
			CambriaApiException, IOException,missingReqdSetting;

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java
new file mode 100644
index 0000000..021db2c
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java
@@ -0,0 +1,68 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.json.JSONException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
/**
 * Contains the logic for executing calls to the Mirror Maker agent tool.
 *
 * @author <a href="mailto:"></a>
 *
 * @since May 25, 2016
 */

public interface MMService {

	/*
	 * this method calls the add white list method of a Mirror Maker agent API
	 */
	public void addWhiteList();

	/*
	 * this method calls the remove white list method of a Mirror Maker agent API
	 */
	public void removeWhiteList();

	/*
	 * This method calls the list white list method of a Mirror Maker agent API
	 */
	public void listWhiteList();

	/**
	 * Subscribes to a topic on behalf of the given consumer group and client
	 * and returns the consumed payload as a string.
	 *
	 * @param ctx request/response context
	 * @param topic the topic to consume from
	 * @param consumerGroup the consumer group the client belongs to
	 * @param clientId the client's identifier within the group
	 * @return the consumed message payload
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws TopicExistsException if the topic does not exist
	 * @throws AccessDeniedException if the client may not read the topic
	 * @throws UnavailableException if a consumer instance cannot be obtained
	 * @throws CambriaApiException on an API-level failure
	 * @throws IOException on a read/write failure
	 */
	public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException,
			AccessDeniedException, UnavailableException, CambriaApiException, IOException;

	/**
	 * Publishes the events read from the given stream to a topic.
	 *
	 * @param ctx request/response context
	 * @param topic the topic to publish to
	 * @param msg the request body containing the message payload(s)
	 * @param defaultPartition partition to use when a message specifies none
	 * @param requestTime receive timestamp, used for transaction logging
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws AccessDeniedException if the client may not write to the topic
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 * @throws IOException if the payload cannot be read or the response written
	 * @throws missingReqdSetting if a required server setting is absent
	 */
	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
			CambriaApiException, IOException, missingReqdSetting;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java
new file mode 100644
index 0000000..aa3f967
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java
@@ -0,0 +1,54 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+/**
+ * @author
+ *
+ */
+import java.io.IOException;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
/**
 * Service exposing the API server's runtime metrics; results are written to
 * the response held by the context.
 *
 * @author author
 *
 */
public interface MetricsService {
	/**
	 * Writes all available metrics to the response.
	 *
	 * @param ctx request/response context
	 * @throws IOException if the response cannot be written
	 */
	public void get(DMaaPContext ctx) throws IOException;

	/**
	 * Writes a single metric, selected by name, to the response.
	 *
	 * @param ctx request/response context
	 * @param name the metric's name
	 * @throws IOException if the response cannot be written
	 * @throws CambriaApiException if no metric with the given name exists
	 */
	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java
new file mode 100644
index 0000000..fc91f63
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java
@@ -0,0 +1,176 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.json.JSONException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.TopicBean;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+
/**
 * Interface providing all the topic related operations: listing, lookup,
 * creation, deletion, and publisher/consumer access control. Implementations
 * write their results to the response held by the DMaaPContext.
 *
 * @author author
 *
 */
public interface TopicService {
	/**
	 * Method fetches details of all the topics and writes them to the
	 * response. (getAllTopics is the companion variant; presumably one of the
	 * two filters by caller visibility — TODO confirm against implementations.)
	 *
	 * @param dmaapContext request/response context
	 * @throws JSONException if the response JSON cannot be built
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws IOException if the response cannot be written
	 */
	void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
	void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;

	/**
	 * Method fetches details of a specific topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to look up
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws IOException if the response cannot be written
	 * @throws TopicExistsException if the topic does not exist
	 */
	void getTopic(DMaaPContext dmaapContext, String topicName)
			throws ConfigDbException, IOException, TopicExistsException;

	/**
	 * Method used to create a topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicBean requested topic attributes (name, partitions, etc.)
	 * @throws CambriaApiException on an API-level failure
	 * @throws TopicExistsException if the topic already exists
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller may not create topics
	 */

	void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
			throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException;

	/**
	 * Method used to delete a topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to delete
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller may not delete the topic
	 * @throws ConfigDbException if topic metadata cannot be updated
	 * @throws CambriaApiException on an API-level failure
	 * @throws TopicExistsException if the topic does not exist
	 */

	void deleteTopic(DMaaPContext dmaapContext, String topicName)
			throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException;

	/**
	 * Method provides the list of all the publishers associated with a topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to inspect
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws TopicExistsException if the topic does not exist
	 */
	void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
			throws IOException, ConfigDbException, TopicExistsException;

	/**
	 * Method provides details of all the consumers associated with a specific
	 * topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to inspect
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if topic metadata cannot be read
	 * @throws TopicExistsException if the topic does not exist
	 */
	void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
			throws IOException, ConfigDbException, TopicExistsException;

	/**
	 * Method grants publishing rights on a specific topic to a producer.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to modify
	 * @param producerId the producer being granted access
	 * @throws AccessDeniedException if the caller may not change topic ACLs
	 * @throws ConfigDbException if the ACL cannot be persisted
	 * @throws IOException if the response cannot be written
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 */
	void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;

	/**
	 * Method revokes a specific publisher's rights on a topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to modify
	 * @param producerId the producer being denied access
	 * @throws AccessDeniedException if the caller may not change topic ACLs
	 * @throws ConfigDbException if the ACL cannot be persisted
	 * @throws IOException if the response cannot be written
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 */
	void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;

	/**
	 * Method grants consuming rights on a topic to a specific user.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to modify
	 * @param consumerId the consumer being granted access
	 * @throws AccessDeniedException if the caller may not change topic ACLs
	 * @throws ConfigDbException if the ACL cannot be persisted
	 * @throws IOException if the response cannot be written
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 */
	void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;

	/**
	 * Method revokes a particular user's consuming rights on a topic.
	 *
	 * @param dmaapContext request/response context
	 * @param topicName the topic to modify
	 * @param consumerId the consumer being denied access
	 * @throws AccessDeniedException if the caller may not change topic ACLs
	 * @throws ConfigDbException if the ACL cannot be persisted
	 * @throws IOException if the response cannot be written
	 * @throws TopicExistsException if the topic does not exist
	 * @throws CambriaApiException on an API-level failure
	 */
	void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java
new file mode 100644
index 0000000..b2c8182
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java
@@ -0,0 +1,62 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.configs.ConfigDbException;
+
/**
 * Service contract for working with message-router transaction records.
 *
 * @author author
 *
 */
public interface TransactionService {
	/**
	 * Checks the supplied transaction object (implementation-defined check).
	 *
	 * @param trnObj the transaction object to check
	 */
	void checkTransaction(TransactionObj trnObj);

	/**
	 * Fetches all known transaction objects for the given context.
	 *
	 * @param dmaapContext request/response context
	 * @throws ConfigDbException on config store failure
	 * @throws IOException on response I/O failure
	 */
	void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException;

	/**
	 * Fetches the transaction object with the given id for the given context.
	 *
	 * @param dmaapContext request/response context
	 * @param transactionId id of the transaction to fetch
	 * @throws ConfigDbException on config store failure
	 * @throws JSONException on JSON serialization failure
	 * @throws IOException on response I/O failure
	 */
	void getTransactionObj(DMaaPContext dmaapContext, String transactionId)
			throws ConfigDbException, JSONException, IOException;
}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java
new file mode 100644
index 0000000..daa3825
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
+import com.att.nsa.configs.ConfigDbException;
+
+import kafka.common.TopicExistsException;
+
/**
 * Service contract for the templated UI pages: hello page, api key
 * tables/details and topic tables/details.
 *
 * @author author
 *
 */
public interface UIService {
	/**
	 * Returning template of hello page.
	 *
	 * @param dmaapContext request/response context
	 * @throws IOException on response I/O failure
	 */
	void hello(DMaaPContext dmaapContext) throws IOException;

	/**
	 * Fetching list of all api keys and returning in a templated form for
	 * display.
	 *
	 * @param dmaapContext request/response context
	 * @throws ConfigDbException on config store failure
	 * @throws IOException on response I/O failure
	 */
	void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException,
			IOException;

	/**
	 * Fetching details of an api key in a templated form for display.
	 *
	 * @param dmaapContext request/response context
	 * @param apiKey the key to display
	 * @throws Exception on any failure while loading or rendering the key
	 */
	void getApiKey(DMaaPContext dmaapContext, final String apiKey)
			throws Exception;

	/**
	 * Fetching list of all the topics and returning in a templated form for
	 * display.
	 *
	 * @param dmaapContext request/response context
	 * @throws ConfigDbException on config store failure
	 * @throws IOException on response I/O failure
	 */
	void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException,
			IOException;

	/**
	 * Fetching details of a topic in a templated form for display.
	 *
	 * @param dmaapContext request/response context
	 * @param topic name of the topic to display
	 * @throws ConfigDbException on config store failure
	 * @throws IOException on response I/O failure
	 * @throws TopicExistsException
	 */
	void getTopic(DMaaPContext dmaapContext, final String topic)
			throws ConfigDbException, IOException, TopicExistsException;

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java
new file mode 100644
index 0000000..96a63a7
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java
@@ -0,0 +1,188 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Set;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.AdminService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * @author author
+ *
+ */
+@Component
+public class AdminServiceImpl implements AdminService {
+
+ //private Logger log = Logger.getLogger(AdminServiceImpl.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);
+ /**
+ * getConsumerCache returns consumer cache
+ * @param dMaaPContext context
+ * @throws IOException ex
+ * @throws AccessDeniedException
+ */
+ @Override
+ public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
+ adminAuthenticate(dMaaPContext);
+
+ JSONObject consumers = new JSONObject();
+ JSONArray jsonConsumersList = new JSONArray();
+
+ for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) {
+ JSONObject consumerObject = new JSONObject();
+ consumerObject.put("name", consumer.getName());
+ consumerObject.put("created", consumer.getCreateTimeMs());
+ consumerObject.put("accessed", consumer.getLastAccessMs());
+ jsonConsumersList.put(consumerObject);
+ }
+
+ consumers.put("consumers", jsonConsumersList);
+ log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "===========");
+ DMaaPResponseBuilder.respondOk(dMaaPContext, consumers);
+ }
+
+ /**
+ *
+ * dropConsumerCache() method clears consumer cache
+ * @param dMaaPContext context
+ * @throws JSONException ex
+ * @throws IOException ex
+ * @throws AccessDeniedException
+ *
+ */
+ @Override
+ public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
+ adminAuthenticate(dMaaPContext);
+ getConsumerFactory(dMaaPContext).dropCache();
+ DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
+ // log.info("========== AdminServiceImpl: dropConsumerCache: Consumer
+ // Cache successfully dropped.===========");
+ }
+
+ /**
+ * getfConsumerFactory returns CosnumerFactory details
+ * @param dMaaPContext contxt
+ * @return ConsumerFactory obj
+ *
+ */
+ private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
+ return dMaaPContext.getConfigReader().getfConsumerFactory();
+ }
+
+ /**
+ * return ipblacklist
+ * @param dMaaPContext context
+ * @return blacklist obj
+ */
+ private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
+ return dMaaPContext.getConfigReader().getfIpBlackList();
+ }
+
+
+ /**
+ * Get list of blacklisted ips
+ */
+ @Override
+ public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException
+ {
+ adminAuthenticate ( dMaaPContext );
+
+ DMaaPResponseBuilder.respondOk ( dMaaPContext,
+ new JSONObject().put ( "blacklist", setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+ }
+
+ /**
+ * Add ip to blacklist
+ */
+ @Override
+ public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
+ {
+ adminAuthenticate ( dMaaPContext );
+
+ getIpBlacklist (dMaaPContext).add ( ip );
+ DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
+ }
+
+ /**
+ * Remove ip from blacklist
+ */
+ @Override
+ public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
+ {
+ adminAuthenticate ( dMaaPContext );
+
+ getIpBlacklist (dMaaPContext).remove ( ip );
+ DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
+ }
+
+ /**
+ * Authenticate if user is admin
+ * @param dMaaPContext context
+ * @throws AccessDeniedException ex
+ */
+ private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException
+ {
+
+ final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
+ if ( user == null || !user.getKey ().equals ( "admin" ) )
+ {
+ throw new AccessDeniedException ();
+ }
+ }
+
+ public static JSONArray setToJsonArray ( Set<?> fields )
+ {
+ return collectionToJsonArray ( fields );
+ }
+
+ public static JSONArray collectionToJsonArray ( Collection<?> fields )
+ {
+ final JSONArray a = new JSONArray ();
+ if ( fields != null )
+ {
+ for ( Object o : fields )
+ {
+ a.put ( o );
+ }
+ }
+ return a;
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java
new file mode 100644
index 0000000..5086f23
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java
@@ -0,0 +1,325 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.ApiKeyBean;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.ApiKeysService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Emailer;
+import org.springframework.stereotype.Service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+/**
+ * Implementation of the ApiKeysService, this will provide the below operations,
+ * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
+ *
+ * @author author
+ */
+@Service
+public class ApiKeysServiceImpl implements ApiKeysService {
+
+ //private Logger log = Logger.getLogger(ApiKeysServiceImpl.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString());
+ /**
+ * This method will provide all the ApiKeys present in kafka server.
+ *
+ * @param dmaapContext
+ * @throws ConfigDbException
+ * @throws IOException
+ */
+ public void getAllApiKeys(DMaaPContext dmaapContext)
+ throws ConfigDbException, IOException {
+
+ ConfigurationReader configReader = dmaapContext.getConfigReader();
+
+ log.info("configReader : " + configReader.toString());
+
+ final JSONObject result = new JSONObject();
+ final JSONArray keys = new JSONArray();
+ result.put("apiKeys", keys);
+
+ NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();
+
+ for (String key : apiDb.loadAllKeys()) {
+ keys.put(key);
+ }
+ log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : "
+ + keys.toString() + "===========");
+ DMaaPResponseBuilder.respondOk(dmaapContext, result);
+ }
+
+ /**
+ * @param dmaapContext
+ * @param apikey
+ * @throws ConfigDbException
+ * @throws IOException
+ */
+ @Override
+ public void getApiKey(DMaaPContext dmaapContext, String apikey)
+ throws ConfigDbException, IOException {
+
+ String errorMsg = "Api key name is not mentioned.";
+ int errorCode = HttpStatusCodes.k400_badRequest;
+
+ if (null != apikey) {
+ NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext)
+ .loadApiKey(apikey);
+
+
+ if (null != simpleApiKey) {
+ JSONObject result = simpleApiKey.asJsonObject();
+ DMaaPResponseBuilder.respondOk(dmaapContext, result);
+ log.info("========== ApiKeysServiceImpl: getApiKey : "
+ + result.toString() + "===========");
+ return;
+ } else {
+ errorMsg = "Api key [" + apikey + "] does not exist.";
+ errorCode = HttpStatusCodes.k404_notFound;
+ log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. "
+ + "===========");
+ DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+ errorMsg);
+ throw new IOException();
+ }
+ }
+
+ }
+
+ /**
+ * @param dmaapContext
+ * @param nsaApiKey
+ * @throws KeyExistsException
+ * @throws ConfigDbException
+ * @throws IOException
+ */
+ @Override
+ public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
+ throws KeyExistsException, ConfigDbException, IOException {
+
+ log.debug("TopicService: : createApiKey....");
+
+
+ String contactEmail = nsaApiKey.getEmail();
+ final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
+ String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
+ if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false";
+
+ // if ((contactEmail == null) || (contactEmail.length() == 0))
+ if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true") && !emailProvided )
+ {
+ DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
+ return;
+ }
+
+
+
+
+ final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+ String apiKey = nsaApiKey.getKey();
+ String sharedSecret = nsaApiKey.getSharedSecret();
+ final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
+ sharedSecret);
+
+ if (null != key) {
+
+ if (null != nsaApiKey.getEmail()) {
+ key.setContactEmail(nsaApiKey.getEmail());
+ }
+
+ if (null != nsaApiKey.getDescription()) {
+ key.setDescription(nsaApiKey.getDescription());
+ }
+
+ log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
+ + key.toString() + "=====");
+ apiKeyDb.saveApiKey(key);
+ // email out the secret to validate the email address
+ if ( emailProvided )
+ {
+ String body = "\n" + "Your email address was provided as the creator of new API key \""
+ + apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
+ + " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -"
+ + " the API key is useless without the information below, which has been provided "
+ + "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
+ + sharedSecret + "\n\n" + "Note that it's normal to share the API key"
+ + " (" + apiKey + "). "
+ + "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
+ + "However, you should NOT share the API key's secret. " + "The API key is associated with your"
+ + " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
+ + "share the secret, someone else can use the API key " + "to access proprietary data with your "
+ + "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";
+
+ Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
+ em.send(contactEmail, "New API Key", body);
+ }
+ log.debug("TopicService: : sending response.");
+
+ JSONObject o = key.asJsonObject();
+
+ o.put ( NsaSimpleApiKey.kApiSecretField,
+ emailProvided ?
+ "Emailed to " + contactEmail + "." :
+ key.getSecret ()
+ );
+ DMaaPResponseBuilder.respondOk(dmaapContext,
+ o);
+ /*o.put("secret", "Emailed to " + contactEmail + ".");
+ DMaaPResponseBuilder.respondOk(dmaapContext,
+ o); */
+ return;
+ } else {
+ log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
+ DMaaPResponseBuilder.respondWithError(dmaapContext,
+ HttpStatusCodes.k500_internalServerError,
+ "Failed to create api key.");
+ throw new KeyExistsException(apiKey);
+ }
+ }
+
+ /**
+ * @param dmaapContext
+ * @param apikey
+ * @param nsaApiKey
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws AccessDeniedException
+ */
+ @Override
+ public void updateApiKey(DMaaPContext dmaapContext, String apikey,
+ ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException {
+
+ String errorMsg = "Api key name is not mentioned.";
+ int errorCode = HttpStatusCodes.k400_badRequest;
+
+ if (null != apikey) {
+ final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+ final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+ boolean shouldUpdate = false;
+
+ if (null != key) {
+ final NsaApiKey user = DMaaPAuthenticatorImpl
+ .getAuthenticatedUser(dmaapContext);
+
+ if (user == null || !user.getKey().equals(key.getKey())) {
+ throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
+ }
+
+ if (null != nsaApiKey.getEmail()) {
+ key.setContactEmail(nsaApiKey.getEmail());
+ shouldUpdate = true;
+ }
+
+ if (null != nsaApiKey.getDescription()) {
+ key.setDescription(nsaApiKey.getDescription());
+ shouldUpdate = true;
+ }
+
+ if (shouldUpdate) {
+ apiKeyDb.saveApiKey(key);
+ }
+
+ log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
+ + key.toString() + "=========");
+ DMaaPResponseBuilder.respondOk(dmaapContext,
+ key.asJsonObject());
+ return;
+ }
+ } else {
+ errorMsg = "Api key [" + apikey + "] does not exist.";
+ errorCode = HttpStatusCodes.k404_notFound;
+ DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+ errorMsg);
+ log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
+ throw new IOException();
+ }
+ }
+
+ /**
+ * @param dmaapContext
+ * @param apikey
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws AccessDeniedException
+ */
+ @Override
+ public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
+ throws ConfigDbException, IOException, AccessDeniedException {
+
+ String errorMsg = "Api key name is not mentioned.";
+ int errorCode = HttpStatusCodes.k400_badRequest;
+
+ if (null != apikey) {
+ final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+ final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+
+ if (null != key) {
+
+ final NsaApiKey user = DMaaPAuthenticatorImpl
+ .getAuthenticatedUser(dmaapContext);
+ if (user == null || !user.getKey().equals(key.getKey())) {
+ throw new AccessDeniedException("You don't own the API key.");
+ }
+
+ apiKeyDb.deleteApiKey(key);
+ log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
+ DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+ "Api key [" + apikey + "] deleted successfully.");
+ return;
+ }
+ } else {
+ errorMsg = "Api key [" + apikey + "] does not exist.";
+ errorCode = HttpStatusCodes.k404_notFound;
+ DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+ errorMsg);
+ log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
+ throw new IOException();
+ }
+ }
+
+ /**
+ *
+ * @param dmaapContext
+ * @return
+ */
+ private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+ ConfigurationReader configReader = dmaapContext.getConfigReader();
+ return configReader.getfApiKeyDb();
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java
new file mode 100644
index 0000000..be779fa
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java
@@ -0,0 +1,154 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObj;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObjDB;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
+
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+
+/**
+ * Persistent storage for Transaction objects built over an abstract config db.
+ *
+ * @author author
+ *
+ * @param <K>
+ */
+public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {
+
+ private final ConfigDb fDb;
+ private final ConfigPath fBasePath;
+ private final DMaaPTransactionFactory<K> fKeyFactory;
+
+ private static final String kStdRootPath = "/transaction";
+
+ private ConfigPath makePath(String transactionId) {
+ return fBasePath.getChild(transactionId);
+ }
+
+ /**
+ * Construct an Transaction db over the given config db at the standard
+ * location
+ *
+ * @param db
+ * @param keyFactory
+ * @throws ConfigDbException
+ */
+ public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
+ this(db, kStdRootPath, keyFactory);
+ }
+
+ /**
+ * Construct an Transaction db over the given config db using the given root
+ * location
+ *
+ * @param db
+ * @param rootPath
+ * @param keyFactory
+ * @throws ConfigDbException
+ */
+ public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
+ throws ConfigDbException {
+ fDb = db;
+ fBasePath = db.parse(rootPath);
+ fKeyFactory = keyFactory;
+
+ if (!db.exists(fBasePath)) {
+ db.store(fBasePath, "");
+ }
+ }
+
+ /**
+ * Create a new Transaction Obj. If one exists,
+ *
+ * @param id
+ * @return the new Transaction record
+ * @throws ConfigDbException
+ */
+ public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
+ final ConfigPath path = makePath(id);
+ if (fDb.exists(path)) {
+ throw new KeyExistsException(id);
+ }
+
+ // make one, store it, return it
+ final K newKey = fKeyFactory.makeNewTransactionId(id);
+ fDb.store(path, newKey.serialize());
+ return newKey;
+ }
+
+ /**
+ * Save an Transaction record. This must be used after changing auxiliary
+ * data on the record. Note that the transaction object must exist (via
+ * createTransactionObj).
+ *
+ * @param transaction
+ * object
+ * @throws ConfigDbException
+ */
+ @Override
+ public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
+ final ConfigPath path = makePath(trnObj.getId());
+ if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) {
+ throw new IllegalStateException(trnObj.getId() + " is not known to this database");
+ }
+ fDb.store(path, ((TransactionObj) trnObj).serialize());
+ }
+
+ /**
+ * Load an Transaction record based on the Transaction Id value
+ *
+ * @param transactionId
+ * @return an Transaction Object record or null
+ * @throws ConfigDbException
+ */
+ @Override
+ public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
+ final String data = fDb.load(makePath(transactionId));
+ if (data != null) {
+ return fKeyFactory.makeNewTransactionObj(data);
+ }
+ return null;
+ }
+
+ /**
+ * Load all transactions known to this database. (This could be expensive.)
+ *
+ * @return a set of all Transaction objects
+ * @throws ConfigDbException
+ */
+ public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
+ final TreeSet<String> result = new TreeSet<String>();
+ for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) {
+ result.add(cp.getName());
+ }
+ return result;
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java
new file mode 100644
index 0000000..dce5a19
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java
@@ -0,0 +1,788 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPCambriaLimiter;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.EventsService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * This class provides the functionality to publish and subscribe messages to
+ * kafka
+ *
+ * @author author
+ *
+ */
+@Service
+public class EventsServiceImpl implements EventsService {
+ //private static final Logger LOG = Logger.getLogger(EventsServiceImpl.class);
+ private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+
+ private static final String BATCH_LENGTH = "event.batch.length";
+ private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ //@Value("${metrics.send.cambria.topic}")
+ //private String metricsTopic;
+
+ /**
+ * @param ctx
+ * @param topic
+ * @param consumerGroup
+ * @param clientId
+ * @throws ConfigDbException,
+ * TopicExistsException, AccessDeniedException,
+ * UnavailableException, CambriaApiException, IOException
+ *
+ *
+ */
+	@Override
+	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+			CambriaApiException, IOException,DMaaPAccessDeniedException {
+		final long startTime = System.currentTimeMillis();
+		final HttpServletRequest req = ctx.getRequest();
+
+		boolean isAAFTopic=false;
+		// was this host blacklisted? (fixed: removed a stray empty statement ";;" after this call)
+		final String remoteAddr = Utils.getRemoteAddress(ctx);
+		if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
+		{
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
+					"] is blacklisted. Please contact the cluster management team."
+					,null,Utils.getFormattedDate(new Date()),topic,
+					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+
+		// "limit" and "timeout" query parameters override the configured defaults
+		int limit = CambriaConstants.kNoLimit;
+		if (req.getParameter("limit") != null) {
+			limit = Integer.parseInt(req.getParameter("limit"));
+		}
+
+		int timeoutMs= CambriaConstants.kNoTimeout;
+		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"timeout");
+		if(strtimeoutMS!=null)timeoutMs=Integer.parseInt(strtimeoutMS);
+		if (req.getParameter("timeout") != null) {
+			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+		}
+
+		// By default no filter is applied if filter is not passed as a
+		// parameter in the request URI
+		String topicFilter = CambriaConstants.kNoFilter;
+		if (null != req.getParameter("filter")) {
+			topicFilter = req.getParameter("filter");
+		}
+		// pretty to print the messages in new line
+		String prettyval="0";
+		String strPretty=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"pretty");
+		if (null!=strPretty)prettyval=strPretty;
+
+		String metaval="0";
+		String strmeta=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"meta");
+		if (null!=strmeta)metaval=strmeta;
+
+		final boolean pretty = rrConvertor
+				.convertToBooleanBroad(prettyval);
+		// withMeta to print offset along with message
+		final boolean withMeta = rrConvertor
+				.convertToBooleanBroad(metaval);
+
+		final LogWrap logger = new LogWrap ( topic, consumerGroup, clientId);
+		logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter);
+
+		// is this user allowed to read this topic?
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+		if (metatopic == null) {
+			// no such topic.
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+					errorMessages.getTopicNotExist()+"-[" + topic + "]",null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+		String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+		if (null==metricTopicname)
+			metricTopicname="msgrtr.apinode.metrics.dmaap";
+
+		// UEB-style (API key) read check applies only when no Authorization header is present
+		// and the topic is not the internal metrics topic
+		if(null==ctx.getRequest().getHeader("Authorization")&& !topic.equalsIgnoreCase(metricTopicname))
+		{
+			if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))){
+				// check permissions
+				metatopic.checkUserRead(user);
+			}
+		}
+		// if headers are not provided then user will be null; an Authorization header
+		// with no authenticated user means AAF authentication must be attempted
+		if(user == null && null!=ctx.getRequest().getHeader("Authorization"))
+		{
+			// the topic name will be sent by the client
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			String permission = aaf.aafPermissionString(topic, "sub");
+			if(!aaf.aafAuthentication(ctx.getRequest(), permission))
+			{
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+						errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,null,null,
+						clientId,ctx.getRequest().getRemoteHost());
+				LOG.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+
+			}
+			isAAFTopic = true;
+		}
+		Consumer c = null;
+		try {
+			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+			// rate-limit this consumer before fetching
+			final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
+			rl.onCall(topic, consumerGroup, clientId);
+
+			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+
+			final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs).limit(limit).filter(topicFilter)
+					.pretty(pretty).withMeta(withMeta).build();
+			coes.setDmaapContext(ctx);
+			coes.setTopic(metatopic);
+			// transaction logging is on for AAF topics or when globally required
+			if( isTransEnabled() || isAAFTopic ){
+				coes.setTransEnabled(true);
+			}else{
+				coes.setTransEnabled(false);
+			}
+			coes.setTopicStyle(isAAFTopic);
+
+			DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+			DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
+
+			// No IOException thrown during respondOkWithStream, so commit the
+			// new offsets to all the brokers
+			c.commitOffsets();
+			final int sent = coes.getSentCount();
+
+			metricsSet.consumeTick(sent);
+			rl.onSend(topic, consumerGroup, clientId, sent);
+
+			final long elapsedMs = System.currentTimeMillis() - startTime;
+			logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset());
+
+		} catch (UnavailableException excp) {
+			logger.warn(excp.getMessage(), excp);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+					errorMessages.getServerUnav()+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		} catch (CambriaApiException excp) {
+			logger.warn(excp.getMessage(), excp);
+			throw excp;
+		} catch (Exception excp) {
+			// any other failure: tear down the consumer so a stale one isn't reused
+			logger.warn("Couldn't respond to client, closing cambria consumer", excp);
+			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+					"Couldn't respond to client, closing cambria consumer"+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		} finally {
+			// If no cache, close the consumer now that we're done with it.
+			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+			String strkSetting_EnableCache=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,ConsumerFactory.kSetting_EnableCache);
+			if(null!=strkSetting_EnableCache) kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
+			if (!kSetting_EnableCache && (c != null)) {
+				c.close();
+
+			}
+		}
+	}
+
+ /**
+ * @throws missingReqdSetting
+ *
+ */
+	@Override
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException, missingReqdSetting,DMaaPAccessDeniedException {
+
+		// is this user allowed to write to this topic?
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+		boolean isAAFTopic=false;
+
+		// was this host blacklisted?
+		final String remoteAddr = Utils.getRemoteAddress(ctx);
+		
+		if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
+		{
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
+					"] is blacklisted. Please contact the cluster management team."
+					,null,Utils.getFormattedDate(new Date()),topic,
+					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+
+		String topicNameStd = null;
+
+		// topics whose name starts with this configured prefix are forced through AAF auth
+		topicNameStd= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
+		String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+		if (null==metricTopicname)
+			metricTopicname="msgrtr.apinode.metrics.dmaap";
+		boolean topicNameEnforced=false;
+		if (null != topicNameStd && topic.startsWith(topicNameStd) )
+		{
+			topicNameEnforced = true;
+		}
+
+		//Here check if the user has rights to publish on the topic
+		//( This will be called when no auth is added or when UEB API Key Authentication is used)
+		//checkUserWrite(user) method will throw an error when there is no Auth header added or when the
+		//user has no publish rights
+		
+		if(null != metatopic &&  null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) && null==ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname))
+		{
+			metatopic.checkUserWrite(user);
+		}
+
+
+
+		// if headers are not provided then user will be null; AAF is attempted for enforced
+		// topic names, or when an Authorization header is present but no user was authenticated
+		if(topicNameEnforced || (user == null && null!=ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)))
+		{
+			// the topic name will be sent by the client
+			// String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			String permission = aaf.aafPermissionString(topic, "pub");
+			if(!aaf.aafAuthentication(ctx.getRequest(), permission))
+			{
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+						errorMessages.getNotPermitted1()+" publish "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,
+						Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+						null,null);
+				LOG.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+			}
+			isAAFTopic=true;
+		}
+
+		final HttpServletRequest req = ctx.getRequest();
+
+		// check for chunked input
+		boolean chunked = false;
+		if (null != req.getHeader(TRANSFER_ENCODING)) {
+			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+		}
+		// get the media type, or set it to a generic value if it wasn't
+		// provided
+		String mediaType = req.getContentType();
+		if (mediaType == null || mediaType.length() == 0) {
+			mediaType = MimeTypes.kAppGenericBinary;
+		}
+
+		// normalize away an explicit UTF-8 charset suffix so media-type matching works downstream
+		if (mediaType.contains("charset=UTF-8")) {
+			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+		}
+
+		// transaction IDs may also be required globally via configuration
+		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+		boolean istransidreqd=false;
+		if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")){
+			istransidreqd = true;
+		}
+
+		// AAF topics and transaction-required deployments get the transactional publish path
+		if (isAAFTopic || istransidreqd ) {
+			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+		}
+		else
+		{
+			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+		}
+
+
+	}
+
+ /**
+ *
+ * @param ctx
+ * @param topic
+ * @param msg
+ * @param defaultPartition
+ * @param chunked
+ * @param mediaType
+ * @throws ConfigDbException
+ * @throws AccessDeniedException
+ * @throws TopicExistsException
+ * @throws CambriaApiException
+ * @throws IOException
+ */
+	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
+			boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException {
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+
+		// batch size cap; configurable via the "event.batch.length" property
+		long maxEventBatch=1024 * 16;
+		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
+		if(null!=batchlen)maxEventBatch=Long.parseLong(batchlen);
+
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		try {
+			// for each message...
+			Publisher.message m = null;
+			while ((m = events.next()) != null) {
+				// add the message to the batch
+				batch.add(m);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						m.getMessage());
+				kms.add(data);
+				// flush when the batch reaches the configured cap.
+				// (fixed off-by-one: was "sizeNow > maxEventBatch", which let batches grow to
+				// cap+1 and disagreed with the ">=" check in pushEventsWithTransaction)
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					count += sizeNow;
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+				kms.clear();
+				batch.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			DMaaPResponseBuilder.respondOk(ctx, response);
+
+		} catch (Exception excp) {
+			// a CambriaApiException from the backend carries its own status and JSON body;
+			// anything else maps to NOT_FOUND with a partial-publish error message
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg=null;
+			if(excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+
+			}
+			ErrorResponse errRes = new ErrorResponse(status,
+					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
+					null,ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+
+		}
+	}
+
+ /**
+ *
+ * @param ctx
+ * @param inputStream
+ * @param topic
+ * @param partitionKey
+ * @param requestTime
+ * @param chunked
+ * @param mediaType
+ * @throws ConfigDbException
+ * @throws AccessDeniedException
+ * @throws TopicExistsException
+ * @throws IOException
+ * @throws CambriaApiException
+ */
+	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+			throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+			CambriaApiException {
+
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+		long maxEventBatch = 1024 * 16;
+		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
+		if(null!=evenlen)maxEventBatch=Long.parseLong(evenlen);
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		Publisher.message m = null;
+		int messageSequence = 1;
+		Long batchId = 1L;
+		final boolean transactionEnabled = true;
+		int publishBatchCount=0;
+		// NOTE(review): SimpleDateFormat is not thread-safe; safe here only because the
+		// instance is method-local
+		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+		try {
+			// fetch a fresh batch id before consuming the event stream
+			batchId=DMaaPContext.getBatchID();
+
+			// holds the transaction id of the most recently processed message; echoed
+			// back to the client in the "transactionId" response header
+			String responseTransactionId = null;
+
+			// for each message...
+			while ((m = events.next()) != null) {
+
+				// stamp transaction/log metadata onto the message before batching it
+				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+						transactionEnabled);
+				messageSequence++;
+
+				// add the message to the batch
+				batch.add(m);
+
+				responseTransactionId = m.getLogDetails().getTransactionId();
+
+				// wrap the payload with its transaction id so subscribers can correlate
+				JSONObject jsonObject = new JSONObject();
+				jsonObject.put("message", m.getMessage());
+				jsonObject.put("transactionId", responseTransactionId);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						jsonObject.toString());
+				kms.add(data);
+
+				// check if the batch is full
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					String startTime = sdf.format(new Date());
+					LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
+					try {
+						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+						// log each message's publisher details for transaction auditing
+						for (message msg : batch) {
+							LogDetails logDetails = msg.getLogDetails();
+							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+						}
+					} catch (Exception excp) {
+
+						// a CambriaApiException carries its own status and JSON body with a "message" field
+						int status = HttpStatus.SC_NOT_FOUND;
+						String errorMsg=null;
+						if(excp instanceof CambriaApiException) {
+							status = ((CambriaApiException) excp).getStatus();
+							JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+							JSONObject errObject = new JSONObject(jsonTokener);
+							errorMsg = (String) errObject.get("message");
+						}
+						ErrorResponse errRes = new ErrorResponse(status,
+								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+								"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+ "."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,
+								null,Utils.getFormattedDate(new Date()),topic,
+								Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+								null,null);
+						LOG.info(errRes.toString());
+						throw new CambriaApiException(errRes);
+					}
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					publishBatchCount=sizeNow;
+					count += sizeNow;
+					String endTime = sdf.format(new Date());
+					LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
+							+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
+					// start a new batch id for subsequent messages
+					batchId=DMaaPContext.getBatchID();
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				String startTime = sdf.format(new Date());
+				LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
+				try {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					// log each message's publisher details for transaction auditing
+					for (message msg : batch) {
+						LogDetails logDetails = msg.getLogDetails();
+						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+					}
+				} catch (Exception excp) {
+					int status = HttpStatus.SC_NOT_FOUND;
+					String errorMsg=null;
+					if(excp instanceof CambriaApiException) {
+						status = ((CambriaApiException) excp).getStatus();
+						JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+						JSONObject errObject = new JSONObject(jsonTokener);
+						errorMsg = (String) errObject.get("message");
+					}
+
+					ErrorResponse errRes = new ErrorResponse(status,
+							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+							"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+ errorMessages.getPublishMsgCount()+count+"."+errorMsg,
+							null,Utils.getFormattedDate(new Date()),topic,
+							Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+							null,null);
+					LOG.info(errRes.toString());
+					throw new CambriaApiException(errRes);
+				}
+				kms.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+				String endTime = sdf.format(new Date());
+				publishBatchCount=sizeNow;
+				LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
+						+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// echo the last transaction id back to the publisher
+			if (null != responseTransactionId) {
+				ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
+			}
+
+			// build a response
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			DMaaPResponseBuilder.respondOk(ctx, response);
+
+		} catch (Exception excp) {
+			// top-level failure path: report a partial-publish error with however many
+			// messages were committed before the failure
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg=null;
+			if(excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+			}
+
+			ErrorResponse errRes = new ErrorResponse(
+					status,
+					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
+					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+ /**
+ *
+ * @param msg
+ * @param topic
+ * @param request
+ * @param messageCreationTime
+ * @param messageSequence
+ * @param batchId
+ * @param transactionEnabled
+ */
+ private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+ final String messageCreationTime, final int messageSequence, final Long batchId,
+ final boolean transactionEnabled) {
+ LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+ transactionEnabled);
+ logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+ msg.setTransactionEnabled(transactionEnabled);
+ msg.setLogDetails(logDetails);
+ }
+
+
+
+ /**
+ *
+ * @author author
+ *
+ */
+	private static class LogWrap {
+		// Fixed "[topic/group/client] " prefix prepended to every log line.
+		private final String fId;
+
+		/**
+		 * Builds the log prefix from the consumer coordinates.
+		 *
+		 * @param topic
+		 * @param cgroup
+		 * @param cid
+		 */
+		public LogWrap(String topic, String cgroup, String cid) {
+			fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
+		}
+
+		/**
+		 * Logs an info-level message with the consumer prefix.
+		 *
+		 * @param msg
+		 */
+		public void info(String msg) {
+			LOG.info(fId + msg);
+		}
+
+		/**
+		 * Logs a warning with the consumer prefix and the causing exception.
+		 *
+		 * @param msg
+		 * @param t
+		 */
+		public void warn(String msg, Exception t) {
+			LOG.warn(fId + msg, t);
+		}
+
+	}
+
+ private boolean isTransEnabled() {
+ String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+ boolean istransidreqd=false;
+ if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) ){
+ istransidreqd = true;
+ }
+
+ return istransidreqd;
+
+ }
+
+ private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+ final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+ LogDetails logDetails = new LogDetails();
+ logDetails.setTopicId(topicName);
+ logDetails.setMessageTimestamp(messageTimestamp);
+ logDetails.setPublisherId(Utils.getUserApiKey(request));
+ logDetails.setPublisherIp(request.getRemoteHost());
+ logDetails.setMessageBatchId(batchId);
+ logDetails.setMessageSequence(String.valueOf(messageSequence));
+ logDetails.setTransactionEnabled(transactionEnabled);
+ logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+ logDetails.setServerIp(request.getLocalAddr());
+ return logDetails;
+ }
+
+ /*public String getMetricsTopic() {
+ return metricsTopic;
+ }
+
+ public void setMetricsTopic(String metricsTopic) {
+ this.metricsTopic = metricsTopic;
+ }*/
+
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java
new file mode 100644
index 0000000..cdba378
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java
@@ -0,0 +1,604 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Context;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaOutboundEventStream;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.MMService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+import kafka.producer.KeyedMessage;
+
+@Service
+public class MMServiceImpl implements MMService {
+ private static final String BATCH_LENGTH = "event.batch.length";
+ private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+ //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class);
+ private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class);
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ @Autowired
+ @Qualifier("configurationReader")
+ private ConfigurationReader configReader;
+
+ // HttpServletRequest object
+ @Context
+ private HttpServletRequest request;
+
+ // HttpServletResponse object
+ @Context
+ private HttpServletResponse response;
+
+	/** No-op: whitelist management is not supported by this MirrorMaker service implementation. */
+	@Override
+	public void addWhiteList() {
+
+	}
+
+	/** No-op: whitelist management is not supported by this MirrorMaker service implementation. */
+	@Override
+	public void removeWhiteList() {
+
+	}
+
+	/** No-op: whitelist management is not supported by this MirrorMaker service implementation. */
+	@Override
+	public void listWhiteList() {
+
+	}
+
+ @Override
+ public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+ throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+ CambriaApiException, IOException {
+
+ // final long startTime = System.currentTimeMillis();
+ final HttpServletRequest req = ctx.getRequest();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+ // was this host blacklisted?
+ final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+ if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+ int limit = CambriaConstants.kNoLimit;
+
+ if (req.getParameter("limit") != null) {
+ limit = Integer.parseInt(req.getParameter("limit"));
+ }
+ limit = 1;
+ // int timeoutMs = 60000;
+ int timeoutMs = CambriaConstants.kNoTimeout;
+ String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
+ if (strtimeoutMS != null)
+ timeoutMs = Integer.parseInt(strtimeoutMS);
+ // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
+ // CambriaConstants.kNoTimeout);
+ if (req.getParameter("timeout") != null) {
+ timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+ }
+
+ // By default no filter is applied if filter is not passed as a
+ // parameter in the request URI
+ String topicFilter = CambriaConstants.kNoFilter;
+ if (null != req.getParameter("filter")) {
+ topicFilter = req.getParameter("filter");
+ }
+ // pretty to print the messaages in new line
+ String prettyval = "0";
+ String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
+ if (null != strPretty)
+ prettyval = strPretty;
+
+ String metaval = "0";
+ String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
+ if (null != strmeta)
+ metaval = strmeta;
+
+ final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
+ // withMeta to print offset along with message
+ final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
+
+ // is this user allowed to read this topic?
+ //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+ final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+ if (metatopic == null) {
+ // no such topic.
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+ errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
+ topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
+ /*
+ * if (null==metricTopicname)
+ * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null)
+ * if(null==ctx.getRequest().getHeader("Authorization")&&
+ * !topic.equalsIgnoreCase(metricTopicname)) { if (null !=
+ * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check
+ * permissions metatopic.checkUserRead(user); } }
+ */
+
+ Consumer c = null;
+ try {
+ final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+ c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+
+ final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
+ .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+ coes.setDmaapContext(ctx);
+ coes.setTopic(metatopic);
+
+ DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+ try {
+ coes.write(baos);
+ } catch (Exception ex) {
+
+ }
+
+ c.commitOffsets();
+ final int sent = coes.getSentCount();
+
+ metricsSet.consumeTick(sent);
+
+ } catch (UnavailableException excp) {
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+ DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+ errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ null, null, clientId, ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (CambriaApiException excp) {
+
+ throw excp;
+ } catch (Exception excp) {
+
+ ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+ DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+ "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
+ Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ } finally {
+
+ boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+ String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ ConsumerFactory.kSetting_EnableCache);
+ if (null != strkSetting_EnableCache)
+ kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+ if (!kSetting_EnableCache && (c != null)) {
+ c.close();
+
+ }
+ }
+ return baos.toString();
+ }
+
+	/**
+	 * Entry point for publishing events: validates the caller against the IP
+	 * blacklist, normalizes the request media type, then dispatches to the
+	 * transactional publish path for all topics except the metrics topic, which
+	 * uses the plain (non-transactional) path.
+	 *
+	 * @param ctx              request context
+	 * @param topic            destination topic
+	 * @param msg              raw request body containing the events
+	 * @param defaultPartition partition key used when a message supplies none
+	 * @param requestTime      request receipt time, recorded in transaction logs
+	 */
+	@Override
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException, missingReqdSetting {
+
+		//final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		//final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+		// Reject blacklisted source addresses outright.
+		final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+					ctx.getRequest().getRemoteHost(), null, null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+
+		String topicNameStd = null;
+
+		topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+				"enforced.topic.name.AAF");
+		// Metrics topic name defaults to "msgrtr.apinode.metrics.dmaap" when not configured.
+		String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				"metrics.send.cambria.topic");
+		if (null == metricTopicname)
+			metricTopicname = "msgrtr.apinode.metrics.dmaap";
+		// NOTE(review): topicNameEnforced is computed but never read in this
+		// method — presumably left over from an AAF-enforcement check; verify.
+		boolean topicNameEnforced = false;
+		if (null != topicNameStd && topic.startsWith(topicNameStd)) {
+			topicNameEnforced = true;
+		}
+
+		final HttpServletRequest req = ctx.getRequest();
+
+		// Detect chunked transfer encoding from the request header.
+		boolean chunked = false;
+		if (null != req.getHeader(TRANSFER_ENCODING)) {
+			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+		}
+
+		// Default missing/empty content type to generic binary; strip a UTF-8
+		// charset suffix so downstream type matching sees the bare media type.
+		String mediaType = req.getContentType();
+		if (mediaType == null || mediaType.length() == 0) {
+			mediaType = MimeTypes.kAppGenericBinary;
+		}
+
+		if (mediaType.contains("charset=UTF-8")) {
+			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+		}
+
+		// Metrics topic bypasses transaction logging; everything else is transactional.
+		if (!topic.equalsIgnoreCase(metricTopicname)) {
+			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+		} else {
+			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+		}
+	}
+
+	/**
+	 * Attaches transaction log details (topic, publisher identity, batch and
+	 * sequence ids, message length in bytes) to a message before publishing.
+	 */
+	private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+			final String messageCreationTime, final int messageSequence, final Long batchId,
+			final boolean transactionEnabled) {
+		LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+				transactionEnabled);
+		logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+		msg.setTransactionEnabled(transactionEnabled);
+		msg.setLogDetails(logDetails);
+	}
+
+	/**
+	 * Builds the per-message transaction log record from the request and batch
+	 * metadata.
+	 *
+	 * NOTE(review): this is an exact duplicate of generateLogDetails in the
+	 * sibling event service class in this commit — consider extracting a shared
+	 * helper so the two copies cannot drift apart.
+	 */
+	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+		LogDetails logDetails = new LogDetails();
+		logDetails.setTopicId(topicName);
+		logDetails.setMessageTimestamp(messageTimestamp);
+		logDetails.setPublisherId(Utils.getUserApiKey(request));
+		logDetails.setPublisherIp(request.getRemoteHost());
+		logDetails.setMessageBatchId(batchId);
+		logDetails.setMessageSequence(String.valueOf(messageSequence));
+		logDetails.setTransactionEnabled(transactionEnabled);
+		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+		logDetails.setServerIp(request.getLocalAddr());
+		return logDetails;
+	}
+
+	/**
+	 * Publishes events without transaction logging (used for the metrics topic).
+	 * Messages parsed from the request body are accumulated and flushed to the
+	 * Kafka publisher in batches of at most "event.batch.length" (default 16384).
+	 *
+	 * @param ctx              request context supplying publisher and metrics
+	 * @param topic            destination topic
+	 * @param msg              raw request body containing the events
+	 * @param defaultPartition partition key applied when a message has none
+	 * @param chunked          whether the request body is chunked
+	 * @param mediaType        request content type (drives event parsing)
+	 * @throws CambriaApiException wrapping any parse/publish failure as a
+	 *                             PARTIAL_PUBLISH_MSGS error response
+	 */
+	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+			String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException {
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+
+		long maxEventBatch = 1024 * 16;
+		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+		if (null != batchlen)
+			maxEventBatch = Long.parseLong(batchlen);
+
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		try {
+			// for each message...
+			Publisher.message m = null;
+			while ((m = events.next()) != null) {
+				// add the message to the batch
+				batch.add(m);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						m.getMessage());
+				kms.add(data);
+				// Flush when the batch is full. Was "sizeNow > maxEventBatch",
+				// which allowed batches to grow one past the configured maximum and
+				// was inconsistent with pushEventsWithTransaction; use >= so the
+				// limit is honoured.
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					count += sizeNow;
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+				kms.clear();
+				batch.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response (currently unused: respondOk remains commented out)
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			// DMaaPResponseBuilder.respondOk(ctx, response);
+
+		} catch (Exception excp) {
+			// Recover status/message from a CambriaApiException body; any other
+			// failure becomes a 404 PARTIAL_PUBLISH_MSGS error.
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg = null;
+			if (excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+
+			}
+			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+							+ "." + errorMsg,
+					null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+					null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Publishes events with per-message transaction logging. Each message gets a
+	 * LogDetails record (batch id, sequence, publisher identity) and is wrapped
+	 * in a JSON envelope {"message": ..., "transactionId": ...} before being
+	 * batched to the Kafka publisher; batch start/end markers are logged with
+	 * timestamps. Batch size is capped at "event.batch.length" (default 16384).
+	 *
+	 * @param ctx          request context supplying publisher and metrics
+	 * @param inputStream  raw request body containing the events
+	 * @param topic        destination topic
+	 * @param partitionKey partition key applied when a message has none
+	 * @param requestTime  request receipt time, recorded in transaction logs
+	 * @param chunked      whether the request body is chunked
+	 * @param mediaType    request content type (drives event parsing)
+	 * @throws CambriaApiException wrapping any parse/publish failure as a
+	 *                             PARTIAL_PUBLISH_MSGS error response
+	 */
+	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+			throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+			CambriaApiException {
+
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+		long maxEventBatch = 1024 * 16;
+		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+		if (null != evenlen)
+			maxEventBatch = Long.parseLong(evenlen);
+
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		Publisher.message m = null;
+		int messageSequence = 1;
+		Long batchId = 1L;
+		final boolean transactionEnabled = true;
+		int publishBatchCount = 0;
+		// NOTE(review): SimpleDateFormat is not thread-safe; this instance is
+		// method-local so usage here is safe.
+		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+		// LOG.warn("Batch Start Id: " +
+		// Utils.getFromattedBatchSequenceId(batchId));
+		try {
+			// for each message...
+			batchId = DMaaPContext.getBatchID();
+
+			String responseTransactionId = null;
+
+			while ((m = events.next()) != null) {
+
+				// LOG.warn("Batch Start Id: " +
+				// Utils.getFromattedBatchSequenceId(batchId));
+
+				// Stamp transaction metadata onto the message before wrapping it.
+				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+						transactionEnabled);
+				messageSequence++;
+
+				// add the message to the batch
+				batch.add(m);
+
+				responseTransactionId = m.getLogDetails().getTransactionId();
+
+				// Wrap payload + transaction id in a JSON envelope for the wire.
+				JSONObject jsonObject = new JSONObject();
+				jsonObject.put("message", m.getMessage());
+				jsonObject.put("transactionId", responseTransactionId);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						jsonObject.toString());
+				kms.add(data);
+
+				// check if the batch is full
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					String startTime = sdf.format(new Date());
+					LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+							+ batchId + "]");
+					try {
+						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+						// transactionLogs(batch)
+						for (message msg : batch) {
+							LogDetails logDetails = msg.getLogDetails();
+							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+						}
+					} catch (Exception excp) {
+						// Recover status/message from a CambriaApiException body;
+						// anything else becomes a 404 PARTIAL_PUBLISH_MSGS error.
+						int status = HttpStatus.SC_NOT_FOUND;
+						String errorMsg = null;
+						if (excp instanceof CambriaApiException) {
+							status = ((CambriaApiException) excp).getStatus();
+							JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+							JSONObject errObject = new JSONObject(jsonTokener);
+							errorMsg = (String) errObject.get("message");
+						}
+						ErrorResponse errRes = new ErrorResponse(status,
+								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+								"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+										+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+								null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+								ctx.getRequest().getRemoteHost(), null, null);
+						LOG.info(errRes.toString());
+						throw new CambriaApiException(errRes);
+					}
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					publishBatchCount = sizeNow;
+					count += sizeNow;
+					// batchId++;
+					String endTime = sdf.format(new Date());
+					LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+							+ batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+							+ ",Batch End Time=" + endTime + "]");
+					// New batch id for the next chunk of messages.
+					batchId = DMaaPContext.getBatchID();
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				String startTime = sdf.format(new Date());
+				LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+						+ batchId + "]");
+				try {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					// transactionLogs(batch)
+					for (message msg : batch) {
+						LogDetails logDetails = msg.getLogDetails();
+						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+					}
+				} catch (Exception excp) {
+					int status = HttpStatus.SC_NOT_FOUND;
+					String errorMsg = null;
+					if (excp instanceof CambriaApiException) {
+						status = ((CambriaApiException) excp).getStatus();
+						JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+						JSONObject errObject = new JSONObject(jsonTokener);
+						errorMsg = (String) errObject.get("message");
+					}
+
+					ErrorResponse errRes = new ErrorResponse(status,
+							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+							"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+									+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+							null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+							ctx.getRequest().getRemoteHost(), null, null);
+					LOG.info(errRes.toString());
+					throw new CambriaApiException(errRes);
+				}
+				kms.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+				// batchId++;
+				String endTime = sdf.format(new Date());
+				publishBatchCount = sizeNow;
+				LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+						+ ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+						+ endTime + "]");
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response (currently unused; nothing is written back here)
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+
+		} catch (Exception excp) {
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg = null;
+			if (excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+			}
+
+			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+							+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+					ctx.getRequest().getRemoteHost(), null, null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java
new file mode 100644
index 0000000..c7db60d
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java
@@ -0,0 +1,115 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.MetricsService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.metrics.CdmMeasuredItem;
+
+/**
+ *
+ *
+ * This will provide all the generated metrics details also it can provide the
+ * get metrics details
+ *
+ *
+ * @author author
+ *
+ *
+ */
+@Component
+public class MetricsServiceImpl implements MetricsService {
+
+	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class);
+
+	/**
+	 * Writes the complete metrics snapshot as JSON to the response.
+	 *
+	 * @param ctx request/response context
+	 * @throws IOException if the response cannot be written
+	 */
+	@Override
+	public void get(DMaaPContext ctx) throws IOException {
+		LOG.info("Inside : MetricsServiceImpl : get()");
+		final MetricsSet allMetrics = ctx.getConfigReader().getfMetrics();
+		final JSONObject snapshot = allMetrics.toJson();
+		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+		DMaaPResponseBuilder.respondOk(ctx, snapshot);
+		LOG.info("============ Metrics generated : " + snapshot.toString() + "=================");
+
+	}
+
+	/**
+	 * Writes a single named metric (summary plus raw value) as JSON.
+	 *
+	 * @param ctx  request/response context
+	 * @param name metric name to look up
+	 * @throws IOException         if the response cannot be written
+	 * @throws CambriaApiException with a 404 status when no metric has that name
+	 */
+	@Override
+	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException {
+		LOG.info("Inside : MetricsServiceImpl : getMetricByName()");
+		final CdmMeasuredItem item = ctx.getConfigReader().getfMetrics().getItem(name);
+		if (item == null) {
+			throw new CambriaApiException(404, "No metric named [" + name + "].");
+		}
+
+		final JSONObject entry = new JSONObject();
+		entry.put("summary", item.summarize());
+		entry.put("raw", item.getRawValueString());
+
+		final JSONObject payload = new JSONObject();
+		payload.put(name, entry);
+
+		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+		DMaaPResponseBuilder.respondOk(ctx, payload);
+		LOG.info("============ Metrics generated : " + entry.toString() + "=================");
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java
new file mode 100644
index 0000000..a04e110
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java
@@ -0,0 +1,649 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.TopicBean;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.TopicService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * @author author
+ *
+ */
+@Service
+public class TopicServiceImpl implements TopicService {
+
+	//private static final Logger LOGGER = Logger.getLogger(TopicServiceImpl.class);
+	/** EELF logger used for all topic-service request tracing. */
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
+	/** Configurable error-message fragments used when building error responses. */
+	@Autowired
+	private DMaaPErrorMessages errorMessages;
+
+	//@Value("${msgRtr.topicfactory.aaf}")
+	//private String mrFactory;
+
+	/**
+	 * Responds with a JSON document listing the names of every known topic.
+	 *
+	 * @param dmaapContext request context
+	 * @throws JSONException if the response document cannot be assembled
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 */
+	@Override
+	public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+
+		LOGGER.info("Fetching list of all the topics.");
+
+		final JSONArray topicsList = new JSONArray();
+		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+			topicsList.put(topic.getName());
+		}
+
+		final JSONObject json = new JSONObject();
+		json.put("topics", topicsList);
+
+		LOGGER.info("Returning list of all the topics.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, json);
+	}
+
+	/**
+	 * Responds with a JSON document describing every known topic: its name,
+	 * owner and whether transactions are enabled.
+	 *
+	 * @param dmaapContext request context
+	 * @throws JSONException if the response document cannot be assembled
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 */
+	public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+
+		LOGGER.info("Fetching list of all the topics.");
+
+		final JSONArray topicsList = new JSONArray();
+		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+			final JSONObject topicEntry = new JSONObject();
+			topicEntry.put("topicName", topic.getName());
+			topicEntry.put("owner", topic.getOwner());
+			topicEntry.put("txenabled", topic.isTransactionEnabled());
+			topicsList.put(topicEntry);
+		}
+
+		final JSONObject json = new JSONObject();
+		json.put("topics", topicsList);
+
+		LOGGER.info("Returning list of all the topics.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, json);
+	}
+
+
+	/**
+	 * Responds with the details (name, description, owner and ACLs) of one topic.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName name of the topic to describe
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if no topic with the given name exists
+	 */
+	@Override
+	public void getTopic(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException {
+
+		LOGGER.info("Fetching details of topic " + topicName);
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (topic == null) {
+			LOGGER.error("Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+		}
+
+		final JSONObject topicJson = new JSONObject();
+		topicJson.put("name", topic.getName());
+		topicJson.put("description", topic.getDescription());
+
+		// Owner and ACL entries are optional; they are omitted when not configured.
+		if (topic.getOwners() != null) {
+			topicJson.put("owner", topic.getOwners().iterator().next());
+		}
+		if (topic.getReaderAcl() != null) {
+			topicJson.put("readerAcl", aclToJson(topic.getReaderAcl()));
+		}
+		if (topic.getWriterAcl() != null) {
+			topicJson.put("writerAcl", aclToJson(topic.getWriterAcl()));
+		}
+
+		LOGGER.info("Returning details of topic " + topicName);
+		DMaaPResponseBuilder.respondOk(dmaapContext, topicJson);
+	}
+
+
+	/**
+	 * Creates a new topic after authenticating/authorizing the caller.
+	 *
+	 * Authentication paths, evaluated in order:
+	 * 1. An authenticated API-key user requesting a topic name that matches the
+	 *    AAF-enforced name (the "enforced.topic.name.AAF" property) is rejected.
+	 * 2. A request with no user, no Authorization header, no AppName header and
+	 *    no cookie is rejected as unauthenticated.
+	 * 3. A request with no API-key user but an Authorization header or cookie is
+	 *    authorized via AAF against the "&lt;factory&gt;&lt;namespace&gt;|create" permission.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicBean topic definition: name, description, partition and
+	 *        replication counts, transaction-enabled flag
+	 * @throws CambriaApiException when the topic payload cannot be parsed
+	 * @throws DMaaPAccessDeniedException when authentication/authorization fails
+	 * @throws IOException when the response cannot be written
+	 * @throws TopicExistsException when the topic already exists
+	 */
+	@Override
+	public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
+			throws CambriaApiException, DMaaPAccessDeniedException,IOException, TopicExistsException {
+
+		LOGGER.info("Creating topic " + topicBean.getTopicName());
+
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+		String key = null;
+		String appName=dmaapContext.getRequest().getHeader("AppName");
+		String enfTopicName= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
+
+		// Path 1: an API-key user may not create a topic whose name matches the AAF-enforced name.
+		// NOTE(review): the check uses indexOf, so any topic name *containing* enfTopicName is
+		// rejected, not only exact matches — confirm this is intended.
+		if(user != null)
+		{
+			key = user.getKey();
+
+			if( enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >=0 ) {
+
+				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+				LOGGER.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+
+			}
+		}
+
+		// Path 2: fully anonymous request — no user, no Authorization header, and neither an
+		// AppName header nor a cookie — is rejected as unauthenticated.
+		//else if (user==null && (null==dmaapContext.getRequest().getHeader("Authorization") && null == dmaapContext.getRequest().getHeader("cookie")) ) {
+		else if (user == null && null==dmaapContext.getRequest().getHeader("Authorization") &&
+				(null == appName && null == dmaapContext.getRequest().getHeader("cookie"))) {
+			LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+					errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+			LOGGER.info(errRes.toString());
+			throw new DMaaPAccessDeniedException(errRes);
+		}
+
+		// Path 3: no API-key user but credentials were supplied.
+		if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization") ||
+				null != dmaapContext.getRequest().getHeader("cookie"))) {
+			// ACL authentication is not provided so we will use the aaf authentication
+			LOGGER.info("Authorization the topic");
+
+			String permission = "";
+			String nameSpace="";
+			// Namespace is everything before the last '.' of the topic name; derived only
+			// when a '.' appears past index 1, otherwise left empty.
+			if(topicBean.getTopicName().indexOf(".")>1)
+				nameSpace = topicBean.getTopicName().substring(0,topicBean.getTopicName().lastIndexOf("."));
+
+			String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
+
+			permission = mrFactoryVal+nameSpace+"|create";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+
+			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+			{
+
+				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+				LOGGER.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+
+			}else{
+				// AAF authentication succeeded: the AAF principal's name becomes the owner key.
+				key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
+				LOGGER.info("key ==================== "+key);
+
+			}
+		}
+
+		// All paths passed (or an API-key user with a non-enforced name): create the topic.
+		try {
+			final String topicName = topicBean.getTopicName();
+			final String desc = topicBean.getTopicDescription();
+
+			final int partitions = topicBean.getPartitionCount();
+
+			final int replicas = topicBean.getReplicationCount();
+			boolean transactionEnabled = topicBean.isTransactionEnabled();
+
+
+			final Broker metabroker = getMetaBroker(dmaapContext);
+			final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas,
+					transactionEnabled);
+
+			LOGGER.info("Topic created successfully. Sending response");
+			DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
+		} catch (JSONException excp) {
+
+			LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+					DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),
+					errorMessages.getIncorrectJson());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+ /**
+ * @param dmaapContext
+ * @param topicName
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws TopicExistsException
+ * @throws CambriaApiException
+ * @throws AccessDeniedException
+ */
+ @Override
+ public void deleteTopic(DMaaPContext dmaapContext, String topicName)
+ throws IOException, ConfigDbException, CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+ LOGGER.info("Deleting topic " + topicName);
+ final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+ if (user == null && null!=dmaapContext.getRequest().getHeader("Authorization")) {
+ LOGGER.info("Authenticating the user, as ACL authentication is not provided");
+// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ String permission = "";
+ String nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
+ String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
+// String tokens[] = topicName.split(".mr.topic.");
+ permission = mrFactoryVal+nameSpace+"|destroy";
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+ {
+ LOGGER.error("Failed to delete topi"+topicName+". Authentication failed.");
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" delete "+errorMessages.getNotPermitted2());
+ LOGGER.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+ }
+
+
+ }
+
+ final Broker metabroker = getMetaBroker(dmaapContext);
+ final Topic topic = metabroker.getTopic(topicName);
+
+ if (topic == null) {
+ LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+ throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+ }
+
+ metabroker.deleteTopic(topicName);
+
+ LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
+ DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
+
+ }
+
+	/**
+	 * Convenience accessor for the Kafka meta broker held by this request's
+	 * configuration reader.
+	 *
+	 * @param dmaapContext request context
+	 * @return the configured {@link DMaaPKafkaMetaBroker}
+	 */
+	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+	}
+
+	/**
+	 * Responds with the writer ACL (the permitted publishers) of a topic.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic whose publishers are requested
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+	@Override
+	public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException {
+		LOGGER.info("Retrieving list of all the publishers for topic " + topicName);
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException(
+					"Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(topic.getWriterAcl()));
+	}
+
+	/**
+	 * Renders an ACL as JSON: an "enabled" flag plus the list of "users".
+	 * A null ACL is rendered as disabled with an empty user list.
+	 *
+	 * @param acl the ACL to render, possibly null
+	 * @return the JSON representation
+	 */
+	private static JSONObject aclToJson(NsaAcl acl) {
+		final JSONObject aclJson = new JSONObject();
+		final JSONArray users = new JSONArray();
+		if (acl == null) {
+			aclJson.put("enabled", false);
+		} else {
+			aclJson.put("enabled", acl.isActive());
+			for (String user : acl.getUsers()) {
+				users.put(user);
+			}
+		}
+		aclJson.put("users", users);
+		return aclJson;
+	}
+
+	/**
+	 * Responds with the reader ACL (the permitted consumers) of a topic.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic whose consumers are requested
+	 * @throws IOException if the response cannot be written
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+	@Override
+	public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws IOException, ConfigDbException, TopicExistsException {
+		LOGGER.info("Retrieving list of all the consumers for topic " + topicName);
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException(
+					"Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		LOGGER.info("Returning list of all the consumers for topic " + topicName + ". Sending response.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(topic.getReaderAcl()));
+	}
+
+	/**
+	 * Renders a topic as JSON: name, description, owner and both ACLs.
+	 *
+	 * @param t the topic to render
+	 * @return the JSON representation
+	 */
+	private static JSONObject topicToJson(Topic t) {
+		final JSONObject topicJson = new JSONObject();
+		topicJson.put("name", t.getName());
+		topicJson.put("description", t.getDescription());
+		topicJson.put("owner", t.getOwner());
+		topicJson.put("readerAcl", aclToJson(t.getReaderAcl()));
+		topicJson.put("writerAcl", aclToJson(t.getWriterAcl()));
+		return topicJson;
+	}
+
+	/**
+	 * Grants write (publish) access on a topic to a producer by adding it to the
+	 * topic's writer ACL. The authenticated user (possibly null) is passed to the
+	 * topic, which enforces whether that user may modify the ACL.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic to grant access on
+	 * @param producerId identity to add to the topic's writer ACL
+	 * @throws AccessDeniedException if the authenticated user may not modify the ACL
+	 * @throws ConfigDbException if the topic store cannot be read or written
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on API-level failures
+	 */
+	@Override
+	public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException {
+
+		LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to permit write access to producer [" + producerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.permitWritesFromUser(producerId, user);
+
+		LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher.");
+	}
+
+	/**
+	 * Revokes write (publish) access on a topic from a producer by removing it
+	 * from the topic's writer ACL.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic to revoke access on
+	 * @param producerId identity to remove from the topic's writer ACL
+	 * @throws AccessDeniedException if the authenticated user may not modify the ACL
+	 * @throws ConfigDbException if the topic store cannot be read or written
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws DMaaPAccessDeniedException if access is denied
+	 */
+	@Override
+	public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to revoke write access to producer [" + producerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.denyWritesFromUser(producerId, user);
+
+		LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher.");
+	}
+
+	/**
+	 * Grants read (consume) access on a topic to a consumer by adding it to the
+	 * topic's reader ACL.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic to grant access on
+	 * @param consumerId identity to add to the topic's reader ACL
+	 * @throws AccessDeniedException if the authenticated user may not modify the ACL
+	 * @throws ConfigDbException if the topic store cannot be read or written
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws DMaaPAccessDeniedException if access is denied
+	 */
+	@Override
+	public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.permitReadsByUser(consumerId, user);
+
+		LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+				"Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "].");
+	}
+
+	/**
+	 * Revokes read (consume) access on a topic from a consumer by removing it
+	 * from the topic's reader ACL.
+	 *
+	 * @param dmaapContext request context
+	 * @param topicName topic to revoke access on
+	 * @param consumerId identity to remove from the topic's reader ACL
+	 * @throws AccessDeniedException if the authenticated user may not modify the ACL
+	 * @throws ConfigDbException if the topic store cannot be read or written
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws DMaaPAccessDeniedException if access is denied
+	 */
+	@Override
+	public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			// Fixed: the message previously said "permit" on the revoke path.
+			throw new TopicExistsException("Failed to revoke read access to consumer [" + consumerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.denyReadsByUser(consumerId, user);
+
+		LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+				"Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "].");
+	}
+
+
+
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java
new file mode 100644
index 0000000..2299b65
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.TransactionService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
+import org.springframework.stereotype.Service;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * Placeholder implementation of the transaction service. Once the transaction
+ * REST gateway is in use it will provide all the transaction details, such as
+ * fetching all transactional objects or the details of one particular
+ * transaction object.
+ *
+ * All methods are currently stubs; the intended implementations are kept below
+ * as commented-out code for reference.
+ */
+@Service
+public class TransactionServiceImpl implements TransactionService {
+
+	/** Stub — transaction verification is not implemented yet. */
+	@Override
+	public void checkTransaction(TransactionObj trnObj) {
+		/* Need to implement the method */
+	}
+
+	/**
+	 * Stub — intended to respond with the ids of all stored transaction objects
+	 * (see the commented-out planned implementation below). Currently a no-op.
+	 *
+	 * @param dmaapContext request context
+	 * @throws ConfigDbException declared for the future implementation
+	 * @throws IOException declared for the future implementation
+	 */
+	@Override
+	public void getAllTransactionObjs(DMaaPContext dmaapContext)
+			throws ConfigDbException, IOException {
+
+		/*
+		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
+		 *
+		 * LOG.info("configReader : "+configReader.toString());
+		 *
+		 * final JSONObject result = new JSONObject (); final JSONArray
+		 * transactionIds = new JSONArray (); result.put ( "transactionIds",
+		 * transactionIds );
+		 *
+		 * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb =
+		 * configReader.getfTranDb();
+		 *
+		 * for (String transactionId : transDb.loadAllTransactionObjs()) {
+		 * transactionIds.put (transactionId); } LOG.info(
+		 * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : "
+		 * + transactionIds.toString()+"===========");
+		 * DMaaPResponseBuilder.respondOk(dmaapContext, result);
+		 */
+	}
+
+	/**
+	 * Stub — intended to respond with the details of one transaction object
+	 * (see the commented-out planned implementation below). Currently a no-op.
+	 *
+	 * @param dmaapContext request context
+	 * @param transactionId id of the transaction object to fetch
+	 * @throws ConfigDbException declared for the future implementation
+	 * @throws JSONException declared for the future implementation
+	 * @throws IOException declared for the future implementation
+	 */
+	@Override
+	public void getTransactionObj(DMaaPContext dmaapContext,
+			String transactionId) throws ConfigDbException, JSONException,
+			IOException {
+
+		/*
+		 * if (null != transactionId) {
+		 *
+		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
+		 *
+		 * DMaaPTransactionObj trnObj;
+		 *
+		 * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId);
+		 *
+		 *
+		 * if (null != trnObj) { trnObj.serialize(); JSONObject result =
+		 * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext,
+		 * result);
+		 * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+
+		 * result.toString()+"==========="); return; }
+		 *
+		 * } LOG.info(
+		 * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. "
+		 * +"===========");
+		 */
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java
new file mode 100644
index 0000000..7582d78
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java
@@ -0,0 +1,206 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.UIService;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
+import org.springframework.stereotype.Service;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+import kafka.common.TopicExistsException;
+
+/**
+ * @author author
+ *
+ */
+@Service
+public class UIServiceImpl implements UIService {
+
+ //private static final Logger LOGGER = Logger.getLogger(UIServiceImpl.class);
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class);
+ /**
+ * Returning template of hello page
+ * @param dmaapContext
+ * @throws IOException
+ */
+ @Override
+ public void hello(DMaaPContext dmaapContext) throws IOException {
+ LOGGER.info("Returning template of hello page.");
+ DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html");
+ }
+
+ /**
+ * Fetching list of all api keys and returning in a templated form for display.
+ * @param dmaapContext
+ * @throws ConfigDbException
+ * @throws IOException
+ */
+ @Override
+ public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+ // TODO - We need to work on the templates and how data will be set in
+ // the template
+ LOGGER.info("Fetching list of all api keys and returning in a templated form for display.");
+ Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords();
+
+ LinkedList<JSONObject> keyList = new LinkedList<JSONObject>();
+
+ JSONObject jsonList = new JSONObject();
+
+ for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) {
+ final NsaSimpleApiKey key = e.getValue();
+ final JSONObject jsonObject = new JSONObject();
+ jsonObject.put("key", key.getKey());
+ jsonObject.put("email", key.getContactEmail());
+ jsonObject.put("description", key.getDescription());
+ keyList.add(jsonObject);
+ }
+
+ jsonList.put("apiKeys", keyList);
+
+ LOGGER.info("Returning list of all the api keys in JSON format for the template.");
+ // "templates/apiKeyList.html"
+ DMaaPResponseBuilder.respondOk(dmaapContext, jsonList);
+
+ }
+
+ /**
+ * @param dmaapContext
+ * @param apiKey
+ * @throws Exception
+ */
+ @Override
+ public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws Exception {
+ // TODO - We need to work on the templates and how data will be set in
+ // the template
+ LOGGER.info("Fetching detials of apikey: " + apiKey);
+ final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey);
+
+ if (null != key) {
+ LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response");
+ DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
+ } else {
+ LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
+ throw new Exception("Key [" + apiKey + "] not found.");
+ }
+
+ }
+
+ /**
+ * Fetching list of all the topics
+ * @param dmaapContext
+ * @throws ConfigDbException
+ * @throws IOException
+ */
+ @Override
+ public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+ // TODO - We need to work on the templates and how data will be set in
+ // the template
+ LOGGER.info("Fetching list of all the topics and returning in a templated form for display");
+ List<Topic> topicsList = getMetaBroker(dmaapContext).getAllTopics();
+
+ JSONObject jsonObject = new JSONObject();
+
+ JSONArray topicsArray = new JSONArray();
+
+ List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics();
+
+ for (Topic topic : topicList) {
+ JSONObject obj = new JSONObject();
+ obj.put("topicName", topic.getName());
+ obj.put("description", topic.getDescription());
+ obj.put("owner", topic.getOwner());
+ topicsArray.put(obj);
+ }
+
+ jsonObject.put("topics", topicsList);
+
+ LOGGER.info("Returning the list of topics in templated format for display.");
+ DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject);
+
+ }
+
+ /**
+ * @param dmaapContext
+ * @param topicName
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws TopicExistsException
+ */
+ @Override
+ public void getTopic(DMaaPContext dmaapContext, String topicName)
+ throws ConfigDbException, IOException, TopicExistsException {
+ // TODO - We need to work on the templates and how data will be set in
+ // the template
+ LOGGER.info("Fetching detials of apikey: " + topicName);
+ Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+ if (null == topic) {
+ LOGGER.error("Topic [" + topicName + "] does not exist.");
+ throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+ }
+
+ JSONObject json = new JSONObject();
+ json.put("topicName", topic.getName());
+ json.put("description", topic.getDescription());
+ json.put("owner", topic.getOwner());
+
+ LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
+ DMaaPResponseBuilder.respondOk(dmaapContext, json);
+
+ }
+
+ /**
+ *
+ * @param dmaapContext
+ * @return
+ */
+ private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+ return dmaapContext.getConfigReader().getfApiKeyDb();
+
+ }
+
+ /**
+ *
+ * @param dmaapContext
+ * @return
+ */
+ private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+ return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java
new file mode 100644
index 0000000..a721885
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
+/**
+ *
+ * @author author
+ *
+ * @param <K>
+ */
+public interface DMaaPTransactionFactory<K extends DMaaPTransactionObj> {
+
+ /**
+ *
+ * @param data
+ * @return
+ */
+ K makeNewTransactionObj ( String data );
+ /**
+ *
+ * @param id
+ * @return
+ */
+ K makeNewTransactionId ( String id );
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java
new file mode 100644
index 0000000..1fa299d
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java
@@ -0,0 +1,83 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
+
+import org.json.JSONObject;
+/**
+ * This is an interface for DMaaP transactional logging object class.
+ * @author author
+ *
+ */
+public interface DMaaPTransactionObj {
+ /**
+ * This will get the transaction id
+ * @return id transactionId
+ */
+ String getId();
+ /**
+ * This will set the transaction id
+ * @param id transactionId
+ */
+ void setId(String id);
+ /**
+ * This will sync the transaction object mapping
+ * @return String or null
+ */
+ String serialize();
+ /**
+ * get the total message count once the publisher published
+ * @return long totalMessageCount
+ */
+ long getTotalMessageCount();
+ /**
+ * set the total message count once the publisher published
+ * @param totalMessageCount
+ */
+ void setTotalMessageCount(long totalMessageCount);
+ /**
+ * get the total Success Message Count once the publisher published
+ * @return getSuccessMessageCount
+ */
+ long getSuccessMessageCount();
+ /**
+ * set the total Success Message Count once the publisher published
+ * @param successMessageCount
+ */
+ void setSuccessMessageCount(long successMessageCount);
+ /**
+ * get the failure Message Count once the publisher published
+ * @return failureMessageCount
+ */
+ long getFailureMessageCount();
+ /**
+ * set the failure Message Count once the publisher published
+ * @param failureMessageCount
+ */
+ void setFailureMessageCount(long failureMessageCount);
+
+ /**
+ * wrapping the data into json object
+ * @return JSONObject
+ */
+ JSONObject asJsonObject();
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java
new file mode 100644
index 0000000..a391842
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java
@@ -0,0 +1,86 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
+
+import java.util.Set;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaSecurityManagerException;
+
+
+/**
+ * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
+ * of this DB must support concurrent access.
+ * @author author
+ *
+ * @param <K> DMaaPTransactionObj
+ */
+public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
+
+
+ /**
+ * Create a new Transaction Object. If one exists,
+ * @param id
+ * @return the new Transaction record
+ * @throws ConfigDbException
+ */
+ K createTransactionObj (String id) throws KeyExistsException, ConfigDbException;
+
+
+ /**
+ * An exception to signal a Transaction object already exists
+ * @author author
+ *
+ */
+ public static class KeyExistsException extends NsaSecurityManagerException
+ {
+ /**
+ * If the key exists
+ * @param key
+ */
+ public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); }
+ private static final long serialVersionUID = 1L;
+ }
+
+ /**
+ * Save a Transaction Object record. This must be used after changing auxiliary data on the record.
+ * Note that the transaction must exist (via createTransactionObj).
+ * @param transactionObj
+ * @throws ConfigDbException
+ */
+ void saveTransactionObj ( K transactionObj ) throws ConfigDbException;
+
+ /**
+ * Load an Transaction Object record based on the Transaction ID value
+ * @param transactionId
+ * @return a transaction record or null
+ * @throws ConfigDbException
+ */
+ K loadTransactionObj ( String transactionId ) throws ConfigDbException;
+
+ /**
+ * Load all Transaction objects.
+ * @return
+ * @throws ConfigDbException
+ */
+ Set<String> loadAllTransactionObjs () throws ConfigDbException;
+} \ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java
new file mode 100644
index 0000000..e79bf01
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java
@@ -0,0 +1,202 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
+
+import org.json.JSONObject;
+
+/**
+ * This is the class which will have the transaction enabled logging object
+ * details
+ *
+ * @author author
+ *
+ */
+public class TransactionObj implements DMaaPTransactionObj {
+
+ private String id;
+ private String createTime;
+ private long totalMessageCount;
+ private long successMessageCount;
+ private long failureMessageCount;
+ private JSONObject fData = new JSONObject();
+ private TrnRequest trnRequest;
+ private static final String kAuxData = "transaction";
+
+ /**
+ * Initializing constructor
+ * put the json data for transaction enabled logging
+ *
+ * @param data
+ */
+ public TransactionObj(JSONObject data) {
+ fData = data;
+
+ // check for required fields (these throw if not present)
+ getId();
+ getTotalMessageCount();
+ getSuccessMessageCount();
+ getFailureMessageCount();
+
+ // make sure we've got an aux data object
+ final JSONObject aux = fData.optJSONObject(kAuxData);
+ if (aux == null) {
+ fData.put(kAuxData, new JSONObject());
+ }
+ }
+
+ /**
+ * this constructor will have the details of transaction id,
+ * totalMessageCount successMessageCount, failureMessageCount to get the
+ * transaction object
+ *
+ * @param id
+ * @param totalMessageCount
+ * @param successMessageCount
+ * @param failureMessageCount
+ */
+ public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) {
+ this.id = id;
+ this.totalMessageCount = totalMessageCount;
+ this.successMessageCount = successMessageCount;
+ this.failureMessageCount = failureMessageCount;
+
+ }
+
+ /**
+ * The constructor passing only transaction id
+ *
+ * @param id
+ */
+ public TransactionObj(String id) {
+ this.id = id;
+ }
+
+ /**
+ * Wrapping the data into json object
+ *
+ * @return JSONObject
+ */
+ public JSONObject asJsonObject() {
+ final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData));
+ return full;
+ }
+
+ /**
+ * To get the transaction id
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * To set the transaction id
+ */
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public String getCreateTime() {
+ return createTime;
+ }
+
+ /**
+ *
+ * @param createTime
+ */
+ public void setCreateTime(String createTime) {
+ this.createTime = createTime;
+ }
+
+ @Override
+ public String serialize() {
+ fData.put("transactionId", id);
+ fData.put("totalMessageCount", totalMessageCount);
+ fData.put("successMessageCount", successMessageCount);
+ fData.put("failureMessageCount", failureMessageCount);
+ return fData.toString();
+ }
+
+ public long getTotalMessageCount() {
+ return totalMessageCount;
+ }
+
+ public void setTotalMessageCount(long totalMessageCount) {
+ this.totalMessageCount = totalMessageCount;
+ }
+
+ public long getSuccessMessageCount() {
+ return successMessageCount;
+ }
+
+ public void setSuccessMessageCount(long successMessageCount) {
+ this.successMessageCount = successMessageCount;
+ }
+
+ public long getFailureMessageCount() {
+ return failureMessageCount;
+ }
+
+ /**
+ * @param failureMessageCount
+ */
+ public void setFailureMessageCount(long failureMessageCount) {
+ this.failureMessageCount = failureMessageCount;
+ }
+
+ /**
+ *
+ * @return JSOnObject fData
+ */
+ public JSONObject getfData() {
+ return fData;
+ }
+
+ /**
+ * set the json object into data
+ *
+ * @param fData
+ */
+ public void setfData(JSONObject fData) {
+ this.fData = fData;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public TrnRequest getTrnRequest() {
+ return trnRequest;
+ }
+
+ /**
+ *
+ * @param trnRequest
+ */
+ public void setTrnRequest(TrnRequest trnRequest) {
+ this.trnRequest = trnRequest;
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java
new file mode 100644
index 0000000..bb8fe37
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java
@@ -0,0 +1,183 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
+
+/**
+ * Created for transaction enable logging details, this is nothing but a bean
+ * class.
+ *
+ * @author author
+ *
+ */
+public class TrnRequest {
+
+ private String id;
+ private String requestCreate;
+ private String requestHost;
+ private String serverHost;
+ private String messageProceed;
+ private String totalMessage;
+ private String clientType;
+ private String url;
+
+ /**
+ *
+ *
+ *
+ * @return id
+ *
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ *
+ *
+ * @param id
+ */
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ /**
+ *
+ *
+ * @return requestCreate
+ */
+ public String getRequestCreate() {
+ return requestCreate;
+ }
+
+ /**
+ *
+ * @param requestCreate
+ */
+ public void setRequestCreate(String requestCreate) {
+ this.requestCreate = requestCreate;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public String getRequestHost() {
+ return requestHost;
+ }
+
+ /**
+ *
+ * @param requestHost
+ */
+ public void setRequestHost(String requestHost) {
+ this.requestHost = requestHost;
+ }
+
+ /**
+ *
+ *
+ *
+ * @return
+ */
+ public String getServerHost() {
+ return serverHost;
+ }
+
+ /**
+ *
+ * @param serverHost
+ */
+ public void setServerHost(String serverHost) {
+ this.serverHost = serverHost;
+ }
+
+ /**
+ *
+ *
+ *
+ * @return
+ */
+ public String getMessageProceed() {
+ return messageProceed;
+ }
+
+ /**
+ *
+ * @param messageProceed
+ */
+ public void setMessageProceed(String messageProceed) {
+ this.messageProceed = messageProceed;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public String getTotalMessage() {
+ return totalMessage;
+ }
+
+ /**
+ *
+ * @param totalMessage
+ *
+ *
+ */
+ public void setTotalMessage(String totalMessage) {
+ this.totalMessage = totalMessage;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public String getClientType() {
+ return clientType;
+ }
+
+ /**
+ *
+ * @param clientType
+ *
+ */
+ public void setClientType(String clientType) {
+ this.clientType = clientType;
+ }
+
+ /**
+ *
+ * @return
+ */
+ public String getUrl() {
+ return url;
+ }
+
+ /**
+ *
+ * @param url
+ *
+ */
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
new file mode 100644
index 0000000..bd18794
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.impl;
+
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObj;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
+
+/**
+ * A factory for the simple Transaction implementation
+ *
+ *
+ * @author author
+ *
+ */
+public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
+ /**
+ *
+ * @param data
+ * @return DMaaPTransactionObj
+ */
+ @Override
+ public DMaaPTransactionObj makeNewTransactionObj(String data) {
+ JSONObject jsonObject = new JSONObject(data);
+ return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"),
+ jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount"));
+ }
+
+ /**
+ *
+ * @param id
+ * @return TransactionObj
+ *
+ *
+ */
+ @Override
+ public DMaaPTransactionObj makeNewTransactionId(String id) {
+ return new TransactionObj(id);
+ }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java
new file mode 100644
index 0000000..34951b8
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java
@@ -0,0 +1,497 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import javax.servlet.ServletException;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.curator.framework.CuratorFramework;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryConsumerFactory;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryMetaBroker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryQueue;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryQueuePublisher;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPCambriaLimiter;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPZkConfigDb;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Emailer;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.MemConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaAuthenticatorService;
+//import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+
+/**
+ * Class is created for all the configuration for rest and service layer
+ * integration.
+ *
+ */
+@Component
+public class ConfigurationReader {
+
+//	private rrNvReadable settings;
+	// Broker/consumer/publisher are constructor-injected with Kafka implementations,
+	// but may be swapped for in-memory implementations in servletSetup() when the
+	// configured broker type is "memory".
+	private Broker fMetaBroker;
+	private ConsumerFactory fConsumerFactory;
+	private Publisher fPublisher;
+	private MetricsSet fMetrics;
+	@Autowired
+	private DMaaPCambriaLimiter fRateLimiter;
+	private NsaApiDb<NsaSimpleApiKey> fApiKeyDb;
+	/* private DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb; */
+	private DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager;
+	// NOTE(review): never assigned here — its construction is commented out in the
+	// constructor, so getNsaSecurityManager() returns null unless a setter is called.
+	private NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager;
+	// Shared statically across all instances; last constructed instance wins.
+	private static CuratorFramework curator;
+	private ZkClient zk;
+	private DMaaPZkConfigDb fConfigDb;
+	// In-memory queue + meta broker, used only when the "memory" backend is selected.
+	private MemoryQueue q;
+	private MemoryMetaBroker mmb;
+	private Blacklist fIpBlackList;
+	private Emailer fEmailer;
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+	//private static final Logger log = Logger.getLogger(ConfigurationReader.class.toString());
+
+	/**
+	 * constructor to initialize all the values
+	 *
+	 * @param settings
+	 * @param fMetrics
+	 * @param zk
+	 * @param fConfigDb
+	 * @param fPublisher
+	 * @param curator
+	 * @param fConsumerFactory
+	 * @param fMetaBroker
+	 * @param q
+	 * @param mmb
+	 * @param fApiKeyDb
+	 * @param fSecurityManager
+	 * @throws missingReqdSetting
+	 * @throws invalidSettingValue
+	 * @throws ServletException
+	 * @throws KafkaConsumerCacheException
+	 * @throws ConfigDbException
+	 */
+	@Autowired
+	public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
+			@Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk,
+			@Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
+			@Qualifier("curator") CuratorFramework curator,
+			@Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
+			@Qualifier("dMaaPKafkaMetaBroker") Broker fMetaBroker, @Qualifier("q") MemoryQueue q,
+			@Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
+			/*
+			 * @Qualifier("dMaaPTranDb")
+			 * DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb,
+			 */
+			@Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
+			)
+			throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException {
+		//this.settings = settings;
+		this.fMetrics = fMetrics;
+		this.zk = zk;
+		this.fConfigDb = fConfigDb;
+		this.fPublisher = fPublisher;
+		// Static assignment: the curator handle is shared by all instances.
+		ConfigurationReader.curator = curator;
+		this.fConsumerFactory = fConsumerFactory;
+		this.fMetaBroker = fMetaBroker;
+		this.q = q;
+		this.mmb = mmb;
+		this.fApiKeyDb = fApiKeyDb;
+		/* this.fTranDb = fTranDb; */
+		this.fSecurityManager = fSecurityManager;
+
+		// Allowed clock skew for authentication, default 10 minutes (600000 ms).
+		// NOTE(review): currently read but unused — the NsaAuthenticatorService
+		// construction that consumed it is commented out below.
+		long allowedtimeSkewMs=600000L;
+		String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs");
+		if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM);
+
+		// boolean requireSecureChannel = true;
+		//String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel");
+		//if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel);
+		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true));
+		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel);
+
+		servletSetup();
+	}
+
+	/**
+	 * Post-construction setup: initializes the metrics sender, registers the
+	 * optional "admin" API key authenticator, selects the broker backend
+	 * (kafka vs. memory), and builds the IP blacklist and emailer.
+	 */
+	protected void servletSetup()
+			throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException {
+		try {
+
+			// Touches the metrics JSON before starting the periodic sender;
+			// the return value is intentionally unused here.
+			fMetrics.toJson();
+			fMetrics.setupCambriaSender();
+
+			// add the admin authenticator
+			// final String adminSecret = settings.getString ( CambriaConstants.kSetting_AdminSecret, null );
+			final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
+			//adminSecret = "fe3cCompound";
+			if ( adminSecret != null && adminSecret.length () > 0 )
+			{
+				try
+				{
+					// The "admin" key lives only in a fresh in-memory DB; it is
+					// re-created on every startup from the configured secret.
+					final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
+					adminDb.createApiKey ( "admin", adminSecret );
+					//nsaSecurityManager.addAuthenticator ( new OriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
+					fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
+				}
+				catch ( KeyExistsException e )
+				{
+					throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
+				}
+			}
+
+			// setup a backend
+			//final String type = settings.getString(CambriaConstants.kBrokerType, CambriaConstants.kBrokerType_Kafka);
+			String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
+			if (type==null) type = CambriaConstants.kBrokerType_Kafka;
+			if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
+				// Kafka is the default: keep the constructor-injected kafka components.
+				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
+
+			} else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
+				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
+
+				// Replace the kafka components with in-memory equivalents.
+				fPublisher = new MemoryQueuePublisher(q, mmb);
+				fMetaBroker = mmb;
+				fConsumerFactory = new MemoryConsumerFactory(q);
+			} else {
+				throw new IllegalArgumentException(
+						"Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
+			}
+
+			fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
+			this.fEmailer = new Emailer();
+
+			log.info("Broker Type is:" + type);
+
+		} catch (SecurityException e) {
+			throw new ServletException(e);
+		}
+	}
+
+	/**
+	 * method returns metaBroker
+	 *
+	 * @return
+	 */
+	public Broker getfMetaBroker() {
+		return fMetaBroker;
+	}
+
+	/**
+	 * method to set the metaBroker
+	 *
+	 * @param fMetaBroker
+	 */
+	public void setfMetaBroker(Broker fMetaBroker) {
+		this.fMetaBroker = fMetaBroker;
+	}
+
+	/**
+	 * method to get ConsumerFactory Object
+	 *
+	 * @return
+	 */
+	public ConsumerFactory getfConsumerFactory() {
+		return fConsumerFactory;
+	}
+
+	/**
+	 * method to set the consumerfactory object
+	 *
+	 * @param fConsumerFactory
+	 */
+	public void setfConsumerFactory(ConsumerFactory fConsumerFactory) {
+		this.fConsumerFactory = fConsumerFactory;
+	}
+
+	/**
+	 * method to get Publisher object
+	 *
+	 * @return
+	 */
+	public Publisher getfPublisher() {
+		return fPublisher;
+	}
+
+	/**
+	 * method to set Publisher object
+	 *
+	 * @param fPublisher
+	 */
+	public void setfPublisher(Publisher fPublisher) {
+		this.fPublisher = fPublisher;
+	}
+
+	/**
+	 * method to get MetricsSet Object
+	 *
+	 * @return
+	 */
+	public MetricsSet getfMetrics() {
+		return fMetrics;
+	}
+
+	/**
+	 * method to set MetricsSet Object
+	 *
+	 * @param fMetrics
+	 */
+	public void setfMetrics(MetricsSet fMetrics) {
+		this.fMetrics = fMetrics;
+	}
+
+	/**
+	 * method to get DMaaPCambriaLimiter object
+	 *
+	 * @return
+	 */
+	public DMaaPCambriaLimiter getfRateLimiter() {
+		return fRateLimiter;
+	}
+
+	/**
+	 * method to set DMaaPCambriaLimiter object
+	 *
+	 * @param fRateLimiter
+	 */
+	public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) {
+		this.fRateLimiter = fRateLimiter;
+	}
+
+	/**
+	 * Method to get DMaaPAuthenticator object
+	 *
+	 * @return
+	 */
+	public DMaaPAuthenticator<NsaSimpleApiKey> getfSecurityManager() {
+		return fSecurityManager;
+	}
+
+	/**
+	 * method to set DMaaPAuthenticator object
+	 *
+	 * @param fSecurityManager
+	 */
+	public void setfSecurityManager(DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager) {
+		this.fSecurityManager = fSecurityManager;
+	}
+
+	/**
+	 * method to get rrNvReadable object
+	 *
+	 * @return
+	 */
+	/*public rrNvReadable getSettings() {
+		return settings;
+	}*/
+
+	/**
+	 * method to set rrNvReadable object
+	 *
+	 * @param settings
+	 */
+	/*public void setSettings(rrNvReadable settings) {
+		this.settings = settings;
+	}*/
+
+	/**
+	 * method to get CuratorFramework object
+	 *
+	 * @return
+	 */
+	public static CuratorFramework getCurator() {
+		return curator;
+	}
+
+	/**
+	 * method to set CuratorFramework object
+	 *
+	 * @param curator
+	 */
+	public static void setCurator(CuratorFramework curator) {
+		ConfigurationReader.curator = curator;
+	}
+
+	/**
+	 * method to get ZkClient object
+	 *
+	 * @return
+	 */
+	public ZkClient getZk() {
+		return zk;
+	}
+
+	/**
+	 * method to set ZkClient object
+	 *
+	 * @param zk
+	 */
+	public void setZk(ZkClient zk) {
+		this.zk = zk;
+	}
+
+	/**
+	 * method to get DMaaPZkConfigDb object
+	 *
+	 * @return
+	 */
+	public DMaaPZkConfigDb getfConfigDb() {
+		return fConfigDb;
+	}
+
+	/**
+	 * method to set DMaaPZkConfigDb object
+	 *
+	 * @param fConfigDb
+	 */
+	public void setfConfigDb(DMaaPZkConfigDb fConfigDb) {
+		this.fConfigDb = fConfigDb;
+	}
+
+	/**
+	 * method to get MemoryQueue object
+	 *
+	 * @return
+	 */
+	public MemoryQueue getQ() {
+		return q;
+	}
+
+	/**
+	 * method to set MemoryQueue object
+	 *
+	 * @param q
+	 */
+	public void setQ(MemoryQueue q) {
+		this.q = q;
+	}
+
+	/**
+	 * method to get MemoryMetaBroker object
+	 *
+	 * @return
+	 */
+	public MemoryMetaBroker getMmb() {
+		return mmb;
+	}
+
+	/**
+	 * method to set MemoryMetaBroker object
+	 *
+	 * @param mmb
+	 */
+	public void setMmb(MemoryMetaBroker mmb) {
+		this.mmb = mmb;
+	}
+
+	/**
+	 * method to get NsaApiDb object
+	 *
+	 * @return
+	 */
+	public NsaApiDb<NsaSimpleApiKey> getfApiKeyDb() {
+		return fApiKeyDb;
+	}
+
+	/**
+	 * method to set NsaApiDb object
+	 *
+	 * @param fApiKeyDb
+	 */
+	public void setfApiKeyDb(NsaApiDb<NsaSimpleApiKey> fApiKeyDb) {
+		this.fApiKeyDb = fApiKeyDb;
+	}
+
+	/*
+	 * public DMaaPTransactionObjDB<DMaaPTransactionObj> getfTranDb() { return
+	 * fTranDb; }
+	 *
+	 * public void setfTranDb(DMaaPTransactionObjDB<DMaaPTransactionObj>
+	 * fTranDb) { this.fTranDb = fTranDb; }
+	 */
+	/**
+	 * Returns the zookeeper connection string read from the message-router
+	 * property file, falling back to the compile-time default when unset.
+	 *
+	 * @return the zookeeper ensemble connection string
+	 */
+	public static String getMainZookeeperConnectionString() {
+		//return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers, CambriaConstants.kDefault_ZkConfigDbServers);
+
+		String typeVal = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+		if (typeVal==null) typeVal=CambriaConstants.kDefault_ZkConfigDbServers;
+
+		return typeVal;
+	}
+
+	/**
+	 * Returns the zookeeper config-db root path from the property file,
+	 * falling back to the compile-time default when unset.
+	 */
+	public static String getMainZookeeperConnectionSRoot(){
+		String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot);
+
+		if (null==strVal)
+			strVal=CambriaConstants.kDefault_ZkConfigDbRoot;
+
+		return strVal;
+	}
+
+	/** Returns the IP blacklist built in servletSetup(). */
+	public Blacklist getfIpBlackList() {
+		return fIpBlackList;
+	}
+
+	/** Replaces the IP blacklist. */
+	public void setfIpBlackList(Blacklist fIpBlackList) {
+		this.fIpBlackList = fIpBlackList;
+	}
+
+	/**
+	 * Returns the NSA security manager.
+	 * NOTE(review): may be null — see the field comment; its initialization
+	 * in the constructor is commented out.
+	 */
+	public NsaAuthenticatorService<NsaSimpleApiKey> getNsaSecurityManager() {
+		return nsaSecurityManager;
+	}
+
+	/** Sets the NSA security manager. */
+	public void setNsaSecurityManager(NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager) {
+		this.nsaSecurityManager = nsaSecurityManager;
+	}
+
+	/** Returns the emailer created in servletSetup(). */
+	public Emailer getSystemEmailer()
+	{
+		return this.fEmailer;
+	}
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java
new file mode 100644
index 0000000..8950ec8
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java
@@ -0,0 +1,69 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ *
+ *
+ * @author author
+ *
+ *
+ */
+public class DMaaPCuratorFactory {
+
+	/**
+	 * Builds a CuratorFramework client for the configured ZooKeeper ensemble.
+	 * Connection parameters are read from the message-router property file via
+	 * AJSCPropertiesMap, falling back to the CambriaConstants defaults.
+	 * The returned client is not started; callers must call start().
+	 *
+	 * @param settings legacy settings handle (retained for interface
+	 *                 compatibility; values are read from AJSCPropertiesMap)
+	 * @return a new, unstarted CuratorFramework client
+	 */
+	public static CuratorFramework getCurator(rrNvReadable settings) {
+		String zkServers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(
+				CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers);
+		if (null == zkServers)
+			zkServers = CambriaConstants.kDefault_ZkConfigDbServers;
+
+		final int sessionTimeoutMs = getIntProperty(
+				CambriaConstants.kSetting_ZkSessionTimeoutMs, CambriaConstants.kDefault_ZkSessionTimeoutMs);
+
+		// BUGFIX: the connection timeout was previously read from the *session*
+		// timeout property key (copy-paste error), so the two timeouts could not
+		// be configured independently; read it from its own key, matching the
+		// kDefault_ZkConnectionTimeoutMs fallback already in use.
+		final int connectionTimeoutMs = getIntProperty(
+				CambriaConstants.kSetting_ZkConnectionTimeoutMs, CambriaConstants.kDefault_ZkConnectionTimeoutMs);
+
+		return CuratorFrameworkFactory.newClient(zkServers, sessionTimeoutMs, connectionTimeoutMs,
+				new ExponentialBackoffRetry(1000, 5));
+	}
+
+	/**
+	 * Reads an integer property from the message-router property file,
+	 * returning the given default when the property is absent.
+	 */
+	private static int getIntProperty(String key, int defaultValue) {
+		final String value = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, key);
+		return (value == null) ? defaultValue : Integer.parseInt(value);
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java
new file mode 100644
index 0000000..a24dd6b
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java
@@ -0,0 +1,358 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * class is used to create response object which is given to user
+ *
+ * @author author
+ *
+ */
+
+public class DMaaPResponseBuilder {
+
+	//private static Logger log = Logger.getLogger(DMaaPResponseBuilder.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class);
+
+	/** Default buffer size, in bytes, for stream copies. */
+	protected static final int kBufferLength = 4096;
+
+	/**
+	 * Adds standard no-cache headers so intermediaries and browsers do not
+	 * cache API payloads.
+	 *
+	 * @param ctx request/response context
+	 */
+	public static void setNoCacheHeadings(DMaaPContext ctx) {
+		HttpServletResponse response = ctx.getResponse();
+		response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate");
+		response.addHeader("Pragma", "no-cache");
+		response.addHeader("Expires", "0");
+	}
+
+	/**
+	 * Sends a 200 response whose body is the given JSON object, serialized
+	 * with an indent of 4.
+	 *
+	 * @param ctx request/response context
+	 * @param result the JSON body to send
+	 * @throws JSONException if the object cannot be serialized
+	 * @throws IOException on write failure
+	 */
+	public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException {
+		// UTF-8 is specified explicitly so the byte encoding does not depend
+		// on the JVM's platform default charset.
+		respondOkWithStream(ctx, "application/json",
+				new ByteArrayInputStream(result.toString(4).getBytes(StandardCharsets.UTF_8)));
+	}
+
+	/**
+	 * Sends a 204 No Content response; any failure is logged and swallowed.
+	 *
+	 * @param ctx request/response context
+	 */
+	public static void respondOkNoContent(DMaaPContext ctx) {
+		try {
+			ctx.getResponse().setStatus(204);
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends a 200 response with an HTML body; any failure is logged and swallowed.
+	 *
+	 * @param ctx request/response context
+	 * @param html the HTML body to send
+	 */
+	public static void respondOkWithHtml(DMaaPContext ctx, String html) {
+		try {
+			// (fixed: dropped a redundant html.toString() call on a String)
+			respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.getBytes(StandardCharsets.UTF_8)));
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends a 200 response whose body is read from the given input stream.
+	 *
+	 * @param ctx request/response context
+	 * @param mediaType value for the Content-Type header
+	 * @param is source of the response body
+	 * @throws IOException on read/write failure
+	 */
+	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is)
+			throws IOException {
+		// Adapt the InputStream to the StreamWriter callback form.
+		respondOkWithStream(ctx, mediaType, new StreamWriter() {
+
+			public void write(OutputStream os) throws IOException {
+				copyStream(is, os);
+			}
+		});
+
+	}
+
+	/**
+	 * Sends a 200 response whose body is produced by the given writer callback.
+	 *
+	 * @param ctx request/response context
+	 * @param mediaType value for the Content-Type header
+	 * @param writer callback that writes the body
+	 * @throws IOException on write failure
+	 */
+	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
+
+		ctx.getResponse().setStatus(200);
+		OutputStream os = getStreamForBinaryResponse(ctx, mediaType);
+		writer.write(os);
+
+	}
+
+	/**
+	 * Sends an error response with the given status code and message;
+	 * any I/O failure is logged and swallowed.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param msg error message
+	 */
+	public static void respondWithError(DMaaPContext ctx, int errCode, String msg) {
+		try {
+			ctx.getResponse().sendError(errCode, msg);
+		} catch (IOException excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends an error response with the given status code and a JSON body.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param body JSON error body
+	 */
+	public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) {
+		try {
+			sendErrorAndBody(ctx, errCode, body.toString(4), "application/json");
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends an error response whose JSON body carries "status" and "message" fields.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param msg error message
+	 */
+	public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) {
+		try {
+			JSONObject o = new JSONObject();
+			o.put("status", errCode);
+			o.put("message", msg);
+			respondWithError(ctx, errCode, o);
+
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Copies all bytes from in to out using the default buffer size.
+	 *
+	 * @param in source stream
+	 * @param out destination stream (closed when the copy completes)
+	 * @throws IOException on read/write failure
+	 */
+	public static void copyStream(InputStream in, OutputStream out) throws IOException {
+		// Use the shared constant rather than duplicating the literal 4096.
+		copyStream(in, out, kBufferLength);
+	}
+
+	/**
+	 * Copies all bytes from in to out with a caller-chosen buffer size.
+	 * Note: the destination stream is closed on success, which also flushes
+	 * the servlet output stream for complete responses. The source stream is
+	 * NOT closed; the caller retains ownership of it.
+	 *
+	 * @param in source stream
+	 * @param out destination stream (closed when the copy completes)
+	 * @param bufferSize copy buffer size in bytes
+	 * @throws IOException on read/write failure
+	 */
+	public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException {
+		byte[] buffer = new byte[bufferSize];
+		int len;
+		while ((len = in.read(buffer)) != -1) {
+			out.write(buffer, 0, len);
+		}
+		out.close();
+	}
+
+	/**
+	 * Callback interface for producing a response body on an output stream.
+	 */
+	public static abstract interface StreamWriter {
+		/**
+		 * Writes the response body to the given stream.
+		 *
+		 * @param paramOutputStream destination stream
+		 * @throws IOException on write failure
+		 */
+		public abstract void write(OutputStream paramOutputStream) throws IOException;
+	}
+
+	/**
+	 * Returns a binary response stream with Content-Type application/octet-stream.
+	 *
+	 * @param ctx request/response context
+	 * @return the response output stream (or a null sink for HEAD requests)
+	 * @throws IOException on failure obtaining the stream
+	 */
+	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException {
+		return getStreamForBinaryResponse(ctx, "application/octet-stream");
+	}
+
+	/**
+	 * Returns a binary response stream with the given Content-Type.
+	 * For HEAD requests (which must not carry a body) a discarding
+	 * null stream is returned instead of the real response stream.
+	 *
+	 * @param ctx request/response context
+	 * @param contentType value for the Content-Type header
+	 * @return the response output stream, or a null sink for HEAD requests
+	 * @throws IOException on failure obtaining the stream
+	 */
+	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
+		ctx.getResponse().setContentType(contentType);
+
+		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+
+		OutputStream os = null;
+		if (fResponseEntityAllowed) {
+			os = ctx.getResponse().getOutputStream();
+		} else {
+			os = new NullStream();
+		}
+		return os;
+	}
+
+	/**
+	 * An OutputStream that discards everything written to it
+	 * (used for HEAD responses, which carry no body).
+	 */
+	private static class NullStream extends OutputStream {
+		/**
+		 * Discards the byte.
+		 *
+		 * @param b byte to discard
+		 */
+		public void write(int b) {
+		}
+	}
+
+	/**
+	 * A Writer that discards everything written to it
+	 * (used for HEAD responses, which carry no body).
+	 */
+	private static class NullWriter extends Writer {
+		/** Discards the characters. */
+		public void write(char[] cbuf, int off, int len) {
+		}
+
+		/** No-op flush. */
+		public void flush() {
+		}
+
+		/** No-op close. */
+		public void close() {
+		}
+	}
+
+	/**
+	 * Sends an error status together with a body of the given MIME type;
+	 * any I/O failure is logged and swallowed.
+	 *
+	 * @param ctx request/response context
+	 * @param err HTTP status code
+	 * @param content body text
+	 * @param mimeType value for the Content-Type header
+	 */
+	public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) {
+		try {
+			setStatus(ctx, err);
+			getStreamForTextResponse(ctx, mimeType).println(content);
+		} catch (IOException e) {
+			log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(),
+					e);
+		}
+	}
+
+	/**
+	 * Sets the HTTP status code on the response.
+	 *
+	 * @param ctx request/response context
+	 * @param code HTTP status code
+	 */
+	public static void setStatus(DMaaPContext ctx, int code) {
+		ctx.getResponse().setStatus(code);
+	}
+
+	/**
+	 * Returns a text response writer with Content-Type text/html.
+	 *
+	 * @param ctx request/response context
+	 * @return the response writer (or a null sink for HEAD requests)
+	 * @throws IOException on failure obtaining the writer
+	 */
+	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException {
+		return getStreamForTextResponse(ctx, "text/html");
+	}
+
+	/**
+	 * Returns a text response writer with the given Content-Type.
+	 * For HEAD requests a discarding null writer is returned instead of
+	 * the real response writer.
+	 *
+	 * @param ctx request/response context
+	 * @param contentType value for the Content-Type header
+	 * @return the response writer, or a null sink for HEAD requests
+	 * @throws IOException on failure obtaining the writer
+	 */
+	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException {
+		ctx.getResponse().setContentType(contentType);
+
+		PrintWriter pw = null;
+		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+
+		if (fResponseEntityAllowed) {
+			pw = ctx.getResponse().getWriter();
+		} else {
+			pw = new PrintWriter(new NullWriter());
+		}
+		return pw;
+	}
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java
new file mode 100644
index 0000000..3f87d59
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java
@@ -0,0 +1,215 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import javax.mail.BodyPart;
+import javax.mail.Message;
+import javax.mail.Multipart;
+import javax.mail.PasswordAuthentication;
+import javax.mail.Session;
+import javax.mail.Transport;
+import javax.mail.internet.InternetAddress;
+import javax.mail.internet.MimeBodyPart;
+import javax.mail.internet.MimeMessage;
+import javax.mail.internet.MimeMultipart;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Send an email from a message.
+ *
+ * @author author
+ */
+public class Emailer
+{
+ public static final String kField_To = "to";
+ public static final String kField_Subject = "subject";
+ public static final String kField_Message = "message";
+
+ public Emailer()
+ {
+ fExec = Executors.newCachedThreadPool ();
+ // fSettings = settings;
+ }
+
+ public void send ( String to, String subj, String body ) throws IOException
+ {
+ final String[] addrs = to.split ( "," );
+
+ if ( to.length () > 0 )
+ {
+ final MailTask mt = new MailTask ( addrs, subj, body );
+ fExec.submit ( mt );
+ }
+ else
+ {
+ log.warn ( "At least one address is required." );
+ }
+ }
+
+ public void close ()
+ {
+ fExec.shutdown ();
+ }
+
+ private final ExecutorService fExec;
+ //private final rrNvReadable fSettings;
+
+ //private static final Logger log = LoggerFactory.getLogger ( Emailer.class );
+
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
+
+ public static final String kSetting_MailAuthUser = "mailLogin";
+ public static final String kSetting_MailAuthPwd = "mailPassword";
+ public static final String kSetting_MailFromEmail = "mailFromEmail";
+ public static final String kSetting_MailFromName = "mailFromName";
+ public static final String kSetting_SmtpServer = "mailSmtpServer";
+ public static final String kSetting_SmtpServerPort = "mailSmtpServerPort";
+ public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl";
+ public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth";
+
+ private class MailTask implements Runnable
+ {
+ public MailTask ( String[] to, String subject, String msgBody )
+ {
+ fToAddrs = to;
+ fSubject = subject;
+ fBody = msgBody;
+ }
+
+ private String getSetting ( String settingKey, String defval )
+ {
+ //return fSettings.getString ( settingKey, defval );
+ String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey);
+ if(strSet==null)strSet=defval;
+ return strSet;
+ }
+
+ // we need to get setting values from the evaluator but also the channel config
+ private void makeSetting ( Properties props, String propKey, String settingKey, String defval )
+ {
+ props.put ( propKey, getSetting ( settingKey, defval ) );
+ }
+
+ private void makeSetting ( Properties props, String propKey, String settingKey, int defval )
+ {
+ makeSetting ( props, propKey, settingKey, "" + defval );
+ }
+
+ private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval )
+ {
+ makeSetting ( props, propKey, settingKey, "" + defval );
+ }
+
+ @Override
+ public void run ()
+ {
+ final StringBuffer tag = new StringBuffer ();
+ final StringBuffer addrList = new StringBuffer ();
+ tag.append ( "(" );
+ for ( String to : fToAddrs )
+ {
+ if ( addrList.length () > 0 )
+ {
+ addrList.append ( ", " );
+ }
+ addrList.append ( to );
+ }
+ tag.append ( addrList.toString () );
+ tag.append ( ") \"" );
+ tag.append ( fSubject );
+ tag.append ( "\"" );
+
+ log.info ( "sending mail to " + tag );
+
+ try
+ {
+ final Properties prop = new Properties ();
+ makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 );
+ prop.put ( "mail.smtp.socketFactory.fallback", "false" );
+ prop.put ( "mail.smtp.quitwait", "false" );
+ makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" );
+ makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true );
+ makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
+
+ final String un = getSetting ( kSetting_MailAuthUser, "" );
+ final String pw = getSetting ( kSetting_MailAuthPwd, "" );
+ final Session session = Session.getInstance ( prop,
+ new javax.mail.Authenticator()
+ {
+ @Override
+ protected PasswordAuthentication getPasswordAuthentication()
+ {
+ return new PasswordAuthentication ( un, pw );
+ }
+ }
+ );
+
+ final Message msg = new MimeMessage ( session );
+
+ final InternetAddress from = new InternetAddress (
+ getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ),
+ getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) );
+ msg.setFrom ( from );
+ msg.setReplyTo ( new InternetAddress[] { from } );
+ msg.setSubject ( fSubject );
+
+ for ( String toAddr : fToAddrs )
+ {
+ final InternetAddress to = new InternetAddress ( toAddr );
+ msg.addRecipient ( Message.RecipientType.TO, to );
+ }
+
+ final Multipart multipart = new MimeMultipart ( "related" );
+ final BodyPart htmlPart = new MimeBodyPart ();
+ htmlPart.setContent ( fBody, "text/plain" );
+ multipart.addBodyPart ( htmlPart );
+ msg.setContent ( multipart );
+
+ Transport.send ( msg );
+
+ log.info ( "mailing " + tag + " off without error" );
+ }
+ catch ( Exception e )
+ {
+ log.warn ( "Exception caught for " + tag, e );
+ }
+ }
+
+ private final String[] fToAddrs;
+ private final String fSubject;
+ private final String fBody;
+ }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java
new file mode 100644
index 0000000..a3b65e1
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
+
+/**
+ * Property stack for message-router configuration.
+ * <p>
+ * The original file-loading logic has been retired; the stack is created
+ * empty and settings are resolved elsewhere via AJSCPropertiesMap.
+ *
+ * @author
+ */
+public class PropertyReader extends nvReadableStack {
+
+    private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class);
+
+    /**
+     * Creates an empty property stack; no sources are pushed.
+     *
+     * @throws loadException declared for compatibility with nvReadableStack
+     */
+    public PropertyReader() throws loadException {
+        // intentionally empty: configuration loading moved out of this class
+    }
+
+    /**
+     * Looks up a key in the given argument map, falling back to a default.
+     *
+     * @param argMap map of settings
+     * @param key setting name
+     * @param defaultValue value returned when the key is absent
+     * @return the mapped value, or defaultValue when not present
+     */
+    @SuppressWarnings("unused")
+    private static String getSetting(Map<String, String> argMap, final String key, final String defaultValue) {
+        final String val = argMap.get(key);
+        return (val == null) ? defaultValue : val;
+    }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java
new file mode 100644
index 0000000..300cf86
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java
@@ -0,0 +1,145 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import java.text.DecimalFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Enumeration;
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
+/**
+ * This is an utility class for various formatting operations: dates,
+ * batch sequence ids, transaction ids, and HTTP header access.
+ * @author author
+ *
+ */
+public class Utils {
+
+    // pattern used for all router-side timestamps
+    private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+    public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
+    // zero-padded, six-digit batch sequence numbers
+    private static final String BATCH_ID_FORMAT = "000000";
+
+    private Utils() {
+        // utility class: no instances
+        super();
+    }
+
+    /**
+     * Formats a date with the router's standard timestamp pattern. A new
+     * SimpleDateFormat is built per call because the class is not thread-safe.
+     *
+     * @param date the date to format
+     * @return the formatted date, or null when date is null
+     */
+    public static String getFormattedDate(Date date) {
+        if (null == date) {
+            return null;
+        }
+        return new SimpleDateFormat(DATE_FORMAT).format(date);
+    }
+
+    /**
+     * Extracts the user portion of the caller's API key.
+     *
+     * @param request the incoming HTTP request
+     * @return the api key user, or null when no auth information is present
+     */
+    public static String getUserApiKey(HttpServletRequest request) {
+        final String auth = request.getHeader(CAMBRIA_AUTH_HEADER);
+        if (null != auth) {
+            // header is colon-separated; the key is the first part
+            return auth.split(":")[0];
+        } else if (null != request.getHeader("Authorization")) {
+            // AAF implementation enhancement: principal name is "user@domain"
+            final java.security.Principal principal = request.getUserPrincipal();
+            if (principal == null) {
+                // Authorization header present but nobody authenticated;
+                // previously this dereferenced null (NPE)
+                return null;
+            }
+            final String user = principal.getName();
+            final int at = user.lastIndexOf('@');
+            // guard: a name without "@" previously threw StringIndexOutOfBoundsException
+            return (at >= 0) ? user.substring(0, at) : user;
+        }
+        return null;
+    }
+
+    /**
+     * Formats a batch sequence id as a zero-padded six-digit string.
+     * (Method-name typo "Fromatted" is retained for API compatibility.)
+     *
+     * @param batchId the sequence number
+     * @return the formatted batchId
+     */
+    public static String getFromattedBatchSequenceId(Long batchId) {
+        DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT);
+        return format.format(batchId);
+    }
+
+    /**
+     * Returns the message length in bytes.
+     * NOTE(review): getBytes() uses the platform default charset - confirm
+     * whether UTF-8 should be specified explicitly before changing.
+     *
+     * @param message the message text
+     * @return length in bytes, or 0 when message is null
+     */
+    public static long messageLengthInBytes(String message) {
+        if (null == message) {
+            return 0;
+        }
+        return message.getBytes().length;
+    }
+
+    /**
+     * Strips the trailing "::"-delimited segment from a transaction id.
+     *
+     * @param transactionId the full transaction id
+     * @return the id without its last segment, or null when absent/malformed
+     */
+    public static String getResponseTransactionId(String transactionId) {
+        if (null != transactionId && !transactionId.isEmpty()) {
+            final int idx = transactionId.lastIndexOf("::");
+            // guard: an id without "::" previously threw StringIndexOutOfBoundsException
+            if (idx >= 0) {
+                return transactionId.substring(0, idx);
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Computes the thread sleep time for a per-minute rate, floored at 1000 ms.
+     *
+     * @param ratePerMinute the desired rate
+     * @return sleep milliseconds, or 0 for non-positive rates
+     */
+    public static long getSleepMsForRate ( double ratePerMinute )
+    {
+        if ( ratePerMinute <= 0.0 ) return 0;
+        return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) );
+    }
+
+    /**
+     * Returns the effective client address, preferring X-Forwarded-For.
+     *
+     * @param ctx the DMaaP context
+     * @return the forwarded-for address when present, else the socket address
+     */
+    public static String getRemoteAddress(DMaaPContext ctx)
+    {
+        String reqAddr = ctx.getRequest().getRemoteAddr();
+        String fwdHeader = getFirstHeader("X-Forwarded-For", ctx);
+        return (fwdHeader != null) ? fwdHeader : reqAddr;
+    }
+
+    /**
+     * Returns the first value of the named request header, or null.
+     *
+     * @param h header name
+     * @param ctx the DMaaP context
+     * @return the first header value, or null when the header is absent
+     */
+    public static String getFirstHeader(String h, DMaaPContext ctx)
+    {
+        List<String> l = getHeader(h, ctx);
+        return l.isEmpty() ? null : l.get(0);
+    }
+
+    /**
+     * Returns all values of the named request header.
+     *
+     * @param h header name
+     * @param ctx the DMaaP context
+     * @return list of header values (possibly empty, never null)
+     */
+    public static List<String> getHeader(String h, DMaaPContext ctx)
+    {
+        // generics instead of the former raw LinkedList/Enumeration
+        List<String> list = new LinkedList<>();
+        Enumeration<?> e = ctx.getRequest().getHeaders(h);
+        while (e.hasMoreElements())
+        {
+            list.add(e.nextElement().toString());
+        }
+        return list;
+    }
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java
new file mode 100644
index 0000000..7f90e80
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.filter;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
+import org.springframework.context.ApplicationContext;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+
+/**
+ * Servlet Filter implementation class ContentLengthFilter.
+ * Rejects pub/sub requests whose declared Content-Length exceeds the
+ * Spring-configured maximum; chunked requests are passed through.
+ */
+public class ContentLengthFilter implements Filter {
+
+    private DefaultLength defaultLength;
+
+    private FilterConfig filterConfig = null;
+    DMaaPErrorMessages errorMessages = null;
+    private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class);
+
+    /**
+     * Default constructor.
+     */
+    public ContentLengthFilter() {
+    }
+
+    /**
+     * @see Filter#destroy()
+     */
+    public void destroy() {
+        // nothing to release
+    }
+
+    /**
+     * Validates the request's Content-Length against the configured maximum
+     * and forwards it down the chain only when it is within bounds.
+     *
+     * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
+     */
+    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
+            ServletException {
+        log.info("inside servlet do filter content length checking before pub/sub");
+        HttpServletRequest request = (HttpServletRequest) req;
+        JSONObject jsonObj = null;
+        int requestLength = 0;
+        try {
+            // retrieving content length from message header
+            if (null != request.getHeader("Content-Length")) {
+                requestLength = Integer.parseInt(request.getHeader("Content-Length"));
+            }
+            // retrieving encoding from message header
+            String transferEncoding = request.getHeader("Transfer-Encoding");
+            // reject when the encoding is present but not chunked and the declared
+            // length exceeds the configured maximum (parseInt kept inline so it is
+            // only evaluated on the same paths as before)
+            if (null != transferEncoding && !(transferEncoding.contains("chunked"))
+                    && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+                jsonObj = new JSONObject().append("defaultlength", defaultLength)
+                        .append("requestlength", requestLength);
+                log.error("message length is greater than default");
+                throw new CambriaApiException(jsonObj);
+            } else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+                jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append(
+                        "requestlength", requestLength);
+                log.error("Request message is not chunked or request length is greater than default length");
+                throw new CambriaApiException(jsonObj);
+            } else {
+                chain.doFilter(req, res);
+            }
+        } catch (CambriaApiException | NumberFormatException e) {
+            log.error("message size is greater then default");
+            // guard: jsonObj is still null when a NumberFormatException fired
+            // before either rejection branch built it (previously an NPE here)
+            final String details = (jsonObj == null) ? "" : jsonObj.toString();
+            ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
+                    DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
+                            + details);
+            log.info(errRes.toString());
+            // NOTE(review): on this path the request is neither forwarded nor
+            // answered; the commented-out rethrow suggests an error response was
+            // intended - confirm before enabling.
+            // throw new CambriaApiException(errRes);
+        }
+
+    }
+
+    /**
+     * Resolves the configured maximum length and error-message beans from the
+     * Spring web application context.
+     *
+     * @see Filter#init(FilterConfig)
+     */
+    public void init(FilterConfig fConfig) throws ServletException {
+        this.filterConfig = fConfig;
+        log.info("Filter Content Length Initialize");
+        ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig
+                .getServletContext());
+        this.defaultLength = (DefaultLength) ctx.getBean("defLength");
+        this.errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages");
+    }
+
+}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java
new file mode 100644
index 0000000..9fe91cf
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java
@@ -0,0 +1,37 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.filter;
+
+
+/**
+ * Holder bean for the configured maximum request length, consumed by
+ * ContentLengthFilter.
+ */
+public class DefaultLength {
+
+ // maximum allowed request length, kept as the raw configured String
+ // (null until setDefaultLength is called)
+ String defaultLength;
+
+ /** @return the configured maximum length, or null when not yet set */
+ public String getDefaultLength() {
+ return defaultLength;
+ }
+
+ /** @param defaultLength the maximum length value to hold */
+ public void setDefaultLength(String defaultLength) {
+ this.defaultLength = defaultLength;
+ }
+
+}
diff --git a/src/main/resources/DMaaPUrl.properties b/src/main/resources/DMaaPUrl.properties
new file mode 100644
index 0000000..8c90912
--- /dev/null
+++ b/src/main/resources/DMaaPUrl.properties
@@ -0,0 +1,39 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+#dev1 server
+#url=http://hltd435.hydc.sbc.com:8080/DMaaP/dmaaprest
+
+#dev2 server
+#url=http://hltd436.hydc.sbc.com:8080/DMaaP/dmaaprest/
+#url=http://hltd436.hydc.sbc.com:8181/DMaaP/dmaaprest/
+
+#localhost
+#url=http://hltd435.hydc.sbc.com:8089/DMaaP/dmaaprest/
+url=http://hltd436.hydc.sbc.com:8080/DMaaP/dmaaprest/
+#cloud
+#url=http://127.0.0.1:8090/DMaaP/dmaaprest/
+
+#X-Cambria-Date
+date=2015-11-23T8:56:19-0700
+
+# topic
+topicName=org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel \ No newline at end of file
diff --git a/src/main/resources/cambriaApiVersion.properties b/src/main/resources/cambriaApiVersion.properties
new file mode 100644
index 0000000..4521526
--- /dev/null
+++ b/src/main/resources/cambriaApiVersion.properties
@@ -0,0 +1,23 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+
+cambriaApiVersion=${project.version}
diff --git a/src/main/resources/dme2testcase.properties b/src/main/resources/dme2testcase.properties
new file mode 100644
index 0000000..6f358a3
--- /dev/null
+++ b/src/main/resources/dme2testcase.properties
@@ -0,0 +1,85 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+Version =1.0
+Environment =TEST
+Latitude =37.66
+Longitude =-122.096839
+ServiceName =dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/apiKeys
+Partner =MR1
+
+#producerConsumer
+SubContextPathproducer =/events/
+SubContextPathConsumer=/events/
+group=group
+id=rk229m@csp.att.com
+
+#filter
+SubContextPathConsumerFilter=/events/
+filterType=filter={"class":"Equals", "field":"email", "value":"ai039a@att.com"}
+
+#topics
+subContextPathGetAllTopic=/topics
+subContextPathGetOneTopic=/topics/
+SubContextPathGetPublisher=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/producers
+SubContextPathGetPermitPublisher=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/producers/rk229m@csp.att.com
+SubContextPathGetConsumer=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/consumers
+SubContextPathCreateTopic=/topics/create
+SubContextPathGetPermitConsumer=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/consumers/rk229m@att.com
+newTopic=org.onap.dmaap.messagerouter.msgrtr.dmaap.mr.junittestingtopic
+topicDescription=new topic creation
+partition=1
+replication=1
+txenabled=true
+deleteTopic=org.onap.dmaap.messagerouter.msgrtr.dmaap.mr.deleteTopic
+
+
+#Admin
+SubContextPathGetAdminConsumerCache=/consumerCache
+SubContextPathDropAdminConsumerCache=/dropConsumerCache
+
+#Metrics
+SubContextPathGetMetrics=/metrics
+SubContextPathGetMetricsByName=/metrics/startTime
+
+#apikey
+SubContextPathGetApiKeys=/apiKeys
+SubContextPathGetCreateKeys=/create
+SubContextPathUpdateKeys=/apiKeys/
+SubContextPathDeleteteKeys=/apiKeys/
+SubContextPathGetOneKey=/apiKeys/
+
+Protocol =http
+
+
+#methodType
+MethodTypePost =POST
+MethodTypeGet=GET
+MethodTypePut=PUT
+MethodTypeDelete=DELETE
+message ={"id": "example@att.com"}
+
+user=<user_id>
+password=<password>
+
+
+contenttype=application/json
+contenttypejson=application/json \ No newline at end of file
diff --git a/src/main/resources/endpoint.properties b/src/main/resources/endpoint.properties
new file mode 100644
index 0000000..f1ab131
--- /dev/null
+++ b/src/main/resources/endpoint.properties
@@ -0,0 +1,31 @@
+###############################################################################
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+Latitude =37.66
+Longitude =-122.096839
+Version =1.0.0
+ServiceName =org.onap.dmaap.messagerouter.msgrtr.acsi.saat.dt.dmaap.dev.demo1
+Environment =DEV
+RouteOffer =LA
+HostName =hltd436.hydc.sbc.com
+Port =8080
+ContextPath =/DMaaP/dmaaprest
+Protocol =http \ No newline at end of file
diff --git a/src/main/resources/images/attLogo.gif b/src/main/resources/images/attLogo.gif
new file mode 100644
index 0000000..10f184c
--- /dev/null
+++ b/src/main/resources/images/attLogo.gif
Binary files differ
diff --git a/src/main/resources/images/att_vt_1cp_grd_rev.gif b/src/main/resources/images/att_vt_1cp_grd_rev.gif
new file mode 100644
index 0000000..034515c
--- /dev/null
+++ b/src/main/resources/images/att_vt_1cp_grd_rev.gif
Binary files differ
diff --git a/src/main/resources/routes.conf b/src/main/resources/routes.conf
new file mode 100644
index 0000000..14c4f56
--- /dev/null
+++ b/src/main/resources/routes.conf
@@ -0,0 +1,106 @@
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.endpoints
+
+#
+# We need to deprecate the original non-versioned paths and use /v1/ for them.
+# Non-versioned paths will be supported "permanently."
+#
+
+#
+# metrics
+#
+GET /metrics CambriaMetrics.get
+GET /metrics/{metricName} CambriaMetrics.getMetricByName
+
+GET /v1/metrics CambriaMetrics.get
+GET /v1/metrics/{metricName} CambriaMetrics.getMetricByName
+
+#
+# get and post events
+#
+GET /events/{topic}/{consumerGroup}/{clientId} CambriaEvents.getEvents
+POST /events/{topic} CambriaEvents.pushEvents
+POST /events/{topic}/{partition} CambriaEvents.pushEvents
+
+GET /v1/events/{topic}/{consumerGroup}/{clientId} CambriaEvents.getEvents
+POST /v1/events/{topic} CambriaEvents.pushEvents
+POST /v1/events/{topic}/{partition} CambriaEvents.pushEvents
+
+
+#
+# api keys
+#
+GET /apiKeys CambriaApiKeys.getAllApiKeys
+POST /apiKeys/create CambriaApiKeys.createApiKey
+GET /apiKeys/{apiKey} CambriaApiKeys.getApiKey
+PATCH /apiKeys/{apiKey} CambriaApiKeys.updateApiKey
+DELETE /apiKeys/{apiKey} CambriaApiKeys.deleteApiKey
+
+GET /v1/apiKeys CambriaApiKeys.getAllApiKeys
+POST /v1/apiKeys/create CambriaApiKeys.createApiKey
+GET /v1/apiKeys/{apiKey} CambriaApiKeys.getApiKey
+PATCH /v1/apiKeys/{apiKey} CambriaApiKeys.updateApiKey
+DELETE /v1/apiKeys/{apiKey} CambriaApiKeys.deleteApiKey
+
+#
+# topics
+#
+POST /topics/create CambriaTopics.createTopic
+GET /topics CambriaTopics.getTopics
+GET /topics/{topicName} CambriaTopics.getTopic
+DELETE /topics/{topicName} CambriaTopics.deleteTopic
+
+POST /v1/topics/create CambriaTopics.createTopic
+GET /v1/topics CambriaTopics.getTopics
+GET /v1/topics/{topicName} CambriaTopics.getTopic
+DELETE /v1/topics/{topicName} CambriaTopics.deleteTopic
+
+#
+# topic permissions
+#
+GET /topics/{topicName}/producers CambriaTopics.getPublishersByTopicName
+PUT /topics/{topicName}/producers/{producerId} CambriaTopics.permitPublisherForTopic
+DELETE /topics/{topicName}/producers/{producerId} CambriaTopics.denyPublisherForTopic
+
+GET /topics/{topicName}/consumers CambriaTopics.getConsumersByTopicName
+PUT /topics/{topicName}/consumers/{consumerId} CambriaTopics.permitConsumerForTopic
+DELETE /topics/{topicName}/consumers/{consumerId} CambriaTopics.denyConsumerForTopic
+
+GET /v1/topics/{topicName}/producers CambriaTopics.getPublishersByTopicName
+PUT /v1/topics/{topicName}/producers/{producerId} CambriaTopics.permitPublisherForTopic
+DELETE /v1/topics/{topicName}/producers/{producerId} CambriaTopics.denyPublisherForTopic
+
+GET /v1/topics/{topicName}/consumers CambriaTopics.getConsumersByTopicName
+PUT /v1/topics/{topicName}/consumers/{consumerId} CambriaTopics.permitConsumerForTopic
+DELETE /v1/topics/{topicName}/consumers/{consumerId} CambriaTopics.denyConsumerForTopic
+
+#
+# Admin
+#
+GET /admin/consumerCache CambriaAdmin.showConsumerCache
+POST /admin/dropConsumerCache CambriaAdmin.dropConsumerCache
+
+GET /v1/admin/consumerCache CambriaAdmin.showConsumerCache
+POST /v1/admin/dropConsumerCache CambriaAdmin.dropConsumerCache
+
+###############################################################################
+#
+# UI routes don't need to be versioned
+#
+
+#
+# UI
+#
+GET / CambriaUi.hello
+GET /ui/apikeys CambriaUi.getApiKeysTable
+GET /ui/apikeys/{apiKey} CambriaUi.getApiKey
+GET /ui/topics CambriaUi.getTopicsTable
+GET /ui/topics/{topic} CambriaUi.getTopic
+
+
+# typical static file paths
+GET /css/ staticDir:css
+GET /js/ staticDir:js
+GET /images/ staticDir:images
+GET /font/ staticDir:font
+GET /favicon.ico staticFile:images/attLogo.gif
+GET /font-awesome/ staticDir:font-awesome
diff --git a/src/main/resources/templates/hello.html b/src/main/resources/templates/hello.html
new file mode 100644
index 0000000..69a65ab
--- /dev/null
+++ b/src/main/resources/templates/hello.html
@@ -0,0 +1,9 @@
+#set($tab="")
+#parse("header.html")
+
+ <h1>Cambria API</h1>
+ <p>This is a Cambria API server, part of the Universal Event Broker service, a general purpose,
+ high-throughput pub/sub event routing system.</p>
+	<p>Please see <a href="http://sa2020.it.att.com:8888/sw/cambria/intro">the Cambria project</a> information page.</p>
+
+#parse("footer.html")
diff --git a/src/main/scripts/cambria.sh b/src/main/scripts/cambria.sh
new file mode 100644
index 0000000..f74aa7b
--- /dev/null
+++ b/src/main/scripts/cambria.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# switched this from CAMBRIA_API_HOME, which should be declared in the env.
+# harmless to overwrite it here, but it's confusing to do so.
+BASE_DIR=`dirname "$0"`/..
+
+# use JAVA_HOME if provided
+if [ -n "${CAMBRIA_JAVA_HOME}" ]; then
+ JAVA=${CAMBRIA_JAVA_HOME}/bin/java
+elif [ -n "${JAVA_HOME}" ]; then
+ JAVA=${JAVA_HOME}/bin/java
+else
+ JAVA=java
+fi
+
+# use the logs dir set in environment, or the installation's logs dir if not set
+if [ -z "$CAMBRIA_LOGS_HOME" ]; then
+ CAMBRIA_LOGS_HOME=$BASE_DIR/logs
+fi
+
+mkdir -p ${CAMBRIA_LOGS_HOME}
+# run java. The classpath is the etc dir for config files, and the lib dir
+# for all the jars.
+#
+# don't pipe stdout/stderr to /dev/null here - some diagnostic info is available only there.
+# also don't assume the run is in the background. the caller should take care of that.
+#
+$JAVA -cp ${BASE_DIR}/etc:${BASE_DIR}/lib/* com.att.nsa.cambria.CambriaApiServer $* >${CAMBRIA_LOGS_HOME}/console.log 2>&1
diff --git a/src/main/scripts/cambriaJsonPublisher.sh b/src/main/scripts/cambriaJsonPublisher.sh
new file mode 100644
index 0000000..9dbc4c7
--- /dev/null
+++ b/src/main/scripts/cambriaJsonPublisher.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+KEY=$3
+if [ "$3" == "" ]
+then
+ KEY=`hostname -f`
+fi
+
+while read LINE
+do
+ wget -q --header "Content-Type: application/json" --post-data="{ \"cambria.partition\":\"$KEY\", \"msg\":\"$LINE\" }" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaMonitor.sh b/src/main/scripts/cambriaMonitor.sh
new file mode 100644
index 0000000..0a8727c
--- /dev/null
+++ b/src/main/scripts/cambriaMonitor.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria consumer, requires wget
+#
+# usage:
+# cambriaMonitor <broker> <topic> <group> <id> <timeout>
+#
+
+while :
+do
+ wget -q -O - $1/events/$2/$3/$4?timeout=$5\&pretty=1
+ if [ $? -ne 0 ]
+ then
+ sleep 10
+ fi
+ echo
+done
+
diff --git a/src/main/scripts/cambriaMonitorWithAuth.sh b/src/main/scripts/cambriaMonitorWithAuth.sh
new file mode 100644
index 0000000..4ee2908
--- /dev/null
+++ b/src/main/scripts/cambriaMonitorWithAuth.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria consumer, requires wget
+#
+# usage:
+# cambriaMonitor <broker> <topic> <group> <id> <timeout>
+#
+
+while :
+do
+ DATE=`date`
+ SIGNATURE=`echo -n "$DATE" | openssl sha1 -hmac $CAMBRIA_APISECRET -binary | openssl base64`
+
+ wget -q --header "X-CambriaAuth: $CAMBRIA_APIKEY:$SIGNATURE" --header "X-CambriaDate: $DATE" -O - $1/events/$2/$3/$4?timeout=$5\&pretty=1
+ if [ $? -ne 0 ]
+ then
+ sleep 10
+ fi
+ echo
+done
+
diff --git a/src/main/scripts/cambriaSimpleTextPubWithAuth.sh b/src/main/scripts/cambriaSimpleTextPubWithAuth.sh
new file mode 100644
index 0000000..1623726
--- /dev/null
+++ b/src/main/scripts/cambriaSimpleTextPubWithAuth.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+DATE=`date`
+SIGNATURE=`echo -n "$DATE" | openssl sha1 -hmac $CAMBRIA_APISECRET -binary | openssl base64`
+
+while read LINE
+do
+ wget -q --header "Content-Type: text/plain" --header "X-CambriaAuth: $CAMBRIA_APIKEY:$SIGNATURE" --header "X-CambriaDate: $DATE" --post-data="$LINE" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaSimpleTextPublisher.sh b/src/main/scripts/cambriaSimpleTextPublisher.sh
new file mode 100644
index 0000000..4aacd8a
--- /dev/null
+++ b/src/main/scripts/cambriaSimpleTextPublisher.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+#
+# act as a simple cambria publisher, requires wget
+#
+# usage:
+# cambriaPublisher <broker> <topic>
+#
+
+while read LINE
+do
+ wget -q --header "Content-Type: text/plain" --post-data="$LINE" -O - $1/events/$2 >/dev/null
+done
+
diff --git a/src/main/scripts/cambriaTool.sh b/src/main/scripts/cambriaTool.sh
new file mode 100644
index 0000000..175a19c
--- /dev/null
+++ b/src/main/scripts/cambriaTool.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# switched this from CAMBRIA_API_HOME, which should be declared in the env.
+# harmless to overwrite it here, but it's confusing to do so.
+BASE_DIR=`dirname "$0"`/..
+
+# determine a path separator that works for this platform
+PATHSEP=":"
+case "$(uname -s)" in
+
+ Darwin)
+ ;;
+
+ Linux)
+ ;;
+
+ CYGWIN*|MINGW32*|MSYS*)
+ PATHSEP=";"
+ ;;
+
+ *)
+ ;;
+esac
+
+# use JAVA_HOME if provided
+if [ -n "${CAMBRIA_JAVA_HOME}" ]; then
+ JAVA=${CAMBRIA_JAVA_HOME}/bin/java
+elif [ -n "${JAVA_HOME}" ]; then
+ JAVA=${JAVA_HOME}/bin/java
+else
+ JAVA=java
+fi
+
+$JAVA -cp ${BASE_DIR}/etc${PATHSEP}${BASE_DIR}/lib/* org.onap.dmaap.messagerouter.messageservice.nsa.cambria.tools.ConfigTool $*
diff --git a/src/main/scripts/swmpkgclean.sh b/src/main/scripts/swmpkgclean.sh
new file mode 100644
index 0000000..5f8699f
--- /dev/null
+++ b/src/main/scripts/swmpkgclean.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+# SWM can only store a finite amount of packages in its repository, so this script deletes the oldest package.
+# This script is run by Jenkins after the build is finished (post SWM upload).
+
+SWM_COMPONENT="org.onap.dmaap.messagerouter.msgrtr.nsa:msgrtr"
+
+SWM_PKGS=`/opt/app/swm/aftswmcli/bin/swmcli "component pkglist -c $SWM_COMPONENT -df -dh -dj -sui"`
+SWM_PKGS_COUNT=`echo "$SWM_PKGS" | wc -l`
+SWM_PKGS_OLDEST=`echo "$SWM_PKGS" | head -1`
+SWM_PKGS_MAX_COUNT=2
+
+if [ "$SWM_PKGS_COUNT" -gt "$SWM_PKGS_MAX_COUNT" ]
+then
+ SWM_PKG_OLDEST_VERSION=`echo $SWM_PKGS_OLDEST | awk '{print $2}'`
+
+ # Delete the oldest package for this component from the SWM repository
+ /opt/app/swm/aftswmcli/bin/swmcli "component pkgdelete -c $SWM_COMPONENT:$SWM_PKG_OLDEST_VERSION"
+else
+ echo "No need to clean up SWM, package count ($SWM_PKGS_COUNT) is below threshold ($SWM_PKGS_MAX_COUNT)"
+fi
diff --git a/src/main/swm/common/common.env b/src/main/swm/common/common.env
new file mode 100644
index 0000000..e788212
--- /dev/null
+++ b/src/main/swm/common/common.env
@@ -0,0 +1,21 @@
+export ROOT_DIR=${INSTALL_ROOT}opt/app/workload/fe3c/cambria
+
+OS=`uname`
+
+if [ "${OS}" = "SunOS" ]; then
+ CURRENT_USER=`/usr/xpg4/bin/id -un`
+ CURRENT_GROUP=`/usr/xpg4/bin/id -gn`
+else
+ CURRENT_USER=`id -un`
+ CURRENT_GROUP=`id -gn`
+fi
+
+export CURRENT_USER CURRENT_GROUP
+
+# Fail - used to quickly exit with a rc and error message
+fail() {
+ rc=$1
+ shift;
+ echo "ERROR: $@"
+ exit $rc
+}
diff --git a/src/main/swm/common/deinstall.env b/src/main/swm/common/deinstall.env
new file mode 100644
index 0000000..43a8d00
--- /dev/null
+++ b/src/main/swm/common/deinstall.env
@@ -0,0 +1 @@
+. `dirname $0`/common.env
diff --git a/src/main/swm/common/deinstall_postproc.sh b/src/main/swm/common/deinstall_postproc.sh
new file mode 100644
index 0000000..51d1b4c
--- /dev/null
+++ b/src/main/swm/common/deinstall_postproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh -x
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/deinstall.env
+
+exit 0
diff --git a/src/main/swm/common/deinstall_preproc.sh b/src/main/swm/common/deinstall_preproc.sh
new file mode 100644
index 0000000..51d1b4c
--- /dev/null
+++ b/src/main/swm/common/deinstall_preproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh -x
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/deinstall.env
+
+exit 0
diff --git a/src/main/swm/common/install.env b/src/main/swm/common/install.env
new file mode 100644
index 0000000..6dbd12c
--- /dev/null
+++ b/src/main/swm/common/install.env
@@ -0,0 +1 @@
+#NOOP \ No newline at end of file
diff --git a/src/main/swm/common/install_postproc.sh b/src/main/swm/common/install_postproc.sh
new file mode 100644
index 0000000..4166481
--- /dev/null
+++ b/src/main/swm/common/install_postproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh -x
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/install.env
+
+exit 0
diff --git a/src/main/swm/common/install_preproc.sh b/src/main/swm/common/install_preproc.sh
new file mode 100644
index 0000000..4166481
--- /dev/null
+++ b/src/main/swm/common/install_preproc.sh
@@ -0,0 +1,26 @@
+#!/bin/sh -x
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+
+. `dirname $0`/install.env
+
+exit 0
diff --git a/src/main/swm/common/localize.sh b/src/main/swm/common/localize.sh
new file mode 100644
index 0000000..c6aa5ed
--- /dev/null
+++ b/src/main/swm/common/localize.sh
@@ -0,0 +1,170 @@
+#!/bin/sh
+#*******************************************************************************
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#*******************************************************************************
+FINAL_CONFIG_FILE=${ROOT_DIR}/etc/cambriaApi.properties
+TEMPLATE_CONFIG_FILE=${ROOT_DIR}/etc/cambriaApi_template.properties
+BACKUP1_CONFIG_FILE=${ROOT_DIR}/etc/cambriaApi.properties.bk.1
+BACKUP2_CONFIG_FILE=${ROOT_DIR}/etc/cambriaApi.properties.bk.2
+
+echo "Localizing the Cambria API Server configuration"
+
+if [ -z "${ELASTICSEARCH_NODES}" ]; then
+ echo "ERROR: ELASTICSEARCH_NODES must be set"; exit 1
+fi
+
+if [ -z "${ZOOKEEPER_ENSEMBLE}" ]; then
+ echo "ERROR: ZOOKEEPER_ENSEMBLE must be set"; exit 2
+fi
+
+if [ -z "${ZOOKEEPER_CLIENT_PORT}" ]; then
+ ZOOKEEPER_CLIENT_PORT=2181
+fi
+
+if [ -z "${CAMBRIA_SERVICE_PORT}" ]; then
+ CAMBRIA_SERVICE_PORT=3904
+fi
+
+if [ -z "${CAMBRIA_BROKER_TYPE}" ]; then
+ CAMBRIA_BROKER_TYPE=kafka
+fi
+
+if [ -z "${KAFKA_PORT}" ]; then
+ KAFKA_PORT=9092
+fi
+
+if [ -z "${KAFKA_BROKER_LIST}" ]; then
+ KAFKA_BROKER_LIST=localhost:${KAFKA_PORT}
+fi
+
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF PREVIOUS BACKUP FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${BACKUP1_CONFIG_FILE} ]; then
+ cp -f ${BACKUP1_CONFIG_FILE} ${BACKUP2_CONFIG_FILE} || {
+ echo "ERROR: Could not copy ${BACKUP1_CONFIG_FILE} to ${BACKUP2_CONFIG_FILE}"
+ exit 5
+ }
+fi
+
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF CURRENT FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${FINAL_CONFIG_FILE} ]; then
+ cp -f ${FINAL_CONFIG_FILE} ${BACKUP1_CONFIG_FILE} || {
+ echo "ERROR: Could not copy ${FINAL_CONFIG_FILE} to ${BACKUP1_CONFIG_FILE}"
+ exit 6
+ }
+fi
+
+CAMBRIA_ZOOKEEPER_NODES=`echo ${ZOOKEEPER_ENSEMBLE} | sed -e "s/ /:${ZOOKEEPER_CLIENT_PORT},/g" | sed -e "s/$/:${ZOOKEEPER_CLIENT_PORT}/g"`
+
+#------------------------------------------------------------------------
+#- PROCESS THE TEMPLATE
+#------------------------------------------------------------------------
+sed -e 's/${CAMBRIA_SERVICE_PORT}/'"${CAMBRIA_SERVICE_PORT}"'/g' \
+    -e 's/${CAMBRIA_BROKER_TYPE}/'"${CAMBRIA_BROKER_TYPE}"'/g' \
+    -e 's/${KAFKA_BROKER_LIST}/'"${KAFKA_BROKER_LIST}"'/g' \
+    -e 's/${CAMBRIA_ZOOKEEPER_NODES}/'"${CAMBRIA_ZOOKEEPER_NODES}"'/g' "${TEMPLATE_CONFIG_FILE}" > "${FINAL_CONFIG_FILE}" || {
+	echo "ERROR: Could not process template file ${TEMPLATE_CONFIG_FILE} into ${FINAL_CONFIG_FILE}"
+	exit 7
+	}
+
+FINAL_LOG4J_FILE=${ROOT_DIR}/etc/log4j.xml
+TEMPLATE_LOG4J_FILE=${ROOT_DIR}/etc/log4j_template.xml
+BACKUP1_LOG4J_FILE=${ROOT_DIR}/etc/log4j.xml.bk.1
+BACKUP2_LOG4J_FILE=${ROOT_DIR}/etc/log4j.xml.bk.2
+
+if [ -z "${CAMBRIA_LOG_DIR}" ]; then
+ CAMBRIA_LOG_DIR=${ROOT_DIR}/logs
+fi
+
+if [ -z "${CAMBRIA_LOG_THRESHOLD}" ]; then
+ CAMBRIA_LOG_THRESHOLD="INFO"
+fi
+
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF PREVIOUS BACKUP FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${BACKUP1_LOG4J_FILE} ]; then
+ cp -f ${BACKUP1_LOG4J_FILE} ${BACKUP2_LOG4J_FILE} || {
+ echo "ERROR: Could not copy ${BACKUP1_LOG4J_FILE} to ${BACKUP2_LOG4J_FILE}"
+ exit 8
+ }
+fi
+
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF CURRENT FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${FINAL_LOG4J_FILE} ]; then
+ cp -f ${FINAL_LOG4J_FILE} ${BACKUP1_LOG4J_FILE} || {
+ echo "ERROR: Could not copy ${FINAL_LOG4J_FILE} to ${BACKUP1_LOG4J_FILE}"
+ exit 9
+ }
+fi
+
+#------------------------------------------------------------------------
+#- PROCESS THE TEMPLATE
+#------------------------------------------------------------------------
+sed -e 's/${CAMBRIA_LOG_THRESHOLD}/'"${CAMBRIA_LOG_THRESHOLD}"'/g' \
+    -e 's,${CAMBRIA_LOG_DIR},'"${CAMBRIA_LOG_DIR}"',g' "${TEMPLATE_LOG4J_FILE}" > "${FINAL_LOG4J_FILE}" || {
+	echo "ERROR: Could not process template file ${TEMPLATE_LOG4J_FILE} into ${FINAL_LOG4J_FILE}"
+	exit 10
+	}
+
+FINAL_LOGSTASH_FILE=${ROOT_DIR}/etc/messages.conf
+TEMPLATE_LOGSTASH_FILE=${ROOT_DIR}/etc/logstash_cambria_template.conf
+BACKUP1_LOGSTASH_FILE=${ROOT_DIR}/etc/messages.conf.bk.1
+BACKUP2_LOGSTASH_FILE=${ROOT_DIR}/etc/messages.conf.bk.2
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF PREVIOUS BACKUP FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${BACKUP1_LOGSTASH_FILE} ]; then
+ cp -f ${BACKUP1_LOGSTASH_FILE} ${BACKUP2_LOGSTASH_FILE} || {
+ echo "ERROR: Could not copy ${BACKUP1_LOGSTASH_FILE} to ${BACKUP2_LOGSTASH_FILE}"
+ exit 11
+ }
+fi
+
+#------------------------------------------------------------------------
+#- MAKE A BACKUP OF CURRENT FILE, IF EXISTS
+#------------------------------------------------------------------------
+if [ -f ${FINAL_LOGSTASH_FILE} ]; then
+ cp -f ${FINAL_LOGSTASH_FILE} ${BACKUP1_LOGSTASH_FILE} || {
+ echo "ERROR: Could not copy ${FINAL_LOGSTASH_FILE} to ${BACKUP1_LOGSTASH_FILE}"
+ exit 12
+ }
+fi
+
+#------------------------------------------------------------------------
+#- PROCESS THE TEMPLATE
+#------------------------------------------------------------------------
+sed -e 's,${CAMBRIA_SERVER_LOG},'"${CAMBRIA_LOG_DIR}"'/cambria.log,g' \
+    -e 's/${ELASTICSEARCH_NODES}/'"${ELASTICSEARCH_NODES}"'/g' "${TEMPLATE_LOGSTASH_FILE}" > "${FINAL_LOGSTASH_FILE}" || {
+	echo "ERROR: Could not process template file ${TEMPLATE_LOGSTASH_FILE} into ${FINAL_LOGSTASH_FILE}"
+	exit 13
+	}
+
+#------------------------------------------------------------------------
+#- CLEAN EXIT
+#------------------------------------------------------------------------
+echo "Localized Successfully."
+exit 0
diff --git a/src/main/swm/deinstall/postproc/post_proc b/src/main/swm/deinstall/postproc/post_proc
new file mode 100644
index 0000000..05021d5
--- /dev/null
+++ b/src/main/swm/deinstall/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+#cd ../../common
+#exec sh -x ./deinstall_postproc.sh
diff --git a/src/main/swm/deinstall/preproc/pre_proc b/src/main/swm/deinstall/preproc/pre_proc
new file mode 100644
index 0000000..e27b41a
--- /dev/null
+++ b/src/main/swm/deinstall/preproc/pre_proc
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+#source /opt/app/datartr/dmaap1/setJavaHome.sh
+#cd /opt/app/datartr/dmaap1/tomcat_2/apache-tomcat-7.0.64/bin
+#exec sh -x ./startup.sh
+
+#cd ../../common
+#exec sh -x ./deinstall_preproc.sh
diff --git a/src/main/swm/descriptor.xml b/src/main/swm/descriptor.xml
new file mode 100644
index 0000000..2ef3c94
--- /dev/null
+++ b/src/main/swm/descriptor.xml
@@ -0,0 +1,46 @@
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+<descriptor xmlns="http://aft.att.com/swm/descriptor" version="1">
+ <platforms>
+ <platform os="Linux" osVersions="*" architecture="*"/>
+ </platforms>
+
+ <paths>
+ <path name="/opt/app/dmaap/msgrtr/jenkinsbuild" type="d" user="msgrtr" group="dmaap" permissions="755" recursive="true"/>
+ </paths>
+
+ <actions>
+ <action type="INST">
+ <proc stage="PRE" user="msgrtr" group="dmaap"/>
+ <proc stage="POST" user="msgrtr" group="dmaap"/>
+ </action>
+ <action type="INIT">
+ <proc stage="PRE" user="msgrtr" group="dmaap"/>
+ <proc stage="POST" user="msgrtr" group="dmaap"/>
+ </action>
+ <action type="DINST">
+ <proc stage="PRE" user="msgrtr" group="dmaap"/>
+ <proc stage="POST" user="msgrtr" group="dmaap"/>
+ </action>
+ </actions>
+
+</descriptor>
diff --git a/src/main/swm/fallback/postproc/post_proc b/src/main/swm/fallback/postproc/post_proc
new file mode 100644
index 0000000..d017750
--- /dev/null
+++ b/src/main/swm/fallback/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_postproc.sh
diff --git a/src/main/swm/fallback/preproc/pre_proc b/src/main/swm/fallback/preproc/pre_proc
new file mode 100644
index 0000000..3f1b26f
--- /dev/null
+++ b/src/main/swm/fallback/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_preproc.sh
diff --git a/src/main/swm/initinst/postproc/post_proc b/src/main/swm/initinst/postproc/post_proc
new file mode 100644
index 0000000..afbabdd
--- /dev/null
+++ b/src/main/swm/initinst/postproc/post_proc
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+exec sh -x ../../install/postproc/post_proc
+
+#cd ../../common
+#exec sh -x ./install_postproc.sh
diff --git a/src/main/swm/initinst/preproc/pre_proc b/src/main/swm/initinst/preproc/pre_proc
new file mode 100644
index 0000000..2588a94
--- /dev/null
+++ b/src/main/swm/initinst/preproc/pre_proc
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+#exec sh -x ../../install/preproc/pre_proc
+
+#cd ../../common
+#exec sh -x ./install_preproc.sh
diff --git a/src/main/swm/install/postproc/post_proc b/src/main/swm/install/postproc/post_proc
new file mode 100644
index 0000000..7954fb1
--- /dev/null
+++ b/src/main/swm/install/postproc/post_proc
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+echo "Calling common script to set up environment"
+#source ../../common/common.env
+echo "Common environment set up"
+
+#source /opt/app/datartr/dmaap1/setJavaHome.sh
+
+echo "moving DMaaP.war"
+cd /opt/app/dmaap/msgrtr/jenkinsbuild/lib
+mv DMaaP.war /opt/app/dmaap/msgrtr/jenkinsbuild
+cd /opt/app/dmaap/msgrtr/jenkinsbuild
+rm -r lib
+rm -r bin
+rm -r etc
+
+#echo "swm testing"
+#cd /opt/app/datartr/dmaap1/tomcat_2/apache-tomcat-7.0.64/bin
+#exec sh -x ./startup.sh
+#echo "tomcat started"
+
+#Run common install tasks
+#cd ../../common
+#exec sh -x ./install_postproc.sh
diff --git a/src/main/swm/install/preproc/pre_proc b/src/main/swm/install/preproc/pre_proc
new file mode 100644
index 0000000..ffd71e3
--- /dev/null
+++ b/src/main/swm/install/preproc/pre_proc
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+#echo "Calling common script to set up environment"
+#source ../../common/common.env
+#source /opt/app/datartr/dmaap1/setJavaHome.sh
+#echo "Common environment set up"
+
+#echo "swm testing"
+#cd /opt/app/datartr/dmaap1/tomcat_2/apache-tomcat-7.0.64/bin
+#exec sh -x ./shutdown.sh
+#echo "tomcat stopped"
+
+
+
+#echo "Running common install pre proc"
+#cd ../../common
+#exec sh -x ./install_preproc.sh
diff --git a/src/main/webapp/WEB-INF/spring-context.xml b/src/main/webapp/WEB-INF/spring-context.xml
new file mode 100644
index 0000000..d7f128e
--- /dev/null
+++ b/src/main/webapp/WEB-INF/spring-context.xml
@@ -0,0 +1,122 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mvc="http://www.springframework.org/schema/mvc"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc-3.0.xsd
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
+
+ <!-- Dependency Injection with annotations -->
+ <context:component-scan
+ base-package="com.att.nsa.cambria.utils, com.att.nsa.cambria, com.att.nsa.cambria.rest,
+ com.att.nsa.cambria.service.impl,com.att.nsa.cambria.beans,com.att.nsa.cambria.security,
+ com.att.nsa.cambria.transaction,com.att.nsa.cambria.exception" />
+
+ <context:property-placeholder
+ location="classpath:msgRtrApi.properties,classpath:DMaaPErrorMesaages.properties" />
+
+
+ <bean id="propertyReader" class="com.att.nsa.cambria.utils.PropertyReader" />
+ <bean
+ class="org.springframework.beans.factory.config.MethodInvokingFactoryBean">
+ <!-- Next value is the full qualified name of the static setter including
+ method name -->
+ <property name="staticMethod"
+ value="com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory.populateKafkaInternalDefaultsMap" />
+ <property name="arguments">
+ <list>
+ <ref bean="propertyReader" />
+ </list>
+ </property>
+ </bean>
+
+ <bean id="drumlinRequestRouter"
+ class="com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter" />
+
+ <bean id="dMaaPMetricsSet" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPMetricsSet">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPZkClient" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPZkClient">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+	<bean id="dMaaPZkConfigDb" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPZkConfigDb">
+ <constructor-arg ref="dMaaPZkClient" />
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="kafkaPublisher" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaPublisher">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPKafkaConsumerFactory" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaConsumerFactory">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPMetricsSet" />
+ <constructor-arg ref="curator" />
+ </bean>
+
+ <bean id="curator" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPCuratorFactory"
+ factory-method="getCurator">
+ <constructor-arg ref="propertyReader" />
+ </bean>
+
+ <bean id="dMaaPKafkaMetaBroker" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPZkClient" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ </bean>
+
+	<bean id="q" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryQueue" />
+
+ <bean id="mmb" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryMetaBroker">
+ <constructor-arg ref="q" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ <!-- <constructor-arg ref="propertyReader" />-->
+ </bean>
+
+ <bean id="dMaaPNsaApiDb" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPNsaApiDb"
+ factory-method="buildApiKeyDb">
+ <constructor-arg ref="propertyReader" />
+ <constructor-arg ref="dMaaPZkConfigDb" />
+ </bean>
+
+ <!-- <bean id="dMaaPTranDb" class="com.att.nsa.cambria.transaction.DMaaPTransactionDB"
+ factory-method="buildTransactionDb"> <constructor-arg ref="propertyReader"
+ /> <constructor-arg ref="dMaaPZkConfigDb" /> </bean> -->
+
+ <bean id="dMaaPAuthenticatorImpl" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl">
+ <constructor-arg ref="dMaaPNsaApiDb" />
+ </bean>
+ <bean id="defLength" class="org.onap.dmaap.messagerouter.msgrtr.nsa.filter.DefaultLength">
+ <property name="defaultLength" value="${maxcontentlength}"></property>
+ </bean>
+
+ <!-- <bean class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
+ <property name="location"> <value>msgRtrApi.properties</value> </property>
+ </bean> -->
+
+</beans> \ No newline at end of file
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..c3e07b5
--- /dev/null
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ============LICENSE_START=======================================================
+ org.onap.dmaap
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+ -->
+
+<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:web="http://java.sun.com/xml/ns/javaee" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd" id="WebApp_ID" version="2.5">
+ <display-name>DMaaP</display-name>
+ <listener>
+ <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+ </listener>
+ <context-param>
+ <param-name>contextConfigLocation</param-name>
+ <param-value>/WEB-INF/spring-context.xml</param-value>
+ </context-param>
+ <listener>
+ <listener-class>org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener.CambriaServletContextListener</listener-class>
+ </listener>
+ <filter>
+ <display-name>ContentLengthFilter</display-name>
+ <filter-name>ContentLengthFilter</filter-name>
+ <filter-class>org.onap.dmaap.messagerouter.msgrtr.nsa.filter.ContentLengthFilter</filter-class>
+ </filter>
+ <filter-mapping>
+ <filter-name>ContentLengthFilter</filter-name>
+ <url-pattern>/dmaaprest/events/*</url-pattern>
+ </filter-mapping>
+</web-app> \ No newline at end of file
diff --git a/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java b/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java
new file mode 100644
index 0000000..a6ad73e
--- /dev/null
+++ b/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java
@@ -0,0 +1,335 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+
+package com.att.sa.cambria.testClient;
+
+import kafka.api.FetchRequest;
+import kafka.api.FetchRequestBuilder;
+import kafka.api.PartitionOffsetRequestInfo;
+import kafka.cluster.Broker;
+import kafka.common.ErrorMapping;
+import kafka.common.TopicAndPartition;
+import kafka.javaapi.*;
+import kafka.javaapi.consumer.SimpleConsumer;
+import kafka.message.MessageAndOffset;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+public class SimpleExample
+{
+// public static void main ( String args[] )
+// {
+// if ( args.length < 5 )
+// {
+// System.err.println ( "usage: SimpleExample <maxReads> <topic> <partition> <host> <port>" );
+// return;
+// }
+//
+// final long maxReads = Long.parseLong ( args[0] );
+// final String topic = args[1];
+// final int partition = Integer.parseInt ( args[2] );
+//
+// final int port = Integer.parseInt ( args[4] );
+// final hostPort hp = new hostPort ( args[3], port );
+// final LinkedList<hostPort> seeds = new LinkedList<hostPort> ();
+// seeds.add ( hp );
+//
+// try
+// {
+// final SimpleExample example = new SimpleExample ();
+// example.run ( maxReads, topic, partition, seeds );
+// }
+// catch ( Exception e )
+// {
+// System.out.println ( "Oops:" + e );
+// e.printStackTrace ();
+// }
+// }
+//
+// public SimpleExample ()
+// {
+// fReplicaBrokers = new ArrayList<hostPort> ();
+// }
+//
+// public void run ( long remainingAllowedReads, String a_topic, int a_partition, List<hostPort> seedHosts ) throws IOException
+// {
+// // find the meta data about the topic and partition we are interested in
+//
+// hostPort leadBroker = findLeader ( seedHosts, a_topic, a_partition );
+// if ( leadBroker == null )
+// {
+// System.out.println ( "Can't find leader for Topic and Partition. Exiting" );
+// return;
+// }
+//
+// final String clientName = "Client_" + a_topic + "_" + a_partition;
+//
+// SimpleConsumer consumer = new SimpleConsumer ( leadBroker.fHost, leadBroker.fPort, 100000, 64 * 1024, clientName );
+// long readOffset = getLastOffset ( consumer, a_topic, a_partition, kafka.api.OffsetRequest.EarliestTime (), clientName );
+//
+// int numErrors = 0;
+// while ( remainingAllowedReads > 0 )
+// {
+// if ( consumer == null )
+// {
+// consumer = new SimpleConsumer ( leadBroker.fHost, leadBroker.fPort, 100000, 64 * 1024, clientName );
+// }
+//
+// final FetchRequest req = new FetchRequestBuilder ()
+// .clientId ( clientName )
+// .addFetch ( a_topic, a_partition, readOffset, 100000 ).build ();
+// final FetchResponse fetchResponse = consumer.fetch ( req );
+//
+// if ( fetchResponse.hasError () )
+// {
+// numErrors++;
+//
+// // Something went wrong!
+// final short code = fetchResponse.errorCode ( a_topic, a_partition );
+// System.out.println ( "Error fetching data from the Broker:" + leadBroker + " Reason: " + code );
+// if ( numErrors > 5 )
+// break;
+//
+// if ( code == ErrorMapping.OffsetOutOfRangeCode () )
+// {
+// // We asked for an invalid offset. For simple case ask for
+// // the last element to reset
+// readOffset = getLastOffset ( consumer, a_topic,
+// a_partition, kafka.api.OffsetRequest.LatestTime (),
+// clientName );
+// continue;
+// }
+//
+// consumer.close ();
+// consumer = null;
+//
+// leadBroker = findNewLeader ( leadBroker, a_topic, a_partition );
+// continue;
+// }
+// numErrors = 0;
+//
+// long numRead = 0;
+// for ( MessageAndOffset messageAndOffset : fetchResponse.messageSet ( a_topic, a_partition ) )
+// {
+// long currentOffset = messageAndOffset.offset ();
+// if ( currentOffset < readOffset )
+// {
+// System.out.println ( "Found an old offset: "
+// + currentOffset + " Expecting: " + readOffset );
+// continue;
+// }
+// readOffset = messageAndOffset.nextOffset ();
+// ByteBuffer payload = messageAndOffset.message ().payload ();
+//
+// byte[] bytes = new byte [payload.limit ()];
+// payload.get ( bytes );
+// System.out.println ( String.valueOf ( messageAndOffset.offset () ) + ": " + new String ( bytes, "UTF-8" ) );
+// numRead++;
+// remainingAllowedReads--;
+// }
+//
+// if ( numRead == 0 )
+// {
+// try
+// {
+// Thread.sleep ( 1000 );
+// }
+// catch ( InterruptedException ie )
+// {
+// }
+// }
+// }
+//
+// if ( consumer != null )
+// {
+// consumer.close ();
+// }
+// }
+//
+// public static long getLastOffset ( SimpleConsumer consumer, String topic,
+// int partition, long whichTime, String clientName )
+// {
+// TopicAndPartition topicAndPartition = new TopicAndPartition ( topic,
+// partition );
+// Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo> ();
+// requestInfo.put ( topicAndPartition, new PartitionOffsetRequestInfo (
+// whichTime, 1 ) );
+// kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest (
+// requestInfo, kafka.api.OffsetRequest.CurrentVersion (), clientName );
+// OffsetResponse response = consumer.getOffsetsBefore ( request );
+//
+// if ( response.hasError () )
+// {
+// System.out.println ( "Error fetching data Offset Data the Broker. Reason: "
+// + response.errorCode ( topic, partition ) );
+// return 0;
+// }
+//
+// final long[] offsets = response.offsets ( topic, partition );
+// return offsets[0];
+// }
+//
+// /**
+// * Find a new leader for a topic/partition, including a pause for the coordinator to
+// * find a new leader, as needed.
+// *
+// * @param oldLeader
+// * @param topic
+// * @param partition
+// * @return
+// * @throws IOException
+// */
+// private hostPort findNewLeader ( hostPort oldLeader, String topic, int partition ) throws IOException
+// {
+// try
+// {
+// int attemptsLeft = 3;
+// boolean haveSlept = false;
+//
+// while ( attemptsLeft-- > 0 )
+// {
+// System.out.println ( "" + attemptsLeft + " attempts Left" ); // FIXME: make sure it's 3 attempts!
+//
+// // ask the brokers for a leader
+// final hostPort newLeader = findLeader ( fReplicaBrokers, topic, partition );
+// if ( newLeader != null )
+// {
+// // we can use this leader if it's different (i.e. a new leader has been elected)
+// // or it's the same leader, but we waited to allow ZK to get a new one, and
+// // the original recovered
+// if ( !oldLeader.equals ( newLeader ) || haveSlept )
+// {
+// return newLeader;
+// }
+// }
+//
+// // sleep
+// haveSlept = true;
+// Thread.sleep ( 1000 );
+// }
+// }
+// catch ( InterruptedException x )
+// {
+// // just give up
+// }
+//
+// System.out.println ( "Unable to find new leader after Broker failure. Exiting" );
+// throw new IOException ( "Unable to find new leader after Broker failure. Exiting" );
+// }
+//
+// /**
+// * Given one or more seed brokers, find the leader for a given topic/partition
+// * @param seeds
+// * @param topic
+// * @param partition
+// * @return partition metadata, or null
+// */
+// private hostPort findLeader ( List<hostPort> seeds, String topic, int partition )
+// {
+// final List<String> topics = new ArrayList<String> ();
+// topics.add ( topic );
+//
+// for ( hostPort seed : seeds )
+// {
+// final SimpleConsumer consumer = new SimpleConsumer ( seed.fHost, seed.fPort, 100000, 64 * 1024, "leaderLookup" );
+// final TopicMetadataRequest req = new TopicMetadataRequest ( topics );
+// final TopicMetadataResponse resp = consumer.send ( req );
+// consumer.close ();
+//
+// final List<TopicMetadata> metaData = resp.topicsMetadata ();
+// for ( TopicMetadata item : metaData )
+// {
+// for ( PartitionMetadata part : item.partitionsMetadata () )
+// {
+// if ( part.partitionId () == partition )
+// {
+// // found our partition. load the details, then return it
+// fReplicaBrokers.clear ();
+// for ( kafka.cluster.Broker replica : part.replicas () )
+// {
+// fReplicaBrokers.add ( new hostPort ( replica.host (), replica.port () ) );
+// }
+// return new hostPort ( part.leader () );
+// }
+// }
+// }
+// }
+//
+// return null;
+// }
+//
+// private static class hostPort
+// {
+// public hostPort ( String host, int port ) { fHost = host; fPort = port; }
+//
+// public hostPort ( Broker leader )
+// {
+// fHost = leader.host ();
+// fPort = leader.port ();
+// }
+//
+//
+// public final String fHost;
+// public final int fPort;
+//
+// @Override
+// public int hashCode ()
+// {
+// final int prime = 31;
+// int result = 1;
+// result = prime * result
+// + ( ( fHost == null ) ? 0 : fHost.hashCode () );
+// result = prime * result + fPort;
+// return result;
+// }
+//
+// @Override
+// public boolean equals ( Object obj )
+// {
+// if ( this == obj )
+// return true;
+// if ( obj == null )
+// return false;
+// if ( getClass () != obj.getClass () )
+// return false;
+// hostPort other = (hostPort) obj;
+// if ( fHost == null )
+// {
+// if ( other.fHost != null )
+// return false;
+// }
+// else if ( !fHost.equals ( other.fHost ) )
+// return false;
+// if ( fPort != other.fPort )
+// return false;
+// return true;
+// }
+// }
+//
+// private List<hostPort> fReplicaBrokers;
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/ApiKeyBean.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/ApiKeyBean.java
new file mode 100644
index 0000000..dd458eb
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/ApiKeyBean.java
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.Serializable;
+
+public class ApiKeyBean implements Serializable {
+
+ /*private static final long serialVersionUID = -8219849086890567740L;
+
+ // private static final String KEY_CHARS =
+ // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+
+ private String email;
+ private String description;
+
+ public ApiKeyBean() {
+ super();
+ }
+
+ public ApiKeyBean(String email, String description) {
+ super();
+ this.email = email;
+ this.description = description;
+ }
+
+ public String getEmail() {
+ return email;
+ }
+
+ public void setEmail(String email) {
+ this.email = email;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ /*
+ * public String getKey() { return generateKey(16); }
+ *
+ * public String getSharedSecret() { return generateKey(24); }
+ *
+ * private static String generateKey ( int length ) { return
+ * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); }
+ */
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapPubSubTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapPubSubTest.java
new file mode 100644
index 0000000..066bf50
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapPubSubTest.java
@@ -0,0 +1,138 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.InputStream;
+import java.util.Scanner;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import junit.framework.TestCase;
+
+import org.json.JSONObject;
+import org.apache.http.HttpStatus;
+import org.apache.log4j.Logger;
+
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+
+public class DMaapPubSubTest {
+/* private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);
+	Client client = ClientBuilder.newClient();
+	String url = LoadPropertyFile.getPropertyFileData().getProperty("url");
+	WebTarget target = client.target(url);
+	String topicapikey;
+	String topicsecretKey;
+	String serverCalculatedSignature;
+	String date = LoadPropertyFile.getPropertyFileData().getProperty("date");
+	// changes by islam
+	String topic_name = LoadPropertyFile.getPropertyFileData().getProperty("topicName");
+	DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+	NOTE(review): the entire class body is inside this one block comment, so every test is a no-op; it depends on a live MR endpoint from LoadPropertyFile — confirm before re-enabling.
+	Also, LOGGER above is built with DMaapTopicTest.class — presumably should be DMaapPubSubTest.class; verify if restored.
+	public void testProduceMessage() {
+		LOGGER.info("test case publish message");
+		// DMaapTopicTest topicCreation = new DMaapTopicTest();
+		DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+		// creating topic
+		createTopic(topic_name);
+
+		target = client.target(url);
+		target = target.path("/events/");
+		target = target.path(topic_name);
+		Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).post(Entity.json("{message:producing first message}"));
+		keyInstance.assertStatus(response2);
+		LOGGER.info("successfully published message");
+	}
+
+	public void testConsumeMessage() {
+		LOGGER.info("test case subscribing message");
+		createTopic(topic_name);
+		target = client.target(url);
+		target = target.path("/events/");
+		target = target.path(topic_name);
+		target = target.path("consumGrp");
+		target = target.path(topicapikey);
+		Response response = target.request().get();
+		keyInstance.assertStatus(response);
+		LOGGER.info("successfully consumed messages");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		String data = s.next();
+		s.close();
+		LOGGER.info("Consumed Message data: " + data);
+	}
+
+	public void createTopic(String name) {
+		if (!topicExist(name)) {
+			TopicBean topicbean = new TopicBean();
+			topicbean.setDescription("creating topic");
+			topicbean.setPartitionCount(1);
+			topicbean.setReplicationCount(1);
+			topicbean.setTopicName(name);
+			topicbean.setTransactionEnabled(true);
+			target = client.target(url);
+			target = target.path("/topics/create");
+			JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "topic creation"));
+			topicapikey = (String) jsonObj.get("key");
+			topicsecretKey = (String) jsonObj.get("secret");
+			serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+			Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+					.header("X-CambriaDate", date).post(Entity.json(topicbean));
+			keyInstance.assertStatus(response);
+		}
+	}
+
+	public boolean topicExist(String topicName) {
+		target = target.path("/topics/" + topicName);
+		InputStream is, issecret;
+		Response response = target.request().get();
+		if (response.getStatus() == HttpStatus.SC_OK) {
+			is = (InputStream) response.getEntity();
+			Scanner s = new Scanner(is);
+			s.useDelimiter("\\A");
+			JSONObject dataObj = new JSONObject(s.next());
+			s.close();
+			// get owner of a topic
+			topicapikey = (String) dataObj.get("owner");
+			target = client.target(url);
+			target = target.path("/apiKeys/");
+			target = target.path(topicapikey);
+			Response response2 = target.request().get();
+			issecret = (InputStream) response2.getEntity();
+			Scanner st = new Scanner(issecret);
+			st.useDelimiter("\\A");
+			JSONObject dataObj1 = new JSONObject(st.next());
+			st.close();
+			// get secret key of this topic//
+			topicsecretKey = (String) dataObj1.get("secret");
+			serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+			return true;
+		} else
+			return false;
+	}*/
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapTopicTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapTopicTest.java
new file mode 100644
index 0000000..780ce1a
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DMaapTopicTest.java
@@ -0,0 +1,267 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.InputStream;
+import java.util.Properties;
+import java.util.Scanner;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import junit.framework.TestCase;
+
+import org.apache.http.HttpStatus;
+import org.json.JSONObject;
+import org.apache.log4j.Logger;
+
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+
+public class DMaapTopicTest {
+	/*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);
+	Client client = ClientBuilder.newClient();
+	String topicapikey, topicsecretKey, serverCalculatedSignature;
+	Properties prop = LoadPropertyFile.getPropertyFileData();
+	String topicName = prop.getProperty("topicName");
+	String url = prop.getProperty("url");
+	String date = prop.getProperty("date");
+	WebTarget target = client.target(url);
+	DmaapApiKeyTest keyInstance = new DmaapApiKeyTest();
+	NOTE(review): the whole topic CRUD/permission test body is inside this single block comment, so the class is currently a no-op;
+	it requires a running Message Router instance (url/date/topicName from LoadPropertyFile) — confirm environment before re-enabling.
+	public void createTopic(String name) {
+		if (!topicExist(name)) {
+			TopicBean topicbean = new TopicBean();
+			topicbean.setDescription("creating topic");
+			topicbean.setPartitionCount(1);
+			topicbean.setReplicationCount(1);
+			topicbean.setTopicName(name);
+			topicbean.setTransactionEnabled(true);
+			target = client.target(url);
+			target = target.path("/topics/create");
+			JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("nm254w@att.com", "topic creation"));
+			topicapikey = (String) jsonObj.get("key");
+			topicsecretKey = (String) jsonObj.get("secret");
+			serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+			Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+					.header("X-CambriaDate", date).post(Entity.json(topicbean));
+			keyInstance.assertStatus(response);
+		}
+
+	}
+
+	public boolean topicExist(String topicName) {
+		target = target.path("/topics/" + topicName);
+		InputStream is, issecret;
+		Response response = target.request().get();
+		if (response.getStatus() == HttpStatus.SC_OK) {
+			is = (InputStream) response.getEntity();
+			Scanner s = new Scanner(is);
+			s.useDelimiter("\\A");
+			JSONObject dataObj = new JSONObject(s.next());
+			s.close();
+			// get owner of a topic
+			topicapikey = (String) dataObj.get("owner");
+			target = client.target(url);
+			target = target.path("/apiKeys/");
+			target = target.path(topicapikey);
+			Response response2 = target.request().get();
+			issecret = (InputStream) response2.getEntity();
+			Scanner st = new Scanner(issecret);
+			st.useDelimiter("\\A");
+			JSONObject dataObj1 = new JSONObject(st.next());
+			st.close();
+			// get secret key of this topic//
+			topicsecretKey = (String) dataObj1.get("secret");
+			serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey);
+			return true;
+		} else
+			return false;
+	}
+
+	public void testCreateTopic() {
+		LOGGER.info("test case create topic");
+		createTopic(topicName);
+		LOGGER.info("Returning after create topic");
+	}
+
+	public void testOneTopic() {
+		LOGGER.info("test case get specific topic name " + topicName);
+		createTopic(topicName);
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		Response response = target.request().get();
+		LOGGER.info("Successfully returned after fetching topic" + topicName);
+		keyInstance.assertStatus(response);
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		JSONObject dataObj = new JSONObject(s.next());
+		LOGGER.info("Details of " + topicName + " : " + dataObj.toString());
+		s.close();
+	}
+
+	public void testdeleteTopic() {
+		LOGGER.info("test case delete topic name " + topicName);
+		createTopic(topicName);
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).delete();
+		keyInstance.assertStatus(response);
+		LOGGER.info("Successfully returned after deleting topic" + topicName);
+	}
+
+	public void testAllTopic() {
+		LOGGER.info("test case fetch all topic");
+		target = client.target(url);
+		target = target.path("/topics");
+		Response response = target.request().get();
+		keyInstance.assertStatus(response);
+		LOGGER.info("successfully returned after fetching all the topic");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		JSONObject dataObj = new JSONObject(s.next());
+		s.close();
+		LOGGER.info("List of all topics " + dataObj.toString());
+	}
+
+	public void testPublisherForTopic() {
+		LOGGER.info("test case get all publishers for topic: " + topicName);
+		// creating topic to check
+		createTopic(topicName);
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/producers");
+		// checking all producer for a particular topic
+		Response response = target.request().get();
+		keyInstance.assertStatus(response);
+		LOGGER.info("Successfully returned after getting all the publishers" + topicName);
+	}
+
+	public void testPermitPublisherForTopic() {
+		LOGGER.info("test case permit user for topic " + topicName);
+		JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to "));
+		String userapikey = (String) jsonObj.get("key");
+		createTopic(topicName);
+		// adding user to a topic//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/producers/");
+		target = target.path(userapikey);
+		Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).put(Entity.json(""));
+		keyInstance.assertStatus(response);
+		LOGGER.info("successfully returned after permiting the user for topic " + topicName);
+	}
+
+	public void testDenyPublisherForTopic() {
+		LOGGER.info("test case denying user for topic " + topicName);
+		JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to "));
+		String userapikey = (String) jsonObj.get("key");
+		createTopic(topicName);
+		// adding user to a topic//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/producers/");
+		target = target.path(userapikey);
+		target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).put(Entity.json(""));
+		// deleting user who is just added//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/producers/");
+		target = target.path(userapikey);
+		Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).delete();
+		keyInstance.assertStatus(response2);
+		LOGGER.info("successfully returned after denying the user for topic " + topicName);
+	}
+
+	public void testConsumerForTopic() {
+		LOGGER.info("test case get all consumers for topic: " + topicName);
+		// creating topic to check
+		createTopic(topicName);
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/consumers");
+		// checking all consumer for a particular topic
+		Response response = target.request().get();
+		keyInstance.assertStatus(response);
+		LOGGER.info("Successfully returned after getting all the consumers" + topicName);
+	}
+
+	public void testPermitConsumerForTopic() {
+		LOGGER.info("test case get all consumer for topic: " + topicName);
+		// creating user for adding to topic//
+		JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to "));
+		String userapikey = (String) jsonObj.get("key");
+		createTopic(topicName);
+		// adding user to a topic//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/consumers/");
+		target = target.path(userapikey);
+		Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).put(Entity.json(""));
+		keyInstance.assertStatus(response);
+		LOGGER.info("Successfully returned after getting all the consumers" + topicName);
+	}
+
+	public void testDenyConsumerForTopic() {
+		LOGGER.info("test case denying consumer for topic " + topicName);
+		// creating user for adding and deleting from topic//
+		JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to "));
+		String userapikey = (String) jsonObj.get("key");
+		createTopic(topicName);
+		// adding user to a topic//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/consumers/");
+		target = target.path(userapikey);
+		target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).put(Entity.json(""));
+		// deleting user who is just added//
+		target = client.target(url);
+		target = target.path("/topics/");
+		target = target.path(topicName);
+		target = target.path("/consumers/");
+		target = target.path(userapikey);
+		Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature)
+				.header("X-CambriaDate", date).delete();
+		keyInstance.assertStatus(response2);
+		LOGGER.info("successfully returned after denying the consumer for topic " + topicName);
+	}*/
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapAdminTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapAdminTest.java
new file mode 100644
index 0000000..19e78a9
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapAdminTest.java
@@ -0,0 +1,60 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+
+
+public class DmaapAdminTest {
+	/*private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class);
+	Client client = ClientBuilder.newClient();
+	WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url"));
+	NOTE(review): body is commented out AND this file currently has no import statements, so re-enabling
+	also requires restoring imports (Logger, Client/ClientBuilder, WebTarget, Response, Entity, HttpStatus, InputStream, Scanner, assertTrue).
+	public void assertStatus(Response response) {
+		assertTrue(response.getStatus() == HttpStatus.SC_OK);
+	}
+
+	// 1.get consumer cache
+	public void testConsumerCache() {
+		LOGGER.info("test case consumer cache");
+		target = target.path("/admin/consumerCache");
+		Response response = target.request().get();
+		assertStatus(response);
+		LOGGER.info("Successfully returned after fetching consumer cache");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		String data = s.next();
+		s.close();
+		LOGGER.info("Details of consumer cache :" + data);
+	}
+
+	// 2.drop consumer cache
+	public void testDropConsumerCache() {
+		LOGGER.info("test case drop consumer cache");
+		target = target.path("/admin/dropConsumerCache");
+		Response response = target.request().post(Entity.json(null));
+		assertStatus(response);
+		LOGGER.info("Successfully returned after dropping consumer cache");
+	}
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapApiKeyTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapApiKeyTest.java
new file mode 100644
index 0000000..e2ed0d4
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapApiKeyTest.java
@@ -0,0 +1,162 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.InputStream;
+import java.util.Properties;
+import java.util.Scanner;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import junit.framework.TestCase;
+
+import org.apache.log4j.Logger;
+import org.apache.http.HttpStatus;
+import org.json.JSONObject;
+
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+
+public class DmaapApiKeyTest {
+	/*
+	private static final Logger LOGGER = Logger.getLogger(DmaapApiKeyTest.class);
+	Client client = ClientBuilder.newClient();
+	Properties prop = LoadPropertyFile.getPropertyFileData();
+	String url = prop.getProperty("url");
+	WebTarget target = client.target(url);
+	String date = prop.getProperty("date");
+	NOTE(review): all key-CRUD tests below are inside this one block comment (no-ops); they need a live
+	/apiKeys endpoint. returnKey()/assertStatus() are referenced by other tests in this package — also disabled.
+	public JSONObject returnKey(ApiKeyBean apikeybean) {
+		LOGGER.info("Call to return newly created key");
+		target = client.target(url);
+		target = target.path("/apiKeys/create");
+		Response response = target.request().post(Entity.json(apikeybean));
+		assertStatus(response);
+		LOGGER.info("successfully created keys");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		JSONObject dataObj = new JSONObject(s.next());
+		s.close();
+		LOGGER.info("key details :" + dataObj.toString());
+		return dataObj;
+	}
+
+	// 1. create key
+	public void testCreateKey() {
+		LOGGER.info("test case create key");
+		ApiKeyBean apiKeyBean = new ApiKeyBean("nm254w@att.com", "Creating Api Key.");
+		returnKey(apiKeyBean);
+		LOGGER.info("Successfully returned after creating key");
+	}
+
+	public void assertStatus(Response response) {
+		assertTrue(response.getStatus() == HttpStatus.SC_OK);
+	}
+
+	// 2. get Allkey details
+	public void testAllKey() {
+		LOGGER.info("test case get all key");
+		target = target.path("/apiKeys");
+		Response response = target.request().get();
+		assertStatus(response);
+		LOGGER.info("successfully returned after get all key");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		LOGGER.info("Details of key: " + s.next());
+		s.close();
+
+	}
+
+	// 3. get specific key
+	public void testSpecificKey() {
+		LOGGER.info("test case get specific key");
+		String apiKey = "";
+		ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key.");
+
+		apiKey = (String) returnKey(apiKeyBean).get("key");
+		target = client.target(url);
+		target = target.path("/apiKeys/");
+		target = target.path(apiKey);
+		Response response = target.request().get();
+		assertStatus(response);
+		LOGGER.info("successfully returned after fetching specific key");
+	}
+
+	// 4. update key
+
+	public void testUpdateKey() {
+		LOGGER.info("test case update key");
+		String apiKey = "";
+		String secretKey = "";
+		final String serverCalculatedSignature;
+		final String X_CambriaAuth;
+		final String X_CambriaDate;
+		JSONObject jsonObj;
+
+		ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key for update");
+		ApiKeyBean apiKeyBean1 = new ApiKeyBean("ai03911@att.com", "updating Api Key.");
+		jsonObj = returnKey(apiKeyBean);
+		apiKey = (String) jsonObj.get("key");
+		secretKey = (String) jsonObj.get("secret");
+
+		serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey);
+		X_CambriaAuth = apiKey + ":" + serverCalculatedSignature;
+		X_CambriaDate = date;
+		target = client.target(url);
+		target = target.path("/apiKeys/" + apiKey);
+		Response response1 = target.request().header("X-CambriaAuth", X_CambriaAuth)
+				.header("X-CambriaDate", X_CambriaDate).put(Entity.json(apiKeyBean1));
+		assertStatus(response1);
+		LOGGER.info("successfully returned after updating key");
+	}
+
+	// 5. delete key
+	public void testDeleteKey() {
+		LOGGER.info("test case delete key");
+		String apiKey = "";
+		String secretKey = "";
+		final String serverCalculatedSignature;
+		final String X_CambriaAuth;
+		final String X_CambriaDate;
+		JSONObject jsonObj;
+		ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key.");
+		jsonObj = returnKey(apiKeyBean);
+		apiKey = (String) jsonObj.get("key");
+		secretKey = (String) jsonObj.get("secret");
+		serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey);
+		X_CambriaAuth = apiKey + ":" + serverCalculatedSignature;
+		X_CambriaDate = date;
+		target = client.target(url);
+		target = target.path("/apiKeys/" + apiKey);
+		Response response2 = target.request().header("X-CambriaAuth", X_CambriaAuth)
+				.header("X-CambriaDate", X_CambriaDate).delete();
+		assertStatus(response2);
+		LOGGER.info("successfully returned after deleting key");
+	}
+*/
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapMetricsTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapMetricsTest.java
new file mode 100644
index 0000000..658e274
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/DmaapMetricsTest.java
@@ -0,0 +1,77 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.InputStream;
+import java.util.Scanner;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import junit.framework.TestCase;
+
+import org.apache.log4j.Logger;
+import org.apache.http.HttpStatus;
+
+public class DmaapMetricsTest {
+	/*private static final Logger LOGGER = Logger.getLogger(DmaapMetricsTest.class);
+	Client client = ClientBuilder.newClient();
+	WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url"));
+	NOTE(review): whole body is one block comment, so both tests are no-ops; they expect a live /metrics endpoint.
+	public void assertStatus(Response response) {
+		assertTrue(response.getStatus() == HttpStatus.SC_OK);
+	}
+
+
+	// 1.get metrics
+	public void testMetrics() {
+		LOGGER.info("test case get all metrics");
+		target = target.path("/metrics");
+		Response response = target.request().get();
+		assertStatus(response);
+		LOGGER.info("successfully returned after fetching all metrics");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		String data = s.next();
+		s.close();
+		LOGGER.info("DmaapMetricTest Test all metrics" + data);
+	}
+
+	// 2.get metrics by name
+	public void testMetricsByName() {
+		LOGGER.info("test case get metrics by name");
+		target = target.path("/metrics/startTime");
+		Response response = target.request().get();
+		assertStatus(response);
+		LOGGER.info("successfully returned after fetching specific metrics");
+		InputStream is = (InputStream) response.getEntity();
+		Scanner s = new Scanner(is);
+		s.useDelimiter("\\A");
+		String data = s.next();
+		s.close();
+		LOGGER.info("DmaapMetricTest metrics by name" + data);
+	}
+*/
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/JUnitTestSuite.java
new file mode 100644
index 0000000..4b2d52a
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/JUnitTestSuite.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import junit.framework.TestSuite;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+import org.apache.log4j.Logger;
+
/**
 * JUnit 4 test suite aggregating the DMaaP Message Router REST API tests
 * (pub/sub, API keys, topics, metrics and admin).
 *
 * <p>NOTE(review): the {@code main} runner below is commented out, so the
 * suite is only runnable through the JUnit {@code @RunWith} mechanism.
 */
@RunWith(Suite.class)
@SuiteClasses({ DMaapPubSubTest.class, DmaapApiKeyTest.class, DMaapTopicTest.class, DmaapMetricsTest.class,
		DmaapAdminTest.class })
public class JUnitTestSuite {
	/*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class);


	public static void main(String[] args) {
		LOGGER.info("Running the test suite");
		TestSuite tstSuite = new TestSuite();
		LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
	}*/

}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/LoadPropertyFile.java
new file mode 100644
index 0000000..8c4afa2
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/LoadPropertyFile.java
@@ -0,0 +1,48 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+
/**
 * Helper intended to load test configuration (service URL, etc.) from
 * {@code DMaaPUrl.properties} on the test classpath.
 *
 * <p>NOTE(review): the implementation is entirely commented out, so this class
 * currently exposes nothing, while the (also disabled) metrics test still
 * refers to it. Re-enable or delete together with the dependent tests.
 */
public class LoadPropertyFile {
	/*private static final Logger LOGGER = Logger
			.getLogger(LoadPropertyFile.class);

	static public Properties getPropertyFileData() {
		Properties prop = new Properties();
		LOGGER.info("loading the property file");

		try {
			InputStream inputStream = LoadPropertyFile.class.getClassLoader()
					.getResourceAsStream("DMaaPUrl.properties");
			prop.load(inputStream);
			LOGGER.info("successfully loaded the property file");
		} catch (IOException e) {
			LOGGER.error("Error while retrieving API keys: " + e);
		}
		return prop;
	}*/
}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TestRunner.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TestRunner.java
new file mode 100644
index 0000000..bffb32e
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TestRunner.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+import org.apache.log4j.Logger;
+
/**
 * Command-line entry point intended to run {@code JUnitTestSuite} and log any
 * failures.
 *
 * <p>NOTE(review): the whole body is commented out, so this class is currently
 * a no-op placeholder.
 */
public class TestRunner {
	/*private static final Logger LOGGER = Logger.getLogger(TestRunner.class);


	public static void main(String[] args) {
		// TODO Auto-generated method stub
		Result result = JUnitCore.runClasses(JUnitTestSuite.class);
		for (Failure failure : result.getFailures()) {
			LOGGER.info(failure.toString());
		}
		LOGGER.info(result.wasSuccessful());
	}
*/
}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TopicBean.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TopicBean.java
new file mode 100644
index 0000000..9c0ddbb
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dmaap/TopicBean.java
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap;
+
+import java.io.Serializable;
+
/**
 * Simple serializable bean describing a DMaaP topic (name, description,
 * partition/replication counts, transaction flag).
 *
 * <p>NOTE(review): all fields and accessors are commented out, so the class is
 * currently an empty placeholder. An explicit {@code serialVersionUID} is kept
 * active so the {@link Serializable} contract does not depend on a
 * JVM-computed default while the rest of the class is disabled.
 */
public class TopicBean implements Serializable {

	// Stable stream identifier; matches the value used when the fields below
	// were active, so previously serialized instances remain compatible.
	private static final long serialVersionUID = -8620390377775457949L;

	/*
	 * private String topicName; private String description;
	 *
	 *
	 * private int partitionCount; private int replicationCount; private boolean
	 * transactionEnabled = false;
	 *
	 * public boolean isTransactionEnabled() { return transactionEnabled; }
	 *
	 * public void setTransactionEnabled(boolean transactionEnabled) {
	 * this.transactionEnabled = transactionEnabled; }
	 *
	 * public TopicBean() { super(); }
	 *
	 * public TopicBean(String topicName, String description, int
	 * partitionCount, int replicationCount, boolean transactionEnabled) {
	 * super(); this.topicName = topicName; this.description = description;
	 * this.partitionCount = partitionCount; this.replicationCount =
	 * replicationCount; this.transactionEnabled = transactionEnabled; }
	 *
	 * public String getTopicName() { return topicName; }
	 *
	 * public void setTopicName(String topicName) { this.topicName = topicName;
	 * }
	 *
	 * public String getDescription() { return description; }
	 *
	 * public void setDescription(String description) { this.description =
	 * description; }
	 *
	 * public int getPartitionCount() { return partitionCount; }
	 *
	 * public void setPartitionCount(int partitionCount) { this.partitionCount =
	 * partitionCount; }
	 *
	 * public int getReplicationCount() { return replicationCount; }
	 *
	 * public void setReplicationCount(int replicationCount) {
	 * this.replicationCount = replicationCount; }
	 */
}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/ApiKeyBean.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/ApiKeyBean.java
new file mode 100644
index 0000000..03e34ad
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/ApiKeyBean.java
@@ -0,0 +1,72 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.io.Serializable;
+
/**
 * Serializable value object carrying the data needed to request a Cambria API
 * key: the owner's e-mail address and a free-form description.
 */
public class ApiKeyBean implements Serializable {

	private static final long serialVersionUID = -8219849086890567740L;

	// Alphabet formerly used by the (now disabled) key-generation helpers:
	// private static final String KEY_CHARS =
	// "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";

	private String email;
	private String description;

	/** Creates an empty bean; populate it through the setters. */
	public ApiKeyBean() {
		super();
	}

	/**
	 * Creates a fully populated bean.
	 *
	 * @param email owner e-mail address
	 * @param description free-form description of the key request
	 */
	public ApiKeyBean(String email, String description) {
		super();
		this.email = email;
		this.description = description;
	}

	/** @return the owner e-mail address (may be null if never set) */
	public String getEmail() {
		return email;
	}

	/** @param email the owner e-mail address to set */
	public void setEmail(String email) {
		this.email = email;
	}

	/** @return the key description (may be null if never set) */
	public String getDescription() {
		return description;
	}

	/** @param description the key description to set */
	public void setDescription(String description) {
		this.description = description;
	}

	/*
	 * Disabled key-generation helpers kept for reference:
	 *
	 * public String getKey() { return generateKey(16); }
	 *
	 * public String getSharedSecret() { return generateKey(24); }
	 *
	 * private static String generateKey ( int length ) { return
	 * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); }
	 */

}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2AdminTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2AdminTest.java
new file mode 100644
index 0000000..ce7f80b
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2AdminTest.java
@@ -0,0 +1,148 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+
+public class DME2AdminTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DME2AdminTest.class);
+
+ protected String url;
+
+ protected Properties props;
+
+ protected HashMap<String, String> hm;
+
+ protected String methodType;
+
+ protected String contentType;
+
+ protected String user;
+
+ protected String password;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1");
+ System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false");
+ System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit");
+ this.props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ serviceName = "dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/admin";
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String protocol = props.getProperty("Protocol");
+
+ methodType = props.getProperty("MethodTypeGet");
+ contentType = props.getProperty("contenttype");
+ user = props.getProperty("user");
+ password = props.getProperty("password");
+
+
+ this.url = protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env + "&"
+ + "routeOffer=" + partner + "&partner=BOT_R";
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ }
+
+ public void testGetConsumerCache() {
+ LOGGER.info("test case consumer cache started");
+
+ String subContextPath = props.getProperty("SubContextPathGetAdminConsumerCache");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ sender.setSubContext(subContextPath);
+ sender.setPayload("");
+ sender.addHeader("Content-Type", contentType);
+
+ sender.addHeader("X-CambriaAuth", "user1:7J49YriFlyRgebyOsSJhZvY/C60=");
+ sender.addHeader("X-X-CambriaDate", "2016-10-18T09:56:04-05:00");
+
+ //sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+ LOGGER.info("Getting consumer Cache");
+ String reply = sender.sendAndWait(5000L);
+ System.out.println(reply);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ assertNotNull(reply);
+ LOGGER.info("response from consumer cache=" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void ttestDropConsumerCache() {
+ LOGGER.info("Drom consumer cache initiated");
+
+ String subContextPath = props.getProperty("SubContextPathDropAdminConsumerCache");
+
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ sender.setSubContext(subContextPath);
+ sender.setPayload("");
+ sender.addHeader("Content-Type", contentType);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Dropping consumer cache...........");
+ String reply = sender.sendAndWait(5000L);
+
+ // assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ assertNotNull(reply);
+ LOGGER.info("response =" + reply);
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ApiKeyTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ApiKeyTest.java
new file mode 100644
index 0000000..9088eb9
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ApiKeyTest.java
@@ -0,0 +1,229 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.json.JSONObject;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+
+import junit.framework.TestCase;
+
+public class DME2ApiKeyTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DME2ApiKeyTest.class);
+
+ protected String url;
+
+ protected Properties props;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1");
+ System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false");
+ System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit");
+ this.props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String protocol = props.getProperty("Protocol");
+ this.url = protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env + "&"
+ + "routeOffer=" + partner + "&partner=BOT_R";
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ }
+
+ public void testCreateKey() {
+ LOGGER.info("Create Key test case initiated");
+
+ ApiKeyBean apiKeyBean = new ApiKeyBean("user1@us.att.com", "Creating Api Key.m");
+
+ System.out.println(url);
+
+ returnKey(apiKeyBean, url, props);
+
+ }
+
+ public String returnKey(ApiKeyBean apibean, String url, Properties props) {
+
+ String reply = null;
+ try {
+ LOGGER.info("Call to return key ");
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypePost"));
+ sender.setSubContext(props.getProperty("SubContextPathGetCreateKeys"));
+ String jsonStringApiBean = new ObjectMapper().writeValueAsString(apibean);
+ sender.setPayload(jsonStringApiBean);
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ LOGGER.info("creating ApiKey");
+ reply = sender.sendAndWait(5000L);
+ System.out.println("reply: " + reply);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return reply;
+ }
+
+ public void testGetAllKey() {
+ LOGGER.info("Test case Get All key initiated....");
+ try {
+ DME2Client sender = new DME2Client(new URI(this.url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(this.props.getProperty("MethodTypeGet"));
+ String subcontextPath = this.props.getProperty("SubContextPathGetApiKeys");
+ // sender.setSubContext(subcontextPath);
+ sender.setPayload("");
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ LOGGER.info("Fetching all keys");
+ String reply = sender.sendAndWait(5000L);
+ System.out.println(reply);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void testGetOneKey() {
+ LOGGER.info("Test case get one key initiated");
+ ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m");
+ JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props));
+ String apiKey = (String) jsonObj.get("key");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypeGet"));
+ sender.setSubContext(props.getProperty("SubContextPathGetOneKey") + apiKey);
+ sender.setPayload("");
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+
+ LOGGER.info("Fetching details of api key: " + apiKey);
+ String reply = sender.sendAndWait(5000L);
+ System.out.println(reply);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // ............. test case update key is not applicable in
+ // DME2.................//
+ public void testUpdateKey() {
+ ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m");
+
+ JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props));
+ String apiKey = (String) jsonObj.get("key");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ String p = props.getProperty("MethodTypePut");
+ sender.setMethod(p);
+ String s = props.getProperty("SubContextPathUpdateKeys") + apiKey;
+ sender.setSubContext(s);
+
+ String jsonStringApiBean = new ObjectMapper()
+ .writeValueAsString(new ApiKeyBean("user1@att.com", "updating key"));
+ sender.setPayload(jsonStringApiBean);
+ System.out.println(jsonStringApiBean);
+ String c = props.getProperty("contenttype");
+ sender.addHeader("content-type", c);
+ sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass"));
+
+ System.out.println("creating ApiKey");
+ String reply = sender.sendAndWait(5000L);
+ assertNotNull(reply);
+ System.out.println("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // ............. test case delete key is not applicable in
+ // DME2.................//
+
+ public void testDeleteKey() {
+ ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m");
+
+ JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props));
+ String apiKey = (String) jsonObj.get("key");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ String p = props.getProperty("MethodTypeDelete");
+ sender.setMethod(p);
+ String s = props.getProperty("SubContextPathDeleteteKeys") + apiKey;
+ sender.setSubContext(s);
+
+ sender.setPayload(""); // System.out.println(jsonStringApiBean);
+ String c = props.getProperty("contenttype");
+ sender.addHeader("content-type", c);
+ sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass"));
+
+ System.out.println("creating ApiKey");
+ String reply = sender.sendAndWait(5000L);
+ assertNotNull(reply);
+ System.out.println("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerFilterTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerFilterTest.java
new file mode 100644
index 0000000..3a4bc75
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerFilterTest.java
@@ -0,0 +1,97 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URLEncoder;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+
+public class DME2ConsumerFilterTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DME2ConsumerFilterTest.class);
+
+ public void testConsumerFilter() {
+ LOGGER.info("Test case consumer filter initiated");
+
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String protocol = props.getProperty("Protocol");
+ String methodType = props.getProperty("MethodTypeGet");
+ String user = props.getProperty("user");
+ String password = props.getProperty("password");
+ String contenttype = props.getProperty("contenttype");
+
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+ try {
+ // ..checking for topic exist is commented
+ // if (!topicTestObj.topicExist(url, props, hm)) {
+ // throw new Exception("Topic does not exist");
+ // } else {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic")
+ + "/" + props.getProperty("group") + "/" + props.getProperty("id") + "?"
+ + props.getProperty("filterType");
+
+ sender.setSubContext(URLEncoder.encode(subContextPathConsumer, "UTF-8"));
+ sender.setPayload("");
+
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Consuming Message for Filter");
+ String reply = sender.sendAndWait(5000L);
+ assertNotNull(reply);
+ LOGGER.info("Message received = " + reply);
+ // }
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerTest.java
new file mode 100644
index 0000000..5406c6a
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ConsumerTest.java
@@ -0,0 +1,95 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+
+public class DME2ConsumerTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DME2ConsumerTest.class);
+
+ public void testConsumer() {
+ LOGGER.info("Test case subcribing initiated");
+
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String protocol = props.getProperty("Protocol");
+ String methodType = props.getProperty("MethodTypeGet");
+ String user = props.getProperty("user");
+ String password = props.getProperty("password");
+ String contenttype = props.getProperty("contenttype");
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ try {
+
+ // topic exist logic is commented
+ // if (!topicTestObj.topicExist(url, props, hm)) {
+ // throw new Exception("Topic does not exist");
+ // } else {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic")
+ + "/" + props.getProperty("group") + "/" + props.getProperty("id");
+ sender.setSubContext(subContextPathConsumer);
+ sender.setPayload("");
+
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Consuming Message");
+ String reply = sender.sendAndWait(5000L);
+
+ assertNotNull(reply);
+ LOGGER.info("Message received = " + reply);
+ // }
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2MetricsTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2MetricsTest.java
new file mode 100644
index 0000000..6f674f4
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2MetricsTest.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+
+public class DME2MetricsTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DME2MetricsTest.class);
+
+ public void testGetMetrics() {
+ LOGGER.info("Test case get metrics initiated...");
+
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String subContextPath = props.getProperty("SubContextPathGetMetrics");
+ String protocol = props.getProperty("Protocol");
+ String methodType = props.getProperty("MethodTypeGet");
+ String user = props.getProperty("user");
+ String password = props.getProperty("password");
+ String contenttype = props.getProperty("contenttype");
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ sender.setSubContext(subContextPath);
+ sender.setPayload("");
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Getting Metrics Details");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void testGetMetricsByName() {
+ LOGGER.info("Test case get metrics by name initiated");
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String subContextPath = props.getProperty("SubContextPathGetMetricsByName");
+ String protocol = props.getProperty("Protocol");
+ String methodType = props.getProperty("MethodTypeGet");
+ String user = props.getProperty("user");
+ String password = props.getProperty("password");
+ String contenttype = props.getProperty("contenttype");
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodType);
+ sender.setSubContext(subContextPath);
+ sender.setPayload("");
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Getting Metrics By name");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ProducerTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ProducerTest.java
new file mode 100644
index 0000000..2941b69
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2ProducerTest.java
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+
+public class DME2ProducerTest extends TestCase {
+ private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class);
+
+ public void testProducer() {
+ DME2TopicTest topicTestObj = new DME2TopicTest();
+
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ String latitude = props.getProperty("Latitude");
+ String longitude = props.getProperty("Longitude");
+ String version = props.getProperty("Version");
+ String serviceName = props.getProperty("ServiceName");
+ String env = props.getProperty("Environment");
+ String partner = props.getProperty("Partner");
+ String protocol = props.getProperty("Protocol");
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ // checking whether topic exist or not
+ if (!topicTestObj.topicExist(url, props, hm)) {
+ // if topic doesn't exist then create the topic
+ topicTestObj.createTopic(url, props, hm);
+ // after creating the topic publish on that topic
+ publishMessage(url, props, hm);
+ } else {
+ // if topic already exist start publishing on the topic
+ publishMessage(url, props, hm);
+ }
+
+ }
+
+ public void publishMessage(String url, Properties props, HashMap<String, String> mapData) {
+ try {
+ LOGGER.info("Call to publish message ");
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypePost"));
+ String subcontextpathPublish = props.getProperty("SubContextPathproducer") + props.getProperty("newTopic");
+ sender.setSubContext(subcontextpathPublish);
+ String jsonStringApiBean = new ObjectMapper().writeValueAsString(new ApiKeyBean("example@att.com",
+ "description"));
+ sender.setPayload(jsonStringApiBean);
+
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ LOGGER.info("Publishing message");
+ String reply = sender.sendAndWait(5000L);
+ // assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ assertNotNull(reply);
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2TopicTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2TopicTest.java
new file mode 100644
index 0000000..bfa25f1
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/DME2TopicTest.java
@@ -0,0 +1,546 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Properties;
+
+import javax.ws.rs.client.Entity;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.messagerouter.msgrtr.mr.test.dmaap.DmaapAdminTest;
+
+import junit.framework.TestCase;
+
+import com.att.aft.dme2.api.DME2Client;
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.internal.jackson.map.ObjectMapper;
+
+public class DME2TopicTest extends TestCase {
+ private String latitude;
+ private String longitude;
+ private String version;
+ private String serviceName;
+ private String env;
+ private String partner;
+ private String protocol;
+ private String methodTypeGet;
+ private String methodTypePost;
+ private String methodTypeDelete;
+ private String methodTypePut;
+
+ private String user;
+ private String password;
+ private String contenttype;
+ private String subContextPathGetAllTopic;
+ private String subContextPathGetOneTopic;
+ private String SubContextPathCreateTopic;
+ private String SubContextPathGetPublisherl;
+ private String SubContextPathGetPublisher;
+ private String SubContextPathGetPermitPublisher;
+ private String SubContextPathGetConsumer;
+ private String SubContextPathGetPermitConsumer;
+ private static final Logger LOGGER = Logger.getLogger(DME2TopicTest.class);
+
+ public void createTopic(String url, Properties props, HashMap<String, String> mapData) {
+ LOGGER.info("create topic method starts");
+ if (!topicExist(url, props, mapData)) {
+ LOGGER.info("creating a new topic");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypePost"));
+ sender.setSubContext(props.getProperty("SubContextPathCreateTopic"));
+ TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("newTopic"),
+ props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")),
+ Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props
+ .getProperty("txenabled")));
+ String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean);
+ sender.setPayload(jsonStringApiBean);
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+
+ LOGGER.info("creating Topic");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ public boolean topicExist(String url, Properties props, HashMap<String, String> mapData) {
+ boolean topicExist = false;
+ try {
+ LOGGER.info("Checking topic exists or not");
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypeGet"));
+ String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("newTopic");
+ sender.setSubContext(subcontextPath);
+ sender.setPayload("");
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ String reply = sender.sendAndWait(5000L);
+ topicExist = LoadPropertyFile.isValidJsonString(reply);
+ LOGGER.info("Topic exist =" + topicExist);
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return topicExist;
+ }
+
+ public void testAllTopics() {
+ LOGGER.info("Test case get all topics initiated");
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ latitude = props.getProperty("Latitude");
+ longitude = props.getProperty("Longitude");
+ version = props.getProperty("Version");
+ serviceName = props.getProperty("ServiceName");
+ env = props.getProperty("Environment");
+ partner = props.getProperty("Partner");
+ subContextPathGetAllTopic = props.getProperty("subContextPathGetAllTopic");
+ protocol = props.getProperty("Protocol");
+ methodTypeGet = props.getProperty("MethodTypeGet");
+ user = props.getProperty("user");
+ password = props.getProperty("password");
+ contenttype = props.getProperty("contenttype");
+
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude); // } else {
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodTypeGet);
+ sender.setSubContext(subContextPathGetAllTopic);
+ sender.setPayload("");
+
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Retrieving all topics");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("All Topics details = " + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void testOneTopic() {
+ LOGGER.info("Test case get one topic initiated");
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ latitude = props.getProperty("Latitude");
+ longitude = props.getProperty("Longitude");
+ version = props.getProperty("Version");
+ serviceName = props.getProperty("ServiceName");
+ env = props.getProperty("Environment");
+ partner = props.getProperty("Partner");
+ subContextPathGetOneTopic = props.getProperty("subContextPathGetOneTopic");
+ protocol = props.getProperty("Protocol");
+ methodTypeGet = props.getProperty("MethodTypeGet");
+ user = props.getProperty("user");
+ password = props.getProperty("password");
+ contenttype = props.getProperty("contenttype");
+
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ System.out.println("Retrieving topic detail");
+ if (!topicExist(url, props, hm)) {
+ createTopic(url, props, hm);
+ } else {
+ assertTrue(true);
+ }
+ }
+
+ public void createTopicForDeletion(String url, Properties props, HashMap<String, String> mapData) {
+ LOGGER.info("create topic method starts");
+
+ LOGGER.info("creating a new topic for deletion");
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypePost"));
+ sender.setSubContext(props.getProperty("SubContextPathCreateTopic"));
+ TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("deleteTopic"),
+ props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")),
+ Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props.getProperty("txenabled")));
+ String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean);
+ sender.setPayload(jsonStringApiBean);
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+
+ LOGGER.info("creating Topic");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("response =" + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public boolean topicExistForDeletion(String url, Properties props, HashMap<String, String> mapData) {
+ boolean topicExist = false;
+ try {
+ LOGGER.info("Checking topic exists for deletion");
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypeGet"));
+ String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("deleteTopic");
+ sender.setSubContext(subcontextPath);
+ sender.setPayload("");
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ String reply = sender.sendAndWait(5000L);
+ topicExist = LoadPropertyFile.isValidJsonString(reply);
+ LOGGER.info("Topic exist for deletion=" + topicExist);
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return topicExist;
+ }
+
+ public void testDeleteTopic() {
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ latitude = props.getProperty("Latitude");
+ longitude = props.getProperty("Longitude");
+ version = props.getProperty("Version");
+ serviceName = props.getProperty("ServiceName");
+ env = props.getProperty("Environment");
+ partner = props.getProperty("Partner");
+ SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic");
+ protocol = props.getProperty("Protocol");
+ methodTypePost = props.getProperty("MethodTypeDelete");
+ user = props.getProperty("user");
+ password = props.getProperty("password");
+ contenttype = props.getProperty("contenttypejson");
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ System.out.println("deleteing topic");
+ if (!topicExistForDeletion(url, props, hm)) {
+ createTopicForDeletion(url, props, hm);
+ deleteTopic(url, props, hm);
+ } else {
+ deleteTopic(url, props, hm);
+ }
+ }
+
+ public void deleteTopic(String url, Properties props, HashMap<String, String> mapData) {
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(props.getProperty("MethodTypeDelete"));
+ String subsontextPathDelete = props.getProperty("subContextPathGetOneTopic")
+ + props.getProperty("deleteTopic");
+ sender.setSubContext(subsontextPathDelete);
+ sender.setPayload("");
+ sender.addHeader("content-type", props.getProperty("contenttype"));
+ sender.setCredentials(props.getProperty("user"), props.getProperty("password"));
+ System.out.println("Deleting Topic " + props.getProperty("deleteTopic"));
+ String reply = sender.sendAndWait(5000L);
+ assertNotNull(reply);
+ System.out.println("response =" + reply);
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void testGetProducersTopics() {
+ LOGGER.info("Test case get list of producers on topic");
+ Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ latitude = props.getProperty("Latitude");
+ longitude = props.getProperty("Longitude");
+ version = props.getProperty("Version");
+ serviceName = props.getProperty("ServiceName");
+ env = props.getProperty("Environment");
+ partner = props.getProperty("Partner");
+ SubContextPathGetPublisher = props.getProperty("SubContextPathGetPublisher");
+ protocol = props.getProperty("Protocol");
+ methodTypeGet = props.getProperty("MethodTypeGet");
+ user = props.getProperty("user");
+ password = props.getProperty("password");
+ contenttype = props.getProperty("contenttype");
+
+ String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+ + "envContext=" + env + "/" + "partner=" + partner;
+ LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+ HashMap<String, String> hm = new HashMap<String, String>();
+ hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+ try {
+ DME2Client sender = new DME2Client(new URI(url), 5000L);
+ sender.setAllowAllHttpReturnCodes(true);
+ sender.setMethod(methodTypeGet);
+ sender.setSubContext(SubContextPathGetPublisher);
+ sender.setPayload("");
+
+ sender.addHeader("Content-Type", contenttype);
+ sender.setCredentials(user, password);
+ sender.setHeaders(hm);
+
+ LOGGER.info("Retrieving List of publishers");
+ String reply = sender.sendAndWait(5000L);
+ assertTrue(LoadPropertyFile.isValidJsonString(reply));
+ LOGGER.info("All Publishers details = " + reply);
+
+ } catch (DME2Exception e) {
+ e.printStackTrace();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // permitting a producer on topic is not applicable
+ // public void testPermitProducersTopics() {
+ // LOGGER.info("Test case ");
+ // Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ // latitude = props.getProperty("Latitude");
+ // longitude = props.getProperty("Longitude");
+ // version = props.getProperty("Version");
+ // serviceName = props.getProperty("ServiceName");
+ // env = props.getProperty("Environment");
+ // partner = props.getProperty("Partner");
+ // SubContextPathGetPermitPublisher =
+ // props.getProperty("SubContextPathGetPermitPublisher");
+ // protocol = props.getProperty("Protocol");
+ // methodTypePut = props.getProperty("MethodTypePut");
+ // user = props.getProperty("user");
+ // password = props.getProperty("password");
+ // contenttype = props.getProperty("contenttype");
+ //
+ // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/"
+ // + "version=" + version + "/"
+ // + "envContext=" + env + "/" + "partner=" + partner;
+ // LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ //
+ // HashMap<String, String> hm = new HashMap<String, String>();
+ // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ //
+ // try {
+ // DME2Client sender = new DME2Client(new URI(url), 5000L);
+ // sender.setAllowAllHttpReturnCodes(true);
+ // sender.setMethod(methodTypePut);
+ // sender.setSubContext(SubContextPathGetPermitPublisher);
+ // sender.setPayload("");
+ //
+ // sender.addHeader("Content-Type", contenttype);
+ // sender.setCredentials(user, password);
+ // sender.setHeaders(hm);
+ //
+ // System.out.println("Permitting a producer on topic");
+ // String reply = sender.sendAndWait(5000L);
+ // System.out.println("Reply from server = " + reply);
+ //
+ // } catch (DME2Exception e) {
+ // e.printStackTrace();
+ // } catch (URISyntaxException e) {
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+
+	/**
+	 * Retrieves the list of consumers on a topic over DME2 and asserts that
+	 * the service replies with valid JSON. Endpoint coordinates, credentials
+	 * and paths are read from dme2testcase.properties.
+	 *
+	 * Fix: the original caught DME2Exception/URISyntaxException/Exception and
+	 * only printed the stack trace, so the test passed even when the request
+	 * failed; errors are now rethrown so the test reports them.
+	 */
+	public void testGetConsumersTopics() {
+		LOGGER.info("Test case get list of consumers on topic ");
+		Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+		latitude = props.getProperty("Latitude");
+		longitude = props.getProperty("Longitude");
+		version = props.getProperty("Version");
+		serviceName = props.getProperty("ServiceName");
+		env = props.getProperty("Environment");
+		partner = props.getProperty("Partner");
+		SubContextPathGetConsumer = props.getProperty("SubContextPathGetConsumer");
+		protocol = props.getProperty("Protocol");
+		methodTypeGet = props.getProperty("MethodTypeGet");
+		user = props.getProperty("user");
+		password = props.getProperty("password");
+		contenttype = props.getProperty("contenttype");
+
+		String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/"
+				+ "envContext=" + env + "/" + "partner=" + partner;
+		LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+		// DME2 client timeout tuning (read / round-trip / connect, in ms).
+		HashMap<String, String> hm = new HashMap<String, String>();
+		hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+		hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+		hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+		try {
+			DME2Client sender = new DME2Client(new URI(url), 5000L);
+			sender.setAllowAllHttpReturnCodes(true);
+			sender.setMethod(methodTypeGet);
+			sender.setSubContext(SubContextPathGetConsumer);
+			sender.setPayload("");
+
+			sender.addHeader("Content-Type", contenttype);
+			sender.setCredentials(user, password);
+			sender.setHeaders(hm);
+
+			LOGGER.info("Retrieving consumer details on topics");
+			String reply = sender.sendAndWait(5000L);
+			assertTrue(LoadPropertyFile.isValidJsonString(reply));
+			System.out.println("Reply from server = " + reply);
+
+		} catch (Exception e) {
+			// Preserve the cause so the JUnit report shows what went wrong.
+			throw new RuntimeException("testGetConsumersTopics failed", e);
+		}
+	}
+
+	/**
+	 * Exercises topic creation over DME2. Connection settings are read from
+	 * dme2testcase.properties; the actual POST is delegated to the shared
+	 * createTopic helper.
+	 */
+	public void testCreateTopic() {
+		LOGGER.info("Test case create topic starts");
+		Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+		latitude = props.getProperty("Latitude");
+		longitude = props.getProperty("Longitude");
+		version = props.getProperty("Version");
+		serviceName = props.getProperty("ServiceName");
+		env = props.getProperty("Environment");
+		partner = props.getProperty("Partner");
+		SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic");
+		protocol = props.getProperty("Protocol");
+		methodTypePost = props.getProperty("MethodTypePost");
+		user = props.getProperty("user");
+		password = props.getProperty("password");
+		contenttype = props.getProperty("contenttypejson");
+
+		// DME2SEARCH-style locator URL for the topic-creation service.
+		String serviceUrl = protocol + "://DME2SEARCH/service=" + serviceName
+				+ "/version=" + version + "/envContext=" + env + "/partner=" + partner;
+		LoadPropertyFile.loadAFTProperties(latitude, longitude);
+
+		// DME2 client timeout tuning (read / round-trip / connect, in ms).
+		HashMap<String, String> timeoutHeaders = new HashMap<String, String>();
+		timeoutHeaders.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+		timeoutHeaders.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+		timeoutHeaders.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+
+		createTopic(serviceUrl, props, timeoutHeaders);
+	}
+ // permitting a consumer on topic is not applicable
+ // public void testPermitConsumerTopics() {
+ // Properties props = LoadPropertyFile.getPropertyFileDataProducer();
+ // latitude = props.getProperty("Latitude");
+ // longitude = props.getProperty("Longitude");
+ // version = props.getProperty("Version");
+ // serviceName = props.getProperty("ServiceName");
+ // env = props.getProperty("Environment");
+ // partner = props.getProperty("Partner");
+ // SubContextPathGetPermitConsumer =
+ // props.getProperty("SubContextPathGetPermitConsumer");
+ // protocol = props.getProperty("Protocol");
+ // methodTypePut = props.getProperty("MethodTypePut");
+ // user = props.getProperty("user");
+ // password = props.getProperty("password");
+ // contenttype = props.getProperty("contenttype");
+ //
+ // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/"
+ // + "version=" + version + "/"
+ // + "envContext=" + env + "/" + "partner=" + partner;
+ // LoadPropertyFile.loadAFTProperties(latitude, longitude);
+ //
+ // HashMap<String, String> hm = new HashMap<String, String>();
+ // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
+ // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
+ // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
+ //
+ // try {
+ // DME2Client sender = new DME2Client(new URI(url), 5000L);
+ // sender.setAllowAllHttpReturnCodes(true);
+ // sender.setMethod(methodTypePut);
+ // sender.setSubContext(SubContextPathGetPermitConsumer);
+ // sender.setPayload("");
+ //
+ // sender.addHeader("Content-Type", contenttype);
+ // sender.setCredentials(user, password);
+ // sender.setHeaders(hm);
+ //
+ // System.out.println("Permitting a consumer on topic");
+ // String reply = sender.sendAndWait(5000L);
+ // assertNotNull(reply);
+ // System.out.println("Reply from server = " + reply);
+ //
+ // } catch (DME2Exception e) {
+ // e.printStackTrace();
+ // } catch (URISyntaxException e) {
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/JUnitTestSuite.java
new file mode 100644
index 0000000..45d1dba
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/JUnitTestSuite.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import junit.framework.TestSuite;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+import org.apache.log4j.Logger;
+
+/**
+ * JUnit 4 suite bundling all DME2-based Message Router tests.
+ *
+ * Fix: DME2ConsumerTest was listed twice in the original @SuiteClasses
+ * array, which executed that entire test class twice per suite run; the
+ * duplicate entry is removed.
+ */
+@RunWith(Suite.class)
+@SuiteClasses({ DME2AdminTest.class, DME2ApiKeyTest.class, DME2ConsumerTest.class,
+		DME2MetricsTest.class, DME2ProducerTest.class, DME2TopicTest.class })
+public class JUnitTestSuite {
+	private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);
+
+	/**
+	 * Standalone entry point. NOTE(review): this only logs the size of a
+	 * freshly created (empty) TestSuite — it does not execute the suite;
+	 * use TestRunner to actually run it.
+	 */
+	public static void main(String[] args) {
+		LOGGER.info("Running the test suite");
+
+		TestSuite tstSuite = new TestSuite();
+		LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+	}
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/LoadPropertyFile.java
new file mode 100644
index 0000000..188adc7
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/LoadPropertyFile.java
@@ -0,0 +1,69 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+
+public class LoadPropertyFile {
+ //private static final Logger LOGGER = Logger.getLogger(LoadPropertyFile.class);
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(LoadPropertyFile.class);
+
+ static public Properties getPropertyFileDataProducer() {
+ Properties prop = new Properties();
+ LOGGER.info("loading the property file");
+ try {
+ InputStream inputStream = LoadPropertyFile.class.getClassLoader()
+ .getResourceAsStream("dme2testcase.properties");
+
+ prop.load(inputStream);
+ LOGGER.info("successfully loaded the property file");
+ } catch (IOException e) {
+ LOGGER.error("Error while retrieving API keys: " + e);
+ }
+ return prop;
+ }
+
+ static public void loadAFTProperties(String lat, String longi) {
+ System.setProperty("AFT_LATITUDE", lat);
+ System.setProperty("AFT_LONGITUDE", longi);
+ System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+ // printProperties();
+ System.out.println("Latitude =" + lat);
+ System.out.println("Longitude =" + longi);
+ }
+
+ static public boolean isValidJsonString(String chkString) {
+ boolean isJson = true;
+ try {
+ new JSONObject(chkString);
+ } catch (Exception e) {
+ isJson = false;
+ }
+ return isJson;
+ }
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TestRunner.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TestRunner.java
new file mode 100644
index 0000000..26c8f34
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TestRunner.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+import org.apache.log4j.Logger;
+
+/**
+ * Command-line runner that executes JUnitTestSuite, logging each failure
+ * and then the overall success flag.
+ */
+public class TestRunner {
+	private static final Logger LOGGER = Logger.getLogger(TestRunner.class);
+
+	public static void main(String[] args) {
+		final Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+		for (final Failure failure : result.getFailures()) {
+			LOGGER.info(failure.toString());
+		}
+		LOGGER.info(result.wasSuccessful());
+	}
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TopicBeanDME2.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TopicBeanDME2.java
new file mode 100644
index 0000000..d8b1287
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/mr/test/dme2/TopicBeanDME2.java
@@ -0,0 +1,94 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+/**
+ *
+ */
+package org.onap.dmaap.messagerouter.msgrtr.mr.test.dme2;
+
+import java.io.Serializable;
+
+/**
+ * Serializable value object used by the DME2 tests to describe a topic:
+ * its name, description, partition/replication counts, and whether the
+ * topic is transaction-enabled.
+ */
+public class TopicBeanDME2 implements Serializable {
+
+	private static final long serialVersionUID = -8620390377775457949L;
+
+	private String topicName;
+	private String description;
+	private int partitionCount;
+	private int replicationCount;
+	private boolean transactionEnabled = false;
+
+	/** Creates an empty bean; populate it via the setters. */
+	public TopicBeanDME2() {
+		super();
+	}
+
+	/**
+	 * Creates a fully populated bean.
+	 *
+	 * @param topicName          topic name
+	 * @param description        human-readable description
+	 * @param partitionCount     number of partitions
+	 * @param replicationCount   replication factor
+	 * @param transactionEnabled whether the topic is transactional
+	 */
+	public TopicBeanDME2(String topicName, String description, int partitionCount, int replicationCount,
+			boolean transactionEnabled) {
+		super();
+		this.topicName = topicName;
+		this.description = description;
+		this.partitionCount = partitionCount;
+		this.replicationCount = replicationCount;
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	public String getTopicName() {
+		return topicName;
+	}
+
+	public void setTopicName(String topicName) {
+		this.topicName = topicName;
+	}
+
+	public String getDescription() {
+		return description;
+	}
+
+	public void setDescription(String description) {
+		this.description = description;
+	}
+
+	public int getPartitionCount() {
+		return partitionCount;
+	}
+
+	public void setPartitionCount(int partitionCount) {
+		this.partitionCount = partitionCount;
+	}
+
+	public int getReplicationCount() {
+		return replicationCount;
+	}
+
+	public void setReplicationCount(int replicationCount) {
+		this.replicationCount = replicationCount;
+	}
+
+	public boolean isTransactionEnabled() {
+		return transactionEnabled;
+	}
+
+	public void setTransactionEnabled(boolean transactionEnabled) {
+		this.transactionEnabled = transactionEnabled;
+	}
+
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiTestCase.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiTestCase.java
new file mode 100644
index 0000000..5800918
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiTestCase.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.junit.Ignore;
+
+/**
+ * Base TestCase intended to boot an in-memory Cambria API server around
+ * each test. The actual start/stop calls are currently commented out, so
+ * setUp and tearDown only build the configuration map and log progress.
+ */
+@Ignore
+public class CambriaApiTestCase extends TestCase {
+
+	@Override
+	protected void setUp() throws Exception {
+		// In-memory broker and DAO wiring for a self-contained server.
+		final Map<String, String> argMap = new HashMap<String, String>();
+		argMap.put("broker.type", "memory");
+		argMap.put("accounts.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory");
+		argMap.put("topic.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory");
+
+		//CambriaApiServer.start(argMap);
+		System.out.println("setUp() complete");
+	}
+
+	@Override
+	protected void tearDown() throws Exception {
+		System.out.println("tearDown() started");
+		//CambriaApiServer.stop();
+		System.out.println("tearDown() complete");
+	}
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaRateLimiterTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaRateLimiterTest.java
new file mode 100644
index 0000000..f362a0c
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaRateLimiterTest.java
@@ -0,0 +1,78 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
+
+import junit.framework.TestCase;
+
+import org.junit.Test;
+
+import com.att.nsa.apiServer.util.NsaTestClock;
+
+/**
+ * Placeholder for CambriaRateLimiter tests. The entire scenario below is
+ * inside a block comment, so testRateLimiter currently runs and asserts
+ * nothing. The commented code drives the limiter with a NsaTestClock and
+ * expects a CambriaApiException once the empty-call rate exceeds 1/min
+ * averaged over a 5-minute window.
+ * NOTE(review): re-enable once CambriaRateLimiter/NsaTestClock are
+ * available on the test classpath, or delete this dead scenario.
+ */
+public class CambriaRateLimiterTest
+{
+	@Test
+	public void testRateLimiter ()
+	{
+		// Intentionally disabled; see class comment.
+		/*final NsaTestClock clock = new NsaTestClock(1, false);
+
+		final String topic = "topic";
+		final String consumerGroup = "group";
+		final String clientId = "id";
+
+		final int window = 5;
+
+		// rate limit: 1 empty call/min avg over 5 minutes, with 10ms delay
+		final CambriaRateLimiter rater = new CambriaRateLimiter ( 1.0, window, 10 );
+		try
+		{
+			// prime with a call to start rate window
+			rater.onCall ( topic, consumerGroup, clientId );
+			rater.onSend ( topic, consumerGroup, clientId, 1 );
+			clock.addMs ( 1000*60*window );
+
+			// rate should now be 0, with a good window
+			for ( int i=0; i<4; i++ )
+			{
+				clock.addMs ( 1000*15 );
+				rater.onCall ( topic, consumerGroup, clientId );
+				rater.onSend ( topic, consumerGroup, clientId, 0 );
+			}
+			// rate is now 0.8 = 4 calls in last 5 minutes = 4/5 = 0.8
+
+			clock.addMs ( 1000*15 );
+			rater.onCall ( topic, consumerGroup, clientId );
+			rater.onSend ( topic, consumerGroup, clientId, 0 );
+			// rate = 1.0 = 5 calls in last 5 mins
+
+			clock.addMs ( 1000 );
+			rater.onCall ( topic, consumerGroup, clientId );
+			rater.onSend ( topic, consumerGroup, clientId, 0 );
+			// rate = 1.2 = 6 calls in last 5 mins, should fire
+
+			fail ( "Should have thrown rate limit exception." );
+		}
+		catch ( CambriaApiException x )
+		{
+			// good
+		}*/
+	}
+}
diff --git a/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/UtilsTest.java b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/UtilsTest.java
new file mode 100644
index 0000000..e768033
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/UtilsTest.java
@@ -0,0 +1,58 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
+
+import static org.junit.Assert.*;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
+
+/**
+ * Unit tests for the date-formatting helper in Utils.
+ */
+public class UtilsTest {
+
+	/** Pattern Utils.getFormattedDate is expected to produce. */
+	private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	/**
+	 * Formats "now" through Utils and through a local SimpleDateFormat with
+	 * the same pattern, then checks the two renderings agree.
+	 */
+	@Test
+	public void testGetFormattedDate() {
+		final Date now = new Date();
+		final String actual = Utils.getFormattedDate(now);
+		final String expected = new SimpleDateFormat(DATE_FORMAT).format(now);
+		assertNotNull(actual);
+		assertTrue("Formatted date does not match - expected [" + expected
+				+ "] received [" + actual + "]",
+				actual.equalsIgnoreCase(expected));
+	}
+
+}