From 3fc19dc9157f4d05bdbd6fd05a52f0592268c4e7 Mon Sep 17 00:00:00 2001
From: Varun Gudisena <vg411h@att.com>
Date: Thu, 31 Aug 2017 10:52:33 -0500
Subject: Revert package name changes

Reverted the package name changes to avoid any potential issues; only the
    Maven group id was renamed.

Issue-id: DMAAP-74
Change-Id: Ic741b602ade60f108d940c0571a1d94b7be2abc2
Signed-off-by: Varun Gudisena <vg411h@att.com>
---
 .../metrics/cambria/DMaaPMetricsSender.java        | 197 ++++++
 .../com/att/nsa/cambria/CambriaApiException.java   |  80 +++
 .../com/att/nsa/cambria/CambriaApiVersionInfo.java |  88 +++
 .../com/att/nsa/cambria/backends/Consumer.java     |  96 +++
 .../att/nsa/cambria/backends/ConsumerFactory.java  | 110 +++
 .../com/att/nsa/cambria/backends/MetricsSet.java   |  71 ++
 .../com/att/nsa/cambria/backends/Publisher.java    |  98 +++
 .../nsa/cambria/backends/kafka/KafkaConsumer.java  | 245 +++++++
 .../cambria/backends/kafka/KafkaConsumerCache.java | 613 ++++++++++++++++
 .../nsa/cambria/backends/kafka/KafkaPublisher.java | 168 +++++
 .../backends/memory/MemoryConsumerFactory.java     | 160 +++++
 .../cambria/backends/memory/MemoryMetaBroker.java  | 199 ++++++
 .../nsa/cambria/backends/memory/MemoryQueue.java   | 207 ++++++
 .../backends/memory/MemoryQueuePublisher.java      |  90 +++
 .../cambria/backends/memory/MessageDropper.java    |  61 ++
 .../nsa/cambria/backends/memory/MessageLogger.java | 101 +++
 .../java/com/att/nsa/cambria/beans/ApiKeyBean.java |  88 +++
 .../att/nsa/cambria/beans/DMaaPCambriaLimiter.java | 227 ++++++
 .../com/att/nsa/cambria/beans/DMaaPContext.java    | 104 +++
 .../cambria/beans/DMaaPKafkaConsumerFactory.java   | 319 +++++++++
 .../nsa/cambria/beans/DMaaPKafkaMetaBroker.java    | 462 ++++++++++++
 .../com/att/nsa/cambria/beans/DMaaPMetricsSet.java | 232 ++++++
 .../com/att/nsa/cambria/beans/DMaaPNsaApiDb.java   | 139 ++++
 .../com/att/nsa/cambria/beans/DMaaPZkClient.java   |  45 ++
 .../com/att/nsa/cambria/beans/DMaaPZkConfigDb.java |  52 ++
 .../java/com/att/nsa/cambria/beans/LogDetails.java | 214 ++++++
 .../java/com/att/nsa/cambria/beans/TopicBean.java  | 155 ++++
 .../nsa/cambria/constants/CambriaConstants.java    | 126 ++++
 .../exception/DMaaPAccessDeniedException.java      |  42 ++
 .../exception/DMaaPCambriaExceptionMapper.java     |  94 +++
 .../nsa/cambria/exception/DMaaPErrorMessages.java  | 239 +++++++
 .../nsa/cambria/exception/DMaaPResponseCode.java   |  93 +++
 .../cambria/exception/DMaaPWebExceptionMapper.java | 137 ++++
 .../att/nsa/cambria/exception/ErrorResponse.java   | 135 ++++
 .../listener/CambriaServletContextListener.java    |  64 ++
 .../nsa/cambria/listener/DME2EndPointLoader.java   | 123 ++++
 .../com/att/nsa/cambria/metabroker/Broker.java     |  92 +++
 .../java/com/att/nsa/cambria/metabroker/Topic.java | 133 ++++
 .../publisher/CambriaBatchingPublisher.java        |  52 ++
 .../cambria/metrics/publisher/CambriaClient.java   |  89 +++
 .../cambria/metrics/publisher/CambriaConsumer.java |  52 ++
 .../metrics/publisher/CambriaPublisher.java        | 101 +++
 .../metrics/publisher/CambriaPublisherUtility.java | 146 ++++
 .../publisher/DMaaPCambriaClientFactory.java       | 423 +++++++++++
 .../metrics/publisher/impl/CambriaBaseClient.java  |  98 +++
 .../nsa/cambria/metrics/publisher/impl/Clock.java  |  74 ++
 .../publisher/impl/DMaaPCambriaConsumerImpl.java   | 170 +++++
 .../impl/DMaaPCambriaSimplerBatchPublisher.java    | 429 +++++++++++
 .../att/nsa/cambria/resources/CambriaEventSet.java | 114 +++
 .../resources/CambriaOutboundEventStream.java      | 516 ++++++++++++++
 .../streamReaders/CambriaJsonStreamReader.java     | 172 +++++
 .../streamReaders/CambriaRawStreamReader.java      | 141 ++++
 .../streamReaders/CambriaStreamReader.java         | 229 ++++++
 .../streamReaders/CambriaTextStreamReader.java     | 140 ++++
 .../cambria/security/DMaaPAAFAuthenticator.java    |  39 +
 .../security/DMaaPAAFAuthenticatorImpl.java        |  91 +++
 .../nsa/cambria/security/DMaaPAuthenticator.java   |  61 ++
 .../cambria/security/DMaaPAuthenticatorImpl.java   | 135 ++++
 .../security/impl/DMaaPMechIdAuthenticator.java    |  89 +++
 .../impl/DMaaPOriginalUebAuthenticator.java        | 291 ++++++++
 .../com/att/nsa/cambria/service/AdminService.java  |  83 +++
 .../att/nsa/cambria/service/ApiKeysService.java    | 105 +++
 .../com/att/nsa/cambria/service/EventsService.java |  75 ++
 .../com/att/nsa/cambria/service/MMService.java     |  68 ++
 .../att/nsa/cambria/service/MetricsService.java    |  54 ++
 .../com/att/nsa/cambria/service/TopicService.java  | 176 +++++
 .../nsa/cambria/service/TransactionService.java    |  61 ++
 .../com/att/nsa/cambria/service/UIService.java     |  91 +++
 .../nsa/cambria/service/impl/AdminServiceImpl.java | 188 +++++
 .../cambria/service/impl/ApiKeysServiceImpl.java   | 326 +++++++++
 .../service/impl/BaseTransactionDbImpl.java        | 153 ++++
 .../cambria/service/impl/EventsServiceImpl.java    | 788 +++++++++++++++++++++
 .../nsa/cambria/service/impl/MMServiceImpl.java    | 605 ++++++++++++++++
 .../cambria/service/impl/MetricsServiceImpl.java   | 115 +++
 .../nsa/cambria/service/impl/TopicServiceImpl.java | 649 +++++++++++++++++
 .../service/impl/TransactionServiceImpl.java       | 100 +++
 .../nsa/cambria/service/impl/UIServiceImpl.java    | 206 ++++++
 .../transaction/DMaaPTransactionFactory.java       |  44 ++
 .../cambria/transaction/DMaaPTransactionObj.java   |  83 +++
 .../cambria/transaction/DMaaPTransactionObjDB.java |  86 +++
 .../nsa/cambria/transaction/TransactionObj.java    | 202 ++++++
 .../att/nsa/cambria/transaction/TrnRequest.java    | 183 +++++
 .../impl/DMaaPSimpleTransactionFactory.java        |  62 ++
 .../att/nsa/cambria/utils/ConfigurationReader.java | 499 +++++++++++++
 .../att/nsa/cambria/utils/DMaaPCuratorFactory.java |  68 ++
 .../nsa/cambria/utils/DMaaPResponseBuilder.java    | 359 ++++++++++
 .../java/com/att/nsa/cambria/utils/Emailer.java    | 214 ++++++
 .../com/att/nsa/cambria/utils/PropertyReader.java  | 133 ++++
 src/main/java/com/att/nsa/cambria/utils/Utils.java | 145 ++++
 .../com/att/nsa/filter/ContentLengthFilter.java    | 134 ++++
 .../java/com/att/nsa/filter/DefaultLength.java     |  37 +
 .../metrics/cambria/DMaaPMetricsSender.java        | 198 ------
 .../msgrtr/nsa/cambria/CambriaApiException.java    |  80 ---
 .../msgrtr/nsa/cambria/CambriaApiVersionInfo.java  |  88 ---
 .../msgrtr/nsa/cambria/backends/Consumer.java      |  96 ---
 .../nsa/cambria/backends/ConsumerFactory.java      | 110 ---
 .../msgrtr/nsa/cambria/backends/MetricsSet.java    |  71 --
 .../msgrtr/nsa/cambria/backends/Publisher.java     |  98 ---
 .../nsa/cambria/backends/kafka/KafkaConsumer.java  | 245 -------
 .../cambria/backends/kafka/KafkaConsumerCache.java | 614 ----------------
 .../nsa/cambria/backends/kafka/KafkaPublisher.java | 169 -----
 .../backends/memory/MemoryConsumerFactory.java     | 160 -----
 .../cambria/backends/memory/MemoryMetaBroker.java  | 200 ------
 .../nsa/cambria/backends/memory/MemoryQueue.java   | 207 ------
 .../backends/memory/MemoryQueuePublisher.java      |  90 ---
 .../cambria/backends/memory/MessageDropper.java    |  61 --
 .../nsa/cambria/backends/memory/MessageLogger.java | 101 ---
 .../msgrtr/nsa/cambria/beans/ApiKeyBean.java       |  88 ---
 .../nsa/cambria/beans/DMaaPCambriaLimiter.java     | 227 ------
 .../msgrtr/nsa/cambria/beans/DMaaPContext.java     | 104 ---
 .../cambria/beans/DMaaPKafkaConsumerFactory.java   | 320 ---------
 .../nsa/cambria/beans/DMaaPKafkaMetaBroker.java    | 462 ------------
 .../msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java  | 233 ------
 .../msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java    | 139 ----
 .../msgrtr/nsa/cambria/beans/DMaaPZkClient.java    |  45 --
 .../msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java  |  52 --
 .../msgrtr/nsa/cambria/beans/LogDetails.java       | 214 ------
 .../msgrtr/nsa/cambria/beans/TopicBean.java        | 155 ----
 .../nsa/cambria/constants/CambriaConstants.java    | 125 ----
 .../exception/DMaaPAccessDeniedException.java      |  42 --
 .../exception/DMaaPCambriaExceptionMapper.java     |  92 ---
 .../nsa/cambria/exception/DMaaPErrorMessages.java  | 239 -------
 .../nsa/cambria/exception/DMaaPResponseCode.java   |  93 ---
 .../cambria/exception/DMaaPWebExceptionMapper.java | 137 ----
 .../nsa/cambria/exception/ErrorResponse.java       | 135 ----
 .../listener/CambriaServletContextListener.java    |  64 --
 .../nsa/cambria/listener/DME2EndPointLoader.java   | 124 ----
 .../msgrtr/nsa/cambria/metabroker/Broker.java      |  93 ---
 .../msgrtr/nsa/cambria/metabroker/Topic.java       | 133 ----
 .../publisher/CambriaBatchingPublisher.java        |  52 --
 .../cambria/metrics/publisher/CambriaClient.java   |  89 ---
 .../cambria/metrics/publisher/CambriaConsumer.java |  52 --
 .../metrics/publisher/CambriaPublisher.java        | 101 ---
 .../metrics/publisher/CambriaPublisherUtility.java | 146 ----
 .../publisher/DMaaPCambriaClientFactory.java       | 423 -----------
 .../metrics/publisher/impl/CambriaBaseClient.java  |  99 ---
 .../nsa/cambria/metrics/publisher/impl/Clock.java  |  74 --
 .../publisher/impl/DMaaPCambriaConsumerImpl.java   | 170 -----
 .../impl/DMaaPCambriaSimplerBatchPublisher.java    | 430 -----------
 .../nsa/cambria/resources/CambriaEventSet.java     | 115 ---
 .../resources/CambriaOutboundEventStream.java      | 516 --------------
 .../streamReaders/CambriaJsonStreamReader.java     | 171 -----
 .../streamReaders/CambriaRawStreamReader.java      | 142 ----
 .../streamReaders/CambriaStreamReader.java         | 229 ------
 .../streamReaders/CambriaTextStreamReader.java     | 140 ----
 .../cambria/security/DMaaPAAFAuthenticator.java    |  39 -
 .../security/DMaaPAAFAuthenticatorImpl.java        |  90 ---
 .../nsa/cambria/security/DMaaPAuthenticator.java   |  62 --
 .../cambria/security/DMaaPAuthenticatorImpl.java   | 136 ----
 .../security/impl/DMaaPMechIdAuthenticator.java    |  88 ---
 .../impl/DMaaPOriginalUebAuthenticator.java        | 293 --------
 .../msgrtr/nsa/cambria/service/AdminService.java   |  83 ---
 .../msgrtr/nsa/cambria/service/ApiKeysService.java | 106 ---
 .../msgrtr/nsa/cambria/service/EventsService.java  |  76 --
 .../msgrtr/nsa/cambria/service/MMService.java      |  68 --
 .../msgrtr/nsa/cambria/service/MetricsService.java |  54 --
 .../msgrtr/nsa/cambria/service/TopicService.java   | 176 -----
 .../nsa/cambria/service/TransactionService.java    |  62 --
 .../msgrtr/nsa/cambria/service/UIService.java      |  92 ---
 .../nsa/cambria/service/impl/AdminServiceImpl.java | 188 -----
 .../cambria/service/impl/ApiKeysServiceImpl.java   | 325 ---------
 .../service/impl/BaseTransactionDbImpl.java        | 154 ----
 .../cambria/service/impl/EventsServiceImpl.java    | 788 ---------------------
 .../nsa/cambria/service/impl/MMServiceImpl.java    | 604 ----------------
 .../cambria/service/impl/MetricsServiceImpl.java   | 115 ---
 .../nsa/cambria/service/impl/TopicServiceImpl.java | 649 -----------------
 .../service/impl/TransactionServiceImpl.java       | 100 ---
 .../nsa/cambria/service/impl/UIServiceImpl.java    | 206 ------
 .../transaction/DMaaPTransactionFactory.java       |  44 --
 .../cambria/transaction/DMaaPTransactionObj.java   |  83 ---
 .../cambria/transaction/DMaaPTransactionObjDB.java |  86 ---
 .../nsa/cambria/transaction/TransactionObj.java    | 202 ------
 .../msgrtr/nsa/cambria/transaction/TrnRequest.java | 183 -----
 .../impl/DMaaPSimpleTransactionFactory.java        |  61 --
 .../nsa/cambria/utils/ConfigurationReader.java     | 497 -------------
 .../nsa/cambria/utils/DMaaPCuratorFactory.java     |  69 --
 .../nsa/cambria/utils/DMaaPResponseBuilder.java    | 358 ----------
 .../msgrtr/nsa/cambria/utils/Emailer.java          | 215 ------
 .../msgrtr/nsa/cambria/utils/PropertyReader.java   | 133 ----
 .../msgrtr/nsa/cambria/utils/Utils.java            | 145 ----
 .../msgrtr/nsa/filter/ContentLengthFilter.java     | 133 ----
 .../msgrtr/nsa/filter/DefaultLength.java           |  37 -
 src/main/resources/DMaaPUrl.properties             |   2 +-
 src/main/resources/dme2testcase.properties         |  12 +-
 src/main/resources/endpoint.properties             |   2 +-
 src/main/resources/routes.conf                     |   2 +-
 src/main/scripts/cambriaTool.sh                    |   2 +-
 src/main/scripts/swmpkgclean.sh                    |   2 +-
 src/main/webapp/WEB-INF/spring-context.xml         |  24 +-
 src/main/webapp/WEB-INF/web.xml                    |   4 +-
 190 files changed, 15668 insertions(+), 15678 deletions(-)
 create mode 100644 src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
 create mode 100644 src/main/java/com/att/nsa/cambria/CambriaApiException.java
 create mode 100644 src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/Consumer.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/MetricsSet.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/Publisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java
 create mode 100644 src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/LogDetails.java
 create mode 100644 src/main/java/com/att/nsa/cambria/beans/TopicBean.java
 create mode 100644 src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java
 create mode 100644 src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java
 create mode 100644 src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java
 create mode 100644 src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metabroker/Broker.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metabroker/Topic.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
 create mode 100644 src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/AdminService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/ApiKeysService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/EventsService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/MMService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/MetricsService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/TopicService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/TransactionService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/UIService.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java
 create mode 100644 src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/Emailer.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/PropertyReader.java
 create mode 100644 src/main/java/com/att/nsa/cambria/utils/Utils.java
 create mode 100644 src/main/java/com/att/nsa/filter/ContentLengthFilter.java
 create mode 100644 src/main/java/com/att/nsa/filter/DefaultLength.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java
 delete mode 100644 src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java

(limited to 'src/main')

diff --git a/src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
new file mode 100644
index 0000000..431163e
--- /dev/null
+++ b/src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
@@ -0,0 +1,197 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.apiServer.metrics.cambria;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.metrics.publisher.CambriaPublisher;
+import com.att.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.CdmMetricsRegistry;
+import com.att.nsa.metrics.impl.CdmConstant;
+
+/**
+ * MetricsSender will send the given metrics registry content as an event on the
+ * Cambria event broker to the given topic.
+ * 
+ * @author author
+ *
+ */
+public class DMaaPMetricsSender implements Runnable {
+	public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled";
+	public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl";
+	public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic";
+	public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds";
+
+	/**
+	 * Schedule a periodic send of the given metrics registry using the given
+	 * settings container for the Cambria location, topic, and send frequency.
+	 * <br/>
+	 * <br/>
+	 * If the enabled flag is false, this method returns null.
+	 * 
+	 * @param scheduler
+	 * @param metrics
+	 * @param settings
+	 * @param defaultTopic
+	 * @return a handle to the scheduled task
+	 */
+	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+			 String defaultTopic) {
+		log.info("Inside : DMaaPMetricsSender : sendPeriodically");
+	String	cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
+	boolean setEnable=true;
+	if (cambriaSetting!=null && cambriaSetting.equals("false") )
+	setEnable= false;
+	//System.out.println(setEnable+"XXXXXXXXXXXXXXXX");
+		if (setEnable) {
+			String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
+			
+			Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
+			
+			String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
+			if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";     
+			
+	//		Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?defaultTopic:Setting_CambriaBaseUrl;
+			
+			String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
+			
+			int _CambriaSendFreqSecs =30;
+			if(Setting_CambriaSendFreqSecs!=null){
+				 _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs);
+			}
+			
+
+			return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
+					Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs
+				);
+			/*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
+					settings.getString(kSetting_CambriaBaseUrl, "localhost"),
+					settings.getString(kSetting_CambriaTopic, defaultTopic),
+					settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/
+		} else {
+			return null;
+		}
+	}
+
+	/**
+	 * Schedule a periodic send of the metrics registry to the given Cambria
+	 * broker and topic.
+	 * 
+	 * @param scheduler
+	 * @param metrics
+	 *            the registry to send
+	 * @param cambriaBaseUrl
+	 *            the base URL for Cambria
+	 * @param topic
+	 *            the topic to publish on
+	 * @param everySeconds
+	 *            how frequently to publish
+	 * @return a handle to the scheduled task
+	 */
+	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
+			String cambriaBaseUrl, String topic, int everySeconds) {
+		return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds,
+				everySeconds, TimeUnit.SECONDS);
+	}
+
+	/**
+	 * Create a metrics sender.
+	 * 
+	 * @param metrics
+	 * @param cambriaBaseUrl
+	 * @param topic
+	 */
+	public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) {
+		try {
+			fMetrics = metrics;
+			fHostname = InetAddress.getLocalHost().getHostName();
+
+			// setup a "simple" publisher that will send metrics immediately
+			fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
+		} catch (UnknownHostException e) {
+			log.warn("Unable to get localhost address in MetricsSender constructor.", e);
+			throw new RuntimeException(e);
+		}
+	}
+
+	/**
+	 * Send on demand.
+	 */
+	public void send() {
+		try {
+			final JSONObject o = fMetrics.toJson();
+			o.put("hostname", fHostname);
+			o.put("now", System.currentTimeMillis());
+			o.put("metricsSendTime", addTimeStamp());
+			o.put("transactionEnabled", false);
+			fCambria.send(fHostname, o.toString());
+		} catch (JSONException e) {
+			log.warn("Error posting metrics to Cambria: " + e.getMessage());
+		} catch (IOException e) {
+			log.warn("Error posting metrics to Cambria: " + e.getMessage());
+		}
+	}
+
+	/**
+	 * Run() calls send(). It's meant for use in a background-scheduled task.
+	 */
+	@Override
+	public void run() {
+		send();
+	}
+
+	private final CdmMetricsRegistry fMetrics;
+	private final CambriaPublisher fCambria;
+	private final String fHostname;
+
+	//private static final Logger log = LoggerFactory.getLogger(MetricsSender.class);
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
+	/**
+	 * method creates and returns a CdmConstant object using the current timestamp
+	 * 
+	 * @return
+	 */
+	public CdmConstant addTimeStamp() {
+		// Add the timestamp with every metrics send
+		final long metricsSendTime = System.currentTimeMillis();
+		final Date d = new Date(metricsSendTime);
+		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+		return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text);
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/CambriaApiException.java b/src/main/java/com/att/nsa/cambria/CambriaApiException.java
new file mode 100644
index 0000000..1f8f705
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/CambriaApiException.java
@@ -0,0 +1,80 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria;
+
+import org.json.JSONObject;
+
+import com.att.nsa.apiServer.NsaAppException;
+import com.att.nsa.cambria.exception.ErrorResponse;
+
+public class CambriaApiException extends NsaAppException
+{
+	
+	private ErrorResponse errRes;
+	/**
+	 * Implements constructor CambriaApiException
+	 * @param jsonObject
+	 * 
+	 */
+	public CambriaApiException ( JSONObject jsonObject )
+	{
+		super ( jsonObject );
+	}
+
+	/**
+	 * Implements constructor CambriaApiException
+	 * @param status
+	 * @param msg
+	 */
+	public CambriaApiException ( int status, String msg )
+	{
+		super ( status, msg );
+	}
+
+	/**
+	 * Implements constructor CambriaApiException
+	 * @param status
+	 * @param jsonObject
+	 */
+	public CambriaApiException ( int status, JSONObject jsonObject )
+	{
+		super ( status, jsonObject );
+	}
+	
+	public CambriaApiException (ErrorResponse errRes)
+	{
+		super(errRes.getHttpStatusCode(),errRes.getErrorMessage());
+		this.errRes = errRes;
+	}
+	
+	/*
+	 * defined long type constant serialVersionUID
+	 */
+	private static final long serialVersionUID = 1L;
+	public ErrorResponse getErrRes() {
+		return errRes;
+	}
+
+	public void setErrRes(ErrorResponse errRes) {
+		this.errRes = errRes;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java b/src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java
new file mode 100644
index 0000000..117f8ab
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+/**
+ * CambriaApiVersionInfo will provide the version of cambria code
+ * 
+ * @author author
+ *
+ */
+public class CambriaApiVersionInfo {
+    
+	/**
+	 * 3 constants are defined:-
+	 * PROPS,VERSION and LOG
+	 */
+	
+	private static final Properties PROPS = new Properties();
+    private static final String VERSION;
+
+
+    private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class);
+    
+    /**
+     * private constructor created with no argument
+     * to avoid default constructor
+     */
+    private CambriaApiVersionInfo()
+    {
+    	
+    }
+    
+    /**
+     * returns version of String type
+     */
+    public static String getVersion() {
+        return VERSION;
+    }
+
+    /** 
+     * 
+     * defines static initialization method
+     * It initializes VERSION Constant
+     * it catches any IOException raised while loading the properties
+     * and logs the failure instead of propagating it
+     * 
+     */
+    
+    static {
+        String use = null;
+        try {
+            final InputStream is = CambriaApiVersionInfo.class
+                    .getResourceAsStream("/cambriaApiVersion.properties");
+            if (is != null) {
+            	PROPS.load(is);
+                use = PROPS.getProperty("cambriaApiVersion", null);
+            }
+        } catch (IOException e) {
+            LOG.error("Failed due to IO EXception:"+e);
+        }
+        VERSION = use;
+    }
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/Consumer.java b/src/main/java/com/att/nsa/cambria/backends/Consumer.java
new file mode 100644
index 0000000..f1b43c2
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/Consumer.java
@@ -0,0 +1,96 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends;
+
+/**
+ * A consumer interface. Consumers pull the next message from a given topic.
+ * @author author
+ */
+public interface Consumer
+{	
+	/**
+	 * A message interface provide the offset and message
+	 * @author author
+	 *
+	 */
+	public interface Message
+	{	
+		/**
+		 * returning the offset of that particular message 
+		 * @return long
+		 */
+		long getOffset ();
+		/**
+		 * returning the message 
+		 * @return message
+		 */
+		String getMessage ();
+	}
+
+	/**
+	 * Get this consumer's name
+	 * @return name
+	 */
+	String getName ();
+
+	/**
+	 * Get creation time in ms
+	 * @return
+	 */
+	long getCreateTimeMs ();
+
+	/**
+	 * Get last access time in ms
+	 * @return
+	 */
+	long getLastAccessMs ();
+	
+	/**
+	 * Get the next message from this source. This method must not block.
+	 * @return the next message, or null if none are waiting
+	 */
+	Message nextMessage ();
+
+	/**
+	 * Get the next message from this source. This method must not block.
+	 * @param atOffset start with the next message at or after atOffset. -1 means next from last request
+	 * @return the next message, or null if none are waiting
+	 */
+//	Message nextMessage ( long atOffset );
+
+	/**
+	 * Close/clean up this consumer
+	 */
+	void close();
+	
+	/**
+	 * Commit the offset of the last consumed message
+	 * 
+	 */
+	void commitOffsets();
+	
+	/**
+	 * Get the offset this consumer is currently at
+	 * @return offset
+	 */
+	long getOffset();
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java b/src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java
new file mode 100644
index 0000000..1597c07
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java
@@ -0,0 +1,110 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends;
+
+import java.util.Collection;
+
+/**
+ * This is the factory class to instantiate the consumer
+ * 
+ * @author author
+ *
+ */
+
+public interface ConsumerFactory {
+	public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled";
+	public static boolean kDefault_IsCacheEnabled = true;
+
+	/**
+	 * User defined exception for Unavailable Exception
+	 * 
+	 * @author author
+	 *
+	 */
+	public class UnavailableException extends Exception {
+		/**
+		 * Unavailable Exception with message
+		 * 
+		 * @param msg
+		 */
+		public UnavailableException(String msg) {
+			super(msg);
+		}
+
+		/**
+		 * Unavailable Exception with the throwable object
+		 * 
+		 * @param t
+		 */
+		public UnavailableException(Throwable t) {
+			super(t);
+		}
+
+		/**
+		 * Unavailable Exception with the message and cause
+		 * 
+		 * @param msg
+		 * @param cause
+		 */
+		public UnavailableException(String msg, Throwable cause) {
+			super(msg, cause);
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * For admin use, drop all cached consumers.
+	 */
+	public void dropCache();
+
+	/**
+	 * Get or create a consumer for the given set of info (topic, group, id)
+	 * 
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 * @param timeoutMs
+	 * @return
+	 * @throws UnavailableException
+	 */
+	public Consumer getConsumerFor(String topic, String consumerGroupId,
+			String clientId, int timeoutMs) throws UnavailableException;
+
+	/**
+	 * For factories that employ a caching mechanism, this allows callers to
+	 * explicitly destroy a consumer that resides in the factory's cache.
+	 * 
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 */
+	public void destroyConsumer(String topic, String consumerGroupId,
+			String clientId);
+
+	/**
+	 * For admin/debug, we provide access to the consumers
+	 * 
+	 * @return a collection of consumers
+	 */
+	public Collection<? extends Consumer> getConsumers();
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/MetricsSet.java b/src/main/java/com/att/nsa/cambria/backends/MetricsSet.java
new file mode 100644
index 0000000..ce104ac
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/MetricsSet.java
@@ -0,0 +1,71 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends;
+
+import com.att.nsa.metrics.CdmMetricsRegistry;
+/**
+ * This interface will help to generate metrics
+ * @author author
+ *
+ */
+public interface MetricsSet extends CdmMetricsRegistry{
+
+	/**
+	 * This method will setup cambria sender code
+	 */
+	public void setupCambriaSender ();
+	/**
+	 * This method will define on route complete
+	 * @param name
+	 * @param durationMs
+	 */
+	public void onRouteComplete ( String name, long durationMs );
+	/**
+	 * This method will help the kafka publisher while publishing the messages
+	 * @param amount
+	 */
+	public void publishTick ( int amount );
+	/**
+	 * This method will help the kafka consumer while consuming the messages
+	 * @param amount
+	 */
+	public void consumeTick ( int amount );
+	/**
+	 * This method will be called if the kafka consumer cache misses
+	 */
+	public void onKafkaConsumerCacheMiss ();
+	/**
+	 * This method will be called when the kafka consumer cache is hit while publishing/consuming messages
+	 */
+	public void onKafkaConsumerCacheHit ();
+	/**
+	 * This method will be called when a kafka consumer is claimed
+	 */
+	public void onKafkaConsumerClaimed ();
+	/**
+	 * This method will be called if the Kafka consumer times out
+	 */
+	public void onKafkaConsumerTimeout ();
+
+
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/Publisher.java b/src/main/java/com/att/nsa/cambria/backends/Publisher.java
new file mode 100644
index 0000000..696e78f
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/Publisher.java
@@ -0,0 +1,98 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import kafka.producer.KeyedMessage;
+
+import com.att.nsa.cambria.beans.LogDetails;
+
+/**
+ * A publisher interface. Publishers receive messages and post them to a topic.
+ * @author author
+ */
+public interface Publisher
+{
+	/**
+	 * A message interface. The message has a key and a body.
+	 * @author author
+	 */
+	public interface message
+	{
+		/**
+		 * Get the key for this message. The key is used to partition messages
+		 * into "sub-streams" that have guaranteed order. The key can be null,
+		 * which means the message can be processed without any concern for order.
+		 * 
+		 * @return a key, possibly null
+		 */
+		String getKey();
+
+		/**
+		 * Get the message body.
+		 * @return a message body
+		 */
+		String getMessage();
+		/**
+		 * set the logging params for transaction enabled logging 
+		 * @param logDetails
+		 */
+		void setLogDetails (LogDetails logDetails);
+		/**
+		 * Get the log details for transaction enabled logging
+		 * @return LogDetails
+		 */
+		LogDetails getLogDetails ();
+		
+		/**
+		 * boolean transactionEnabled
+		 * @return true/false
+		 */
+		boolean isTransactionEnabled();
+		/**
+		 * Set the transaction enabled flag from prop file or topic based implementation
+		 * @param transactionEnabled
+		 */
+		void setTransactionEnabled(boolean transactionEnabled);
+	}
+
+	/**
+	 * Send a single message to a topic. Equivalent to sendMessages with a list of size 1.
+	 * @param topic
+	 * @param msg
+	 * @throws IOException
+	 */
+	public void sendMessage ( String topic, message msg ) throws IOException;
+
+	/**
+	 * Send messages to a topic.
+	 * @param topic
+	 * @param msgs
+	 * @throws IOException
+	 */
+	public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
+	
+	public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
new file mode 100644
index 0000000..44c74a6
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
@@ -0,0 +1,245 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.kafka;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.message.MessageAndMetadata;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import com.att.nsa.cambria.backends.Consumer;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ * 
+ * @author author
+ *
+ */
+public class KafkaConsumer implements Consumer {
+	private enum State {
+		OPENED, CLOSED
+	}
+
+	/**
+	 * KafkaConsumer() is constructor. It has following 4 parameters:-
+	 * @param topic
+	 * @param group
+	 * @param id
+	 * @param cc
+	 * 
+	 */
+	
+	public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
+		fTopic = topic;
+		fGroup = group;
+		fId = id;
+		fConnector = cc;
+
+		fCreateTimeMs = System.currentTimeMillis();
+		fLastTouch = fCreateTimeMs;
+
+		fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+		offset = 0;
+
+		state = KafkaConsumer.State.OPENED;
+
+		final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
+		topicCountMap.put(fTopic, 1);
+		final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = fConnector
+				.createMessageStreams(topicCountMap);
+		final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(fTopic);
+		fStream = streams.iterator().next();
+	}
+
+	
+	/** getName() method returns string type value.
+	 * returns the following three fields joined into a string:- 
+	 * fTopic,fGroup,fId
+	 * @Override
+	 */
+	public String getName() {
+		return fTopic + " : " + fGroup + " : " + fId;
+	}
+
+	/** getCreateTimeMs() method returns long type value.
+	 * returns fCreateTimeMs variable value 
+	 * @Override
+	 * 
+	 */
+	public long getCreateTimeMs() {
+		return fCreateTimeMs;
+	}
+
+	/** getLastAccessMs() method returns long type value.
+	 * returns fLastTouch variable value 
+	 * @Override
+	 * 
+	 */
+	public long getLastAccessMs() {
+		return fLastTouch;
+	}
+
+	
+	/** 
+	 * nextMessage() is a synchronized method, so only one thread can execute it at a time.
+	 * It returns the next Consumer.Message, or null if none is waiting.
+	 * @Override
+	 * */
+	public synchronized Consumer.Message nextMessage() {
+		if (getState() == KafkaConsumer.State.CLOSED) {
+			log.warn("nextMessage() called on closed KafkaConsumer " + getName());
+			return null;
+		}
+
+		try {
+			ConsumerIterator<byte[], byte[]> it = fStream.iterator();
+			if (it.hasNext()) {
+				final MessageAndMetadata<byte[], byte[]> msg = it.next();
+				offset = msg.offset();
+
+				return new Consumer.Message() {
+					@Override
+					public long getOffset() {
+						return msg.offset();
+					}
+
+					@Override
+					public String getMessage() {
+						return new String(msg.message());
+					}
+				};
+			}
+		} catch (kafka.consumer.ConsumerTimeoutException x) {
+			log.debug(fLogTag + ": ConsumerTimeoutException in Kafka consumer; returning null. ");
+		} catch (java.lang.IllegalStateException x) {
+			log.error(fLogTag + ": Illegal state exception in Kafka consumer; dropping stream. " + x.getMessage());
+		}
+
+		return null;
+	}
+	
+	/** getOffset() method returns long type value.
+	 * returns offset variable value 
+	 * @Override
+	 * 
+	 */
+	public long getOffset() {
+		return offset;
+	}
+
+	/** commit offsets 
+	 * commitOffsets() logs a warning and returns without committing if the KafkaConsumer is already closed.
+	 * @Override
+	 * 
+	 */
+	public void commitOffsets() {
+		if (getState() == KafkaConsumer.State.CLOSED) {
+			log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+			return;
+		}
+		fConnector.commitOffsets();
+	}
+
+	/**
+	 * updating fLastTouch with current time in ms
+	 */
+	public void touch() {
+		fLastTouch = System.currentTimeMillis();
+	}
+	
+	/** getLastTouch() method returns long type value.
+	 * returns fLastTouch variable value
+	 * 
+	 */
+	public long getLastTouch() {
+		return fLastTouch;
+	}
+
+	/**
+	 *   setting the kafkaConsumer state to closed
+	 */
+	public synchronized void close() {
+		if (getState() == KafkaConsumer.State.CLOSED) {
+			log.warn("close() called on closed KafkaConsumer " + getName());
+			return;
+		}
+
+		setState(KafkaConsumer.State.CLOSED);
+		fConnector.shutdown();
+	}
+	
+	/**
+	 * getConsumerGroup() returns Consumer group
+	 * @return
+	 */
+	public String getConsumerGroup() {
+		return fGroup;
+	}
+	
+	/**
+	 * getConsumerId returns Consumer Id
+	 * @return
+	 */
+	public String getConsumerId() {
+		return fId;
+	}
+
+	/**
+	 * getState returns kafkaconsumer state
+	 * @return
+	 */	
+	private KafkaConsumer.State getState() {
+		return this.state;
+	}
+	
+	/**
+	 * setState() sets the kafkaConsumer state
+	 * @param state
+	 */
+	private void setState(KafkaConsumer.State state) {
+		this.state = state;
+	}
+
+	private ConsumerConnector fConnector;
+	private final String fTopic;
+	private final String fGroup;
+	private final String fId;
+	private final String fLogTag;
+	private final KafkaStream<byte[], byte[]> fStream;
+	private long fCreateTimeMs;
+	private long fLastTouch;
+	private long offset;
+	private KafkaConsumer.State state;
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
+	//private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java
new file mode 100644
index 0000000..5f38443
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java
@@ -0,0 +1,613 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.kafka;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.I0Itec.zkclient.exception.ZkException;
+import org.I0Itec.zkclient.exception.ZkInterruptedException;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.imps.CuratorFrameworkState;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.PathChildrenCache;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
+import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
+import org.apache.curator.framework.state.ConnectionState;
+import org.apache.curator.framework.state.ConnectionStateListener;
+import org.apache.curator.utils.EnsurePath;
+import org.apache.curator.utils.ZKPaths;
+import org.apache.http.annotation.NotThreadSafe;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.KeeperException.NoNodeException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * A cache of active KafkaConsumer objects, kept consistent across API nodes
+ * through ZooKeeper (via Curator): ownership of each consumer is recorded
+ * under a ZK node, and PathChildrenCache events tell this node when another
+ * node claims or removes one of its consumers.
+ *
+ * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
+ *                must serialize access to it (NOTE(review): the original
+ *                sentence was left unfinished — confirm the intended guarantee)
+ * @author author
+ *
+ */
+@NotThreadSafe
+public class KafkaConsumerCache {
+
+	// property keys (read from the AJSC property map) and their fallbacks
+	private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
+	private static final int kDefault_ConsumerHandoverWaitMs = 500;
+
+	private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds";
+	private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs";
+
+	private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath";
+	private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache";
+
+	// kafka defaults to timing out a client after 6 seconds of inactivity, but
+	// it heartbeats even when the client isn't fetching. Here, we don't
+	// want to prematurely rebalance the consumer group. Assuming clients are
+	// hitting
+	// the server at least every 30 seconds, timing out after 2 minutes should
+	// be okay.
+	// FIXME: consider allowing the client to specify its expected call rate?
+	private static final long kDefault_MustTouchEveryMs = 1000 * 60 * 2;
+
+	// check for expirations pretty regularly
+	private static final long kDefault_SweepEverySeconds = 15;
+
+	// lifecycle of this cache, mirroring the ZK connection state
+	private enum Status {
+		NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
+	}
+
+	/**
+	 * User defined exception class for kafka consumer cache
+	 * 
+	 * @author author
+	 *
+	 */
+	public class KafkaConsumerCacheException extends Exception {
+		/**
+		 * Wraps an underlying cause.
+		 * 
+		 * @param t the cause
+		 */
+		KafkaConsumerCacheException(Throwable t) {
+			super(t);
+		}
+
+		/**
+		 * Creates an exception carrying a message only.
+		 * 
+		 * @param s the detail message
+		 */
+		public KafkaConsumerCacheException(String s) {
+			super(s);
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Creates a KafkaConsumerCache object. Before it is used, you must call
+	 * startCache()
+	 * 
+	 * @param apiId id of this API node; written to ZK when claiming a consumer
+	 * @param metrics sink for cache hit/miss/timeout/claim counters
+	 */
+	public KafkaConsumerCache(String apiId,  MetricsSet metrics) {
+
+		if (apiId == null) {
+			throw new IllegalArgumentException("API Node ID must be specified.");
+		}
+
+		fApiId = apiId;
+	//	fSettings = s;
+		fMetrics = metrics;
+		String strkSetting_ZkBasePath= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ZkBasePath);
+		if(null==strkSetting_ZkBasePath)strkSetting_ZkBasePath = kDefault_ZkBasePath;
+		fBaseZkPath = strkSetting_ZkBasePath;
+
+		fConsumers = new ConcurrentHashMap<String, KafkaConsumer>();
+		fSweepScheduler = Executors.newScheduledThreadPool(1);
+
+		curatorConsumerCache = null;
+
+		status = Status.NOT_STARTED;
+
+		// react to ZK session transitions: loss clears the local cache,
+		// reconnection re-syncs it against the ZK view
+		listener = new ConnectionStateListener() {
+			public void stateChanged(CuratorFramework client, ConnectionState newState) {
+				if (newState == ConnectionState.LOST) {
+					log.info("ZooKeeper connection expired");
+					handleConnectionLoss();
+				} else if (newState == ConnectionState.READ_ONLY) {
+					log.warn("ZooKeeper connection set to read only mode.");
+				} else if (newState == ConnectionState.RECONNECTED) {
+					log.info("ZooKeeper connection re-established");
+					handleReconnection();
+				} else if (newState == ConnectionState.SUSPENDED) {
+					log.warn("ZooKeeper connection has been suspended.");
+					handleConnectionSuspended();
+				}
+			}
+		};
+	}
+
+	/**
+	 * Start the cache service. This must be called before any get/put
+	 * operations.
+	 * 
+	 * @param mode
+	 *            DMAAP or cambria
+	 * @param curator
+	 * @throws IOException
+	 * @throws KafkaConsumerCacheException
+	 */
+	public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
+		try {
+
+			// CuratorFramework curator = null;
+
+			// Changed the class from where we are initializing the curator
+			// framework
+			if (mode != null && mode.equals(CambriaConstants.CAMBRIA)) {
+				curator = ConfigurationReader.getCurator();
+			} else if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
+				curator = getCuratorFramework(curator);
+			}
+
+			curator.getConnectionStateListenable().addListener(listener);
+
+			setStatus(Status.CONNECTED);
+
+			// watch the claim nodes so we hear about claims made by other
+			// API nodes (data is cached, hence the "true")
+			curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
+			curatorConsumerCache.start();
+
+			curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
+				public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
+					switch (event.getType()) {
+					case CHILD_ADDED: {
+						// NOTE(review): new String(byte[]) here and below uses
+						// the platform default charset — confirm claim payloads
+						// are ASCII/UTF-8
+						final String apiId = new String(event.getData().getData());
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						log.info(apiId + " started consumer " + consumer);
+						break;
+					}
+					case CHILD_UPDATED: {
+						final String apiId = new String(event.getData().getData());
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						// another node re-wrote the claim node: if we hold this
+						// consumer locally, it has been claimed away from us
+						if (fConsumers.containsKey(consumer)) {
+							log.info(apiId + " claimed consumer " + consumer + " from " + fApiId);
+
+							dropClaimedConsumer(consumer);
+						}
+
+						break;
+					}
+					case CHILD_REMOVED: {
+						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+
+						if (fConsumers.containsKey(consumer)) {
+							log.info("Someone wanted consumer " + consumer + " gone;  removing it from the cache");
+							dropConsumer(consumer, false);
+						}
+
+						break;
+					}
+					default:
+						break;
+					}
+				}
+			});
+
+			// initialize the ZK path
+			EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
+			ensurePath.ensure(curator.getZookeeperClient());
+
+			//final long freq = fSettings.getLong(kSetting_SweepEverySeconds, kDefault_SweepEverySeconds);
+			long freq = kDefault_SweepEverySeconds;
+			String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_SweepEverySeconds);
+			if(null==strkSetting_SweepEverySeconds) strkSetting_SweepEverySeconds = kDefault_SweepEverySeconds+"";
+			
+			// NOTE(review): a non-numeric property value throws
+			// NumberFormatException, surfacing as KafkaConsumerCacheException
+			  freq = Long.parseLong(strkSetting_SweepEverySeconds);
+					
+			fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
+			log.info("KafkaConsumerCache started");
+			log.info("sweeping cached clients every " + freq + " seconds");
+		} catch (ZkException e) {
+			throw new KafkaConsumerCacheException(e);
+		} catch (Exception e) {
+			throw new KafkaConsumerCacheException(e);
+		}
+	}
+
+	/**
+	 * Gets the curator object, starting it if it has not been started yet and
+	 * blocking until the ZooKeeper connection is established.
+	 * 
+	 * @param curator
+	 * @return curator object
+	 */
+	public static CuratorFramework getCuratorFramework(CuratorFramework curator) {
+		if (curator.getState() == CuratorFrameworkState.LATENT) {
+			curator.start();
+
+			try {
+				curator.blockUntilConnected();
+			} catch (InterruptedException e) {
+				// Ignore
+				// NOTE(review): the interrupt flag is not re-asserted
+				// (Thread.currentThread().interrupt()) — confirm intended
+				log.error("error while setting curator framework :" + e.getMessage());
+			}
+		}
+
+		return curator;
+	}
+
+	/**
+	 * Stop the cache service.
+	 * NOTE(review): assumes startCache() completed; if the cache was never
+	 * started, curatorConsumerCache is null and close() would NPE — confirm
+	 * callers always start before stopping.
+	 */
+	public void stopCache() {
+		setStatus(Status.DISCONNECTED);
+
+		final CuratorFramework curator = ConfigurationReader.getCurator();
+
+		if (curator != null) {
+			try {
+				curator.getConnectionStateListenable().removeListener(listener);
+				curatorConsumerCache.close();
+				log.info("Curator client closed");
+			} catch (ZkInterruptedException e) {
+				log.warn("Curator client close interrupted: " + e.getMessage());
+			} catch (IOException e) {
+				log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
+			}
+
+			curatorConsumerCache = null;
+		}
+
+		if (fSweepScheduler != null) {
+			fSweepScheduler.shutdownNow();
+			log.info("cache sweeper stopped");
+		}
+
+		if (fConsumers != null) {
+			fConsumers.clear();
+			fConsumers = null;
+		}
+
+		setStatus(Status.NOT_STARTED);
+
+		log.info("Consumer cache service stopped");
+	}
+
+	/**
+	 * Get a cached consumer by topic, group, and id, if it exists (and remains
+	 * valid) In addition, this method waits for all other consumer caches in
+	 * the cluster to release their ownership and delete their version of this
+	 * consumer.
+	 * 
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 * @return a consumer, or null
+	 */
+	public KafkaConsumer getConsumerFor(String topic, String consumerGroupId, String clientId)
+			throws KafkaConsumerCacheException {
+		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+			throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
+		final KafkaConsumer kc = fConsumers.get(consumerKey);
+
+		if (kc != null) {
+			log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
+			// refresh the activity timestamp so the sweeper keeps it alive
+			kc.touch();
+			fMetrics.onKafkaConsumerCacheHit();
+		} else {
+			log.debug("Consumer cache miss for [" + consumerKey + "]");
+			fMetrics.onKafkaConsumerCacheMiss();
+		}
+
+		return kc;
+	}
+
+	/**
+	 * Put a consumer into the cache by topic, group and ID
+	 * 
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param consumerId
+	 * @param consumer
+	 * @throws KafkaConsumerCacheException
+	 */
+	public void putConsumerFor(String topic, String consumerGroupId, String consumerId, KafkaConsumer consumer)
+			throws KafkaConsumerCacheException {
+		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+			throw new KafkaConsumerCacheException("The cache service is unavailable.");
+
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+		fConsumers.put(consumerKey, consumer);
+	}
+
+	/**
+	 * @return a snapshot copy of the currently cached consumers
+	 */
+	public Collection<? extends Consumer> getConsumers() {
+		return new LinkedList<KafkaConsumer>(fConsumers.values());
+	}
+
+	/**
+	 * This method is to drop all the consumer
+	 */
+	public void dropAllConsumers() {
+		// ConcurrentHashMap iteration is weakly consistent, so removing
+		// entries via dropConsumer while iterating is safe here
+		for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
+			dropConsumer(entry.getKey(), true);
+		}
+
+		// consumers should be empty here
+		if (fConsumers.size() > 0) {
+			log.warn("During dropAllConsumers, the consumer map is not empty.");
+			fConsumers.clear();
+		}
+	}
+
+	/**
+	 * Drop a consumer from our cache due to a timeout
+	 * 
+	 * @param key
+	 */
+	private void dropTimedOutConsumer(String key) {
+		fMetrics.onKafkaConsumerTimeout();
+
+		if (!fConsumers.containsKey(key)) {
+			log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key);
+			return;
+		}
+
+		// First, drop this consumer from our cache
+		dropConsumer(key, true);
+
+		final CuratorFramework curator = ConfigurationReader.getCurator();
+
+		// best-effort removal of the ZK claim node; failures are only logged
+		try {
+			curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
+		} catch (NoNodeException e) {
+			log.warn("A consumer was deleted from " + fApiId
+					+ "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
+		} catch (Exception e) {
+			log.debug("Unexpected exception while deleting consumer: " + e.getMessage());
+		}
+
+		log.info("Dropped " + key + " consumer due to timeout");
+	}
+
+	/**
+	 * Drop a consumer from our cache due to another API node claiming it as
+	 * their own.
+	 * 
+	 * @param key
+	 */
+	private void dropClaimedConsumer(String key) {
+		// if the consumer is still in our cache, it implies a claim.
+		if (fConsumers.containsKey(key)) {
+			fMetrics.onKafkaConsumerClaimed();
+			log.info("Consumer [" + key + "] claimed by another node.");
+		}
+
+		dropConsumer(key, false);
+	}
+
+	/**
+	 * Removes the consumer from the cache and closes its connection to the
+	 * kafka broker(s).
+	 * 
+	 * @param key
+	 * @param dueToTimeout currently unused by this method
+	 */
+	private void dropConsumer(String key, boolean dueToTimeout) {
+		final KafkaConsumer kc = fConsumers.remove(key);
+
+		if (kc != null) {
+			log.info("closing Kafka consumer " + key);
+			kc.close();
+		}
+	}
+
+//	private final rrNvReadable fSettings;
+	private final MetricsSet fMetrics;
+	private final String fBaseZkPath;	// ZK parent node under which consumer claims live
+	private final ScheduledExecutorService fSweepScheduler;	// runs sweeper; see startCache()
+	private final String fApiId;	// this node's id; payload of our ZK claims
+	private final ConnectionStateListener listener;
+
+	private ConcurrentHashMap<String, KafkaConsumer> fConsumers;	// keyed by makeConsumerKey(); null after stopCache()
+	private PathChildrenCache curatorConsumerCache;
+
+	private volatile Status status;
+
+	// re-sync the local cache with ZK after a connection comes back
+	private void handleReconnection() {
+
+		log.info("Reading current cache data from ZK and synchronizing local cache");
+
+		final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
+
+		// Remove all the consumers in this API nodes cache that now belong to
+		// other API nodes.
+		for (ChildData cachedConsumer : cacheData) {
+			final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
+			final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
+					: "undefined";
+
+			if (!fApiId.equals(owningApiId)) {
+				fConsumers.remove(consumerId);
+			}
+		}
+
+		setStatus(Status.CONNECTED);
+	}
+
+	private void handleConnectionSuspended() {
+		log.info("Suspending cache until ZK connection is re-established");
+
+		setStatus(Status.SUSPENDED);
+	}
+
+	private void handleConnectionLoss() {
+		log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)");
+
+		setStatus(Status.DISCONNECTED);
+
+		closeAllCachedConsumers();
+		fConsumers.clear();
+	}
+
+	private void closeAllCachedConsumers() {
+		for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
+			entry.getValue().close();
+		}
+	}
+
+	// key format shared with the ZK child node names: topic::group::clientId
+	private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) {
+		return topic + "::" + consumerGroupId + "::" + clientId;
+	}
+
+	/**
+	 * Claims ownership of a consumer for this API node by writing fApiId to
+	 * the consumer's ZK node, then backs off briefly to allow handover.
+	 * 
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param consumerId
+	 * @throws KafkaConsumerCacheException
+	 */
+	public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId)
+			throws KafkaConsumerCacheException {
+		// get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
+		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+
+		try {
+			final String consumerPath = fBaseZkPath + "/" + consumerKey;
+
+			log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
+
+			final CuratorFramework curator = ConfigurationReader.getCurator();
+
+			try {
+				curator.setData().forPath(consumerPath, fApiId.getBytes());
+			} catch (KeeperException.NoNodeException e) {
+				curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
+			}
+
+			log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
+		} catch (Exception e) {
+			log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
+			throw new KafkaConsumerCacheException(e);
+		}
+
+		log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
+
+		try {
+			int kSetting_ConsumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+			String strkSetting_ConsumerHandoverWaitMs= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ConsumerHandoverWaitMs+"");
+			if(strkSetting_ConsumerHandoverWaitMs!=null) kSetting_ConsumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+			
+					Thread.sleep(kSetting_ConsumerHandoverWaitMs);
+			//Thread.sleep(fSettings.getInt(kSetting_ConsumerHandoverWaitMs, kDefault_ConsumerHandoverWaitMs));
+		} catch (InterruptedException e) {
+			// Ignore
+			// NOTE(review): swallows the interrupt without re-asserting
+			// Thread.currentThread().interrupt() — confirm intended
+		}
+	}
+
+	// drops any cached consumer whose last touch() is older than the
+	// configured window; run periodically by the sweeper task
+	private void sweep() {
+		final LinkedList<String> removals = new LinkedList<String>();
+		long mustTouchEveryMs = kDefault_MustTouchEveryMs;
+		String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_TouchEveryMs);
+		//if(null!=strkSetting_TouchEveryMs) strkSetting_TouchEveryMs = kDefault_MustTouchEveryMs+"";
+		if(null!=strkSetting_TouchEveryMs)
+		{
+		  // NOTE(review): a non-numeric value throws NumberFormatException;
+		  // an uncaught throw from a scheduleAtFixedRate task cancels all
+		  // future sweeps
+		  mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);	
+		}
+
+		//final long mustTouchEveryMs = fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
+		final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
+
+		for (Entry<String, KafkaConsumer> e : fConsumers.entrySet()) {
+			final long lastTouchMs = e.getValue().getLastTouch();
+
+			log.debug("consumer " + e.getKey() + " last touched at " + lastTouchMs);
+
+			if (lastTouchMs < oldestAllowedTouchMs) {
+				log.info("consumer " + e.getKey() + " has expired");
+				removals.add(e.getKey());
+			}
+		}
+
+		for (String key : removals) {
+			dropTimedOutConsumer(key);
+		}
+	}
+
+	/**
+	 * Creating a thread to run the sweep method
+	 * 
+	 * @author author
+	 *
+	 */
+	private class sweeper implements Runnable {
+		/**
+		 * run method
+		 */
+		public void run() {
+			sweep();
+		}
+	}
+
+	/**
+	 * This method is to drop consumer
+	 * 
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 */
+	public void dropConsumer(String topic, String consumerGroup, String clientId) {
+		dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false);
+	}
+
+	private Status getStatus() {
+		return this.status;
+	}
+
+	private void setStatus(Status status) {
+		this.status = status;
+	}
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
+	//private static final Logger log = LoggerFactory.getLogger(KafkaConsumerCache.class);
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java
new file mode 100644
index 0000000..42a6bb9
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java
@@ -0,0 +1,168 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.kafka;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import kafka.common.FailedToSendMessageException;
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
+
+import org.json.JSONException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.cambria.backends.Publisher;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Sends raw JSON objects into Kafka.
+ * 
+ * Could improve space: BSON rather than JSON?
+ * 
+ * @author author
+ *
+ */
+
+public class KafkaPublisher implements Publisher {
+	/**
+	 * Builds the underlying Kafka producer. The broker list and producer
+	 * tuning values are read from the AJSC property map (keys prefixed
+	 * "kafka."), falling back to the defaults passed to transferSetting().
+	 * 
+	 * @param settings legacy parameter, no longer read; kept for wiring compatibility
+	 * @throws rrNvReadable.missingReqdSetting
+	 */
+	public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
+		final Properties props = new Properties();
+
+		String kafkaConnUrl = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				"kafka.metadata.broker.list");
+		if (null == kafkaConnUrl) {
+			kafkaConnUrl = "localhost:9092";
+		}
+		// log (not System.out) so the value shows up with the application logs
+		log.info("kafkaConnUrl:- " + kafkaConnUrl);
+
+		transferSetting(props, "metadata.broker.list", kafkaConnUrl);
+		transferSetting(props, "request.required.acks", "1");
+		transferSetting(props, "message.send.max.retries", "5");
+		transferSetting(props, "retry.backoff.ms", "150");
+
+		props.put("serializer.class", "kafka.serializer.StringEncoder");
+
+		fConfig = new ProducerConfig(props);
+		fProducer = new Producer<String, String>(fConfig);
+	}
+
+	/**
+	 * Send a message with a given topic and key. Delegates to sendMessages()
+	 * with a single-element list.
+	 * 
+	 * @param msg
+	 * @throws FailedToSendMessageException
+	 * @throws JSONException
+	 */
+	@Override
+	public void sendMessage(String topic, message msg) throws IOException, FailedToSendMessageException {
+		final List<message> msgs = new LinkedList<message>();
+		msgs.add(msg);
+		sendMessages(topic, msgs);
+	}
+
+	/**
+	 * Publishes a batch of pre-built keyed messages.
+	 * 
+	 * @param topic used only for logging; the destination is carried by each KeyedMessage
+	 * @param kms
+	 * @throws IOException
+	 * @throws FailedToSendMessageException rethrown with the original cause preserved
+	 */
+	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+		try {
+			fProducer.send(kms);
+		} catch (FailedToSendMessageException excp) {
+			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+			throw new FailedToSendMessageException(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Send a set of messages. Each must have a "key" string value.
+	 * 
+	 * @param topic
+	 * @param msgs
+	 * @throws FailedToSendMessageException
+	 * @throws JSONException
+	 */
+	@Override
+	public void sendMessages(String topic, List<? extends message> msgs)
+			throws IOException, FailedToSendMessageException {
+		log.info("sending " + msgs.size() + " events to [" + topic + "]");
+
+		final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
+		for (message o : msgs) {
+			final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
+			kms.add(data);
+		}
+		try {
+			fProducer.send(kms);
+		} catch (FailedToSendMessageException excp) {
+			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+			throw new FailedToSendMessageException(excp.getMessage(), excp);
+		}
+	}
+
+	// both assigned once in the constructor, hence final
+	private final ProducerConfig fConfig;
+	private final Producer<String, String> fProducer;
+
+	/**
+	 * Copies one "kafka."-prefixed property from the AJSC property map into
+	 * the producer Properties, using defVal when the property is absent.
+	 * 
+	 * @param props the producer properties being assembled
+	 * @param key the producer property name (looked up as "kafka." + key)
+	 * @param defVal value used when the property is not configured
+	 */
+	private void transferSetting(Properties props, String key, String defVal) {
+		String kafka_prop = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				"kafka." + key);
+		if (null == kafka_prop)
+			kafka_prop = defVal;
+		props.put(key, kafka_prop);
+	}
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java
new file mode 100644
index 0000000..f0982a9
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java
@@ -0,0 +1,160 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+/**
+ * In-memory implementation of ConsumerFactory, backed by a shared
+ * MemoryQueue. Consumers are never cached here.
+ *
+ * @author author
+ *
+ */
+public class MemoryConsumerFactory implements ConsumerFactory {
+	/**
+	 * Creates a factory whose consumers read from the given queue.
+	 *
+	 * @param q the in-memory queue all consumers will read from
+	 */
+	public MemoryConsumerFactory(MemoryQueue q) {
+		fQueue = q;
+	}
+
+	/**
+	 * Builds a fresh consumer for the topic/group pair. The client id and
+	 * timeout are ignored by the in-memory backend.
+	 *
+	 * @param topic
+	 * @param consumerGroupId
+	 * @param clientId
+	 * @param timeoutMs
+	 * @return Consumer
+	 */
+	@Override
+	public Consumer getConsumerFor(String topic, String consumerGroupId, String clientId, int timeoutMs) {
+		return new MemoryConsumer(topic, consumerGroupId);
+	}
+
+	private final MemoryQueue fQueue;
+
+	/**
+	 * A consumer bound to one topic/consumer pair on the shared MemoryQueue.
+	 */
+	private class MemoryConsumer implements Consumer {
+		/**
+		 * Captures the topic/consumer pair and records the creation time.
+		 *
+		 * @param topic
+		 * @param consumer
+		 */
+		public MemoryConsumer(String topic, String consumer) {
+			fTopic = topic;
+			fConsumer = consumer;
+			fCreateMs = System.currentTimeMillis();
+			fLastAccessMs = fCreateMs;
+		}
+
+		/**
+		 * Pulls the next message for this topic/consumer from the queue.
+		 */
+		@Override
+		public Message nextMessage() {
+			return fQueue.get(fTopic, fConsumer);
+		}
+
+		private final String fTopic;
+		private final String fConsumer;
+		private final long fCreateMs;
+		private long fLastAccessMs;
+
+		@Override
+		public void close() {
+			// nothing to close/clean up
+		}
+
+		/**
+		 * Offsets are not tracked by the in-memory backend.
+		 */
+		public void commitOffsets() {
+			// ignoring this aspect
+		}
+
+		/**
+		 * @return always 0; offsets are not tracked in memory
+		 */
+		public long getOffset() {
+			return 0;
+		}
+
+		/**
+		 * @return a name of the form topic/consumer
+		 */
+		@Override
+		public String getName() {
+			return fTopic + "/" + fConsumer;
+		}
+
+		@Override
+		public long getCreateTimeMs() {
+			return fCreateMs;
+		}
+
+		@Override
+		public long getLastAccessMs() {
+			return fLastAccessMs;
+		}
+	}
+
+	@Override
+	public void destroyConsumer(String topic, String consumerGroupId, String clientId) {
+		// no cache for memory consumers, so NOOP
+	}
+
+	@Override
+	public void dropCache() {
+		// nothing to do - there's no cache here
+	}
+
+	/**
+	 * @return an empty list; memory consumers are never cached
+	 */
+	@Override
+	public Collection<? extends Consumer> getConsumers() {
+		return new ArrayList<MemoryConsumer>();
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java
new file mode 100644
index 0000000..87e59c2
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java
@@ -0,0 +1,199 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+import com.att.nsa.cambria.metabroker.Broker;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class MemoryMetaBroker implements Broker {
+	/**
+	 * 
+	 * @param mq
+	 * @param configDb
+	 * @param settings
+	 */
+	public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) {
+	//public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb, rrNvReadable settings) {
+		fQueue = mq;
+		fTopics = new HashMap<String, MemTopic>();
+	}
+
+	@Override
+	public List<Topic> getAllTopics() {
+		return new LinkedList<Topic>(fTopics.values());
+	}
+
+	@Override
+	public Topic getTopic(String topic) {
+		return fTopics.get(topic);
+	}
+
+	@Override
+	public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas,
+			boolean transactionEnabled) throws TopicExistsException {
+		if (getTopic(topic) != null) {
+			throw new TopicExistsException(topic);
+		}
+		fQueue.createTopic(topic);
+		fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled));
+		return getTopic(topic);
+	}
+
+	@Override
+	public void deleteTopic(String topic) {
+		fTopics.remove(topic);
+		fQueue.removeTopic(topic);
+	}
+
+	private final MemoryQueue fQueue;
+	private final HashMap<String, MemTopic> fTopics;
+
+	private static class MemTopic implements Topic {
+		/**
+		 * constructor initialization
+		 * 
+		 * @param name
+		 * @param desc
+		 * @param owner
+		 * @param transactionEnabled
+		 */
+		public MemTopic(String name, String desc, String owner, boolean transactionEnabled) {
+			fName = name;
+			fDesc = desc;
+			fOwner = owner;
+			ftransactionEnabled = transactionEnabled;
+			fReaders = null;
+			fWriters = null;
+		}
+
+		@Override
+		public String getOwner() {
+			return fOwner;
+		}
+
+		@Override
+		public NsaAcl getReaderAcl() {
+			return fReaders;
+		}
+
+		@Override
+		public NsaAcl getWriterAcl() {
+			return fWriters;
+		}
+
+		@Override
+		public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
+			if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) {
+				throw new AccessDeniedException(user == null ? "" : user.getKey());
+			}
+		}
+
+		@Override
+		public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
+			if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) {
+				throw new AccessDeniedException(user == null ? "" : user.getKey());
+			}
+		}
+
+		@Override
+		public String getName() {
+			return fName;
+		}
+
+		@Override
+		public String getDescription() {
+			return fDesc;
+		}
+
+		@Override
+		public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+			if (!fOwner.equals(asUser.getKey())) {
+				throw new AccessDeniedException("User does not own this topic " + fName);
+			}
+			if (fWriters == null) {
+				fWriters = new NsaAcl();
+			}
+			fWriters.add(publisherId);
+		}
+
+		@Override
+		public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
+			if (!fOwner.equals(asUser.getKey())) {
+				throw new AccessDeniedException("User does not own this topic " + fName);
+			}
+			fWriters.remove(publisherId);
+		}
+
+		@Override
+		public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+			if (!fOwner.equals(asUser.getKey())) {
+				throw new AccessDeniedException("User does not own this topic " + fName);
+			}
+			if (fReaders == null) {
+				fReaders = new NsaAcl();
+			}
+			fReaders.add(consumerId);
+		}
+
+		@Override
+		public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
+			if (!fOwner.equals(asUser.getKey())) {
+				throw new AccessDeniedException("User does not own this topic " + fName);
+			}
+			fReaders.remove(consumerId);
+		}
+
+		private final String fName;
+		private final String fDesc;
+		private final String fOwner;
+		private NsaAcl fReaders;
+		private NsaAcl fWriters;
+		private boolean ftransactionEnabled;
+
+		@Override
+		public boolean isTransactionEnabled() {
+			return ftransactionEnabled;
+		}
+
+		@Override
+		public Set<String> getOwners() {
+			final TreeSet<String> set = new TreeSet<String> ();
+			set.add ( fOwner );
+			return set;
+		}
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java
new file mode 100644
index 0000000..a0dc8b8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java
@@ -0,0 +1,207 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.Publisher.message;
+
+/**
+ * When broker type is memory, then this class is doing all the topic related
+ * operations
+ * 
+ * @author author
+ *
+ */
+public class MemoryQueue {
+	// map from topic to list of msgs
+	private HashMap<String, LogBuffer> fQueue;
+	private HashMap<String, HashMap<String, Integer>> fOffsets;
+
+	/**
+	 * constructor storing hashMap objects in Queue and Offsets object
+	 */
+	public MemoryQueue() {
+		fQueue = new HashMap<String, LogBuffer>();
+		fOffsets = new HashMap<String, HashMap<String, Integer>>();
+	}
+
+	/**
+	 * method used to create topic
+	 * 
+	 * @param topic
+	 */
+	public synchronized void createTopic(String topic) {
+		LogBuffer q = fQueue.get(topic);
+		if (q == null) {
+			q = new LogBuffer(1024 * 1024);
+			fQueue.put(topic, q);
+		}
+	}
+
+	/**
+	 * method used to remove topic
+	 * 
+	 * @param topic
+	 */
+	public synchronized void removeTopic(String topic) {
+		LogBuffer q = fQueue.get(topic);
+		if (q != null) {
+			fQueue.remove(topic);
+		}
+	}
+
+	/**
+	 * method to write message on topic
+	 * 
+	 * @param topic
+	 * @param m
+	 */
+	public synchronized void put(String topic, message m) {
+		LogBuffer q = fQueue.get(topic);
+		if (q == null) {
+			createTopic(topic);
+			q = fQueue.get(topic);
+		}
+		q.push(m.getMessage());
+	}
+
+	/**
+	 * method to read consumer messages
+	 * 
+	 * @param topic
+	 * @param consumerName
+	 * @return
+	 */
+	public synchronized Consumer.Message get(String topic, String consumerName) {
+		final LogBuffer q = fQueue.get(topic);
+		if (q == null) {
+			return null;
+		}
+
+		HashMap<String, Integer> offsetMap = fOffsets.get(consumerName);
+		if (offsetMap == null) {
+			offsetMap = new HashMap<String, Integer>();
+			fOffsets.put(consumerName, offsetMap);
+		}
+		Integer offset = offsetMap.get(topic);
+		if (offset == null) {
+			offset = 0;
+		}
+
+		final msgInfo result = q.read(offset);
+		if (result != null && result.msg != null) {
+			offsetMap.put(topic, result.offset + 1);
+		}
+		return result;
+	}
+
+	/**
+	 * static inner class used to details about consumed messages
+	 * 
+	 * @author author
+	 *
+	 */
+	private static class msgInfo implements Consumer.Message {
+		/**
+		 * published message which is consumed
+		 */
+		public String msg;
+		/**
+		 * offset associated with message
+		 */
+		public int offset;
+
+		/**
+		 * get offset of messages
+		 */
+		@Override
+		public long getOffset() {
+			return offset;
+		}
+
+		/**
+		 * get consumed message
+		 */
+		@Override
+		public String getMessage() {
+			return msg;
+		}
+	}
+
+ /**
+ * 
+ * @author author
+ *
+ * private LogBuffer class has synchronized push and read method
+ */
+	private class LogBuffer {
+		private int fBaseOffset;
+		private final int fMaxSize;
+		private final ArrayList<String> fList;
+
+		/**
+		 * constructor initializing the offset, maxsize and list
+		 * 
+		 * @param maxSize
+		 */
+		public LogBuffer(int maxSize) {
+			fBaseOffset = 0;
+			fMaxSize = maxSize;
+			fList = new ArrayList<String>();
+		}
+
+		/**
+		 * pushing message
+		 * 
+		 * @param msg
+		 */
+		public synchronized void push(String msg) {
+			fList.add(msg);
+			while (fList.size() > fMaxSize) {
+				fList.remove(0);
+				fBaseOffset++;
+			}
+		}
+
+		/**
+		 * reading messages
+		 * 
+		 * @param offset
+		 * @return
+		 */
+		public synchronized msgInfo read(int offset) {
+			final int actual = Math.max(0, offset - fBaseOffset);
+
+			final msgInfo mi = new msgInfo();
+			mi.msg = (actual >= fList.size()) ? null : fList.get(actual);
+			if (mi.msg == null)
+				return null;
+
+			mi.offset = actual + fBaseOffset;
+			return mi;
+		}
+
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java
new file mode 100644
index 0000000..d653f6e
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java
@@ -0,0 +1,90 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.att.nsa.cambria.backends.Publisher;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class MemoryQueuePublisher implements Publisher {
+	/**
+	 * 
+	 * @param q
+	 * @param b
+	 */
+	public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) {
+		fBroker = b;
+		fQueue = q;
+	}
+
+	/**
+	 * sendBatchMessages
+	 * 
+	 * @param topic
+	 * @param kms
+	 */
+	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+	}
+	
+	/**
+	 * 
+	 * @param topic
+	 * @param msg
+	 * @throws IOException
+	 */
+	@Override
+	public void sendMessage(String topic, message msg) throws IOException {
+		if (null == fBroker.getTopic(topic)) {
+			try {
+				fBroker.createTopic(topic, topic, null, 8, 3, false);
+			} catch (TopicExistsException e) {
+				throw new RuntimeException(e);
+			}
+		}
+		fQueue.put(topic, msg);
+	}
+
+	@Override
+	/**
+	 * @param topic
+	 * @param msgs
+	 * @throws IOException
+	 */
+	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+		for (message m : msgs) {
+			sendMessage(topic, m);
+		}
+	}
+
+	private final MemoryMetaBroker fBroker;
+	private final MemoryQueue fQueue;
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java b/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java
new file mode 100644
index 0000000..c49ac4f
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.att.nsa.cambria.backends.Publisher;
+
+import kafka.producer.KeyedMessage;
+
/**
 * Publisher implementation that discards every message: all send methods
 * are deliberate no-ops.
 *
 * @author author
 */
public class MessageDropper implements Publisher {
	/**
	 * Drops the single message.
	 *
	 * @param topic ignored
	 * @param msg ignored
	 */
	@Override
	public void sendMessage(String topic, message msg) throws IOException {
	}

	/**
	 * Drops every message in the list.
	 *
	 * @param topic ignored
	 * @param msgs ignored
	 */
	@Override
	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
	}

	/**
	 * Drops the entire batch.
	 *
	 * @param topic ignored
	 * @param kms ignored
	 */
	@Override
	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
	}
}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java b/src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java
new file mode 100644
index 0000000..9ff8bd6
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.backends.memory;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.att.nsa.cambria.backends.Publisher;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * class used for logging perspective
+ * 
+ * @author author
+ *
+ */
+public class MessageLogger implements Publisher {
+	public MessageLogger() {
+	}
+
+	public void setFile(File f) throws FileNotFoundException {
+		fStream = new FileOutputStream(f, true);
+	}
+
+	/** 
+	 * 
+	 * @param topic
+	 * @param msg
+	 * @throws IOException
+	 */
+	@Override
+	public void sendMessage(String topic, message msg) throws IOException {
+		logMsg(msg);
+	}
+
+	/**
+	 * @param topic
+	 * @param msgs
+	 * @throws IOException
+	 */
+	@Override
+	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+		for (message m : msgs) {
+			logMsg(m);
+		}
+	}
+
+	/**
+	 * @param topic
+	 * @param kms
+	 * @throws IOException
+	 */
+	@Override
+	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
+
+	IOException {
+	}
+
+	private FileOutputStream fStream;
+
+	/**
+	 * 
+	 * @param msg
+	 * @throws IOException
+	 */
+	private void logMsg(message msg) throws IOException {
+		String key = msg.getKey();
+		if (key == null)
+			key = "<none>";
+
+		fStream.write('[');
+		fStream.write(key.getBytes());
+		fStream.write("] ".getBytes());
+		fStream.write(msg.getMessage().getBytes());
+		fStream.write('\n');
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java b/src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java
new file mode 100644
index 0000000..df4a2ed
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java
@@ -0,0 +1,88 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.io.Serializable;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
+/**
+ * 
+ * @author author
+ *
+ */
+@XmlRootElement
+public class ApiKeyBean implements Serializable {
+
+	private static final long serialVersionUID = -8219849086890567740L;
+	
+	private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+	
+	private String email;
+	private String description;
+	/**
+	 * constructor
+	 */
+	public ApiKeyBean() {
+		super();
+	}
+/**
+ * 
+ * @param email
+ * @param description
+ */
+	public ApiKeyBean(String email, String description) {
+		super();
+		this.email = email;
+		this.description = description;
+	}
+
+	public String getEmail() {
+		return email;
+	}
+
+	public void setEmail(String email) {
+		this.email = email;
+	}
+
+	public String getDescription() {
+		return description;
+	}
+
+	public void setDescription(String description) {
+		this.description = description;
+	}
+
+	public String getKey() {
+		return generateKey(16);
+	}
+
+	public String getSharedSecret() {
+		return generateKey(24);
+	}
+	
+	private static String generateKey ( int length  ) {
+		return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length );
+	}
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java
new file mode 100644
index 0000000..1b609b0
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java
@@ -0,0 +1,227 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.util.HashMap;
+import java.util.concurrent.TimeUnit;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.metrics.impl.CdmRateTicker;
+
+/**
+ * class provide rate information
+ * 
+ * @author author
+ *
+ */
+@Component
+public class DMaaPCambriaLimiter {
+	/**
+	 * constructor initializes
+	 * 
+	 * @param settings
+	 * @throws missingReqdSetting
+	 * @throws invalidSettingValue
+	 */
+	@Autowired
+	public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings)
+			throws missingReqdSetting, invalidSettingValue {
+		fRateInfo = new HashMap<String, RateInfo>();
+		fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+				CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
+		fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
+				CambriaConstants.kDefault_RateLimitWindowLength);
+		fSleepMs = settings.getLong(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+				CambriaConstants.kDefault_SleepMsOnRateLimit);
+	}
+
+	/**
+	 * static method provide the sleep time
+	 * 
+	 * @param ratePerMinute
+	 * @return
+	 */
+	public static long getSleepMsForRate(double ratePerMinute) {
+		if (ratePerMinute <= 0.0)
+			return 0;
+		return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
+	}
+
+	/**
+	 * Construct a rate limiter.
+	 * 
+	 * @param maxEmptyPollsPerMinute
+	 *            Pass <= 0 to deactivate rate limiting.
+	 *            @param windowLengthMins
+	 */
+	public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins) {
+		this(maxEmptyPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute));
+	}
+
+	/**
+	 * Construct a rate limiter
+	 * 
+	 * @param maxEmptyPollsPerMinute
+	 *            Pass <= 0 to deactivate rate limiting.
+	 * @param sleepMs
+	 * @param windowLengthMins
+	 */
+	public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins, long sleepMs) {
+		fRateInfo = new HashMap<String, RateInfo>();
+		fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+		fWindowLengthMins = windowLengthMins;
+		fSleepMs = Math.max(0, sleepMs);
+	}
+
+	/**
+	 * Tell the rate limiter about a call to a topic/group/id. If the rate is
+	 * too high, this call delays its return and throws an exception.
+	 * 
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 * @throws CambriaApiException
+	 */
+	public void onCall(String topic, String consumerGroup, String clientId) throws CambriaApiException {
+		// do nothing if rate is configured 0 or less
+		if (fMaxEmptyPollsPerMinute <= 0) {
+			return;
+		}
+
+		// setup rate info for this tuple
+		final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
+
+		final double rate = ri.onCall();
+		log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
+
+		if (rate > fMaxEmptyPollsPerMinute) {
+			try {
+				log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+						+ ".");
+				if (fSleepMs > 0) {
+					log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+							+ " ms sleep, then responding in error.");
+					Thread.sleep(fSleepMs);
+				} else {
+					log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+				}
+			} catch (InterruptedException e) {
+				// ignore
+			}
+			ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, 
+					DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), 
+					"This client is making too many requests. Please use a long poll "
+							+ "setting to decrease the number of requests that result in empty responses. ");
+			log.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 * 
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 * @param sentCount
+	 */
+	public void onSend(String topic, String consumerGroup, String clientId, long sentCount) {
+		// check for good replies
+		if (sentCount > 0) {
+			// that was a good send, reset the metric
+			getRateInfo(topic, consumerGroup, clientId).reset();
+		}
+	}
+
+	private static class RateInfo {
+		/**
+		 * constructor initialzes
+		 * 
+		 * @param label
+		 * @param windowLengthMinutes
+		 */
+		public RateInfo(String label, int windowLengthMinutes) {
+			fLabel = label;
+			fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
+					windowLengthMinutes, TimeUnit.MINUTES);
+		}
+
+		public String getLabel() {
+			return fLabel;
+		}
+
+		/**
+		 * CdmRateTicker is reset
+		 */
+		public void reset() {
+			fCallRateSinceLastMsgSend.reset();
+		}
+
+		/**
+		 * 
+		 * @return
+		 */
+		public double onCall() {
+			fCallRateSinceLastMsgSend.tick();
+			return fCallRateSinceLastMsgSend.getRate();
+		}
+
+		private final String fLabel;
+		private final CdmRateTicker fCallRateSinceLastMsgSend;
+	}
+
+	private final HashMap<String, RateInfo> fRateInfo;
+	private final double fMaxEmptyPollsPerMinute;
+	private final int fWindowLengthMins;
+	private final long fSleepMs;
+	//private static final Logger log = LoggerFactory.getLogger(DMaaPCambriaLimiter.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
+	private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
+		final String key = makeKey(topic, consumerGroup, clientId);
+		RateInfo ri = fRateInfo.get(key);
+		if (ri == null) {
+			ri = new RateInfo(key, fWindowLengthMins);
+			fRateInfo.put(key, ri);
+		}
+		return ri;
+	}
+
+	private String makeKey(String topic, String group, String id) {
+		return topic + "::" + group + "::" + id;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java
new file mode 100644
index 0000000..79a8e1f
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import com.att.nsa.cambria.utils.ConfigurationReader;
+
+/**
+ * DMaaPContext provide and maintain all the configurations , Http request/response
+ * Session and consumer Request Time
+ * @author author
+ *
+ */
+public class DMaaPContext {
+
+    // per-request state carried through the request-handling pipeline
+    private ConfigurationReader configReader;
+    private HttpServletRequest request;
+    private HttpServletResponse response;
+    private HttpSession session;
+    private String consumerRequestTime;
+    // process-wide counter appended to the timestamp in getBatchID() so that
+    // ids generated within the same second still differ (guarded by the
+    // synchronized method below)
+    static int i=0;
+    
+    /**
+     * Generates a batch id from the current time formatted as ddMMyyyyHHmmss,
+     * plus the process-wide counter {@code i}, which is incremented per call.
+     *
+     * NOTE(review): the NumberFormatException branch is effectively
+     * unreachable (the formatted text is always numeric), and the counter only
+     * grows, so ids drift away from the pure timestamp over the life of the
+     * process — confirm both are acceptable.
+     *
+     * @return the generated batch id, or 0 if the numeric parse fails
+     */
+    public synchronized static long getBatchID() {
+    	try{
+    		final long metricsSendTime = System.currentTimeMillis();
+    		final Date d = new Date(metricsSendTime);
+    		final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d);
+    		long dt= Long.valueOf(text)+i;
+    		i++;
+    		return dt;
+    	}
+    	catch(NumberFormatException ex){
+    		return 0;
+    	}
+    }
+    
+    public HttpServletRequest getRequest() {
+        return request;
+    }
+
+    public void setRequest(HttpServletRequest request) {
+        this.request = request;
+    }
+
+    public HttpServletResponse getResponse() {
+        return response;
+    }
+
+    public void setResponse(HttpServletResponse response) {
+        this.response = response;
+    }
+
+    /**
+     * Re-reads the session from the current request and caches it.
+     * NOTE(review): throws NPE if setRequest(...) was never called — confirm
+     * all callers set the request first.
+     */
+    public HttpSession getSession() {
+        this.session = request.getSession();
+        return session;
+    }
+
+    public void setSession(HttpSession session) {
+        this.session = session;
+    }
+
+    public ConfigurationReader getConfigReader() {
+        return configReader;
+    }
+
+    public void setConfigReader(ConfigurationReader configReader) {
+        this.configReader = configReader;
+    }
+
+    public String getConsumerRequestTime() {
+        return consumerRequestTime;
+    }
+
+    public void setConsumerRequestTime(String consumerRequestTime) {
+        this.consumerRequestTime = consumerRequestTime;
+    }
+    
+    
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
new file mode 100644
index 0000000..28d48fa
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
@@ -0,0 +1,319 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.backends.kafka.KafkaConsumer;
+import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache;
+import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import kafka.consumer.ConsumerConfig;
+import kafka.javaapi.consumer.ConsumerConnector;
+
+/**
+ * @author author
+ *
+ */
+public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
+
+	//private static final Logger log = LoggerFactory			.getLogger(DMaaPKafkaConsumerFactory.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
+	/**
+	 * Constructor: resolves this node's identifier and the Kafka/ZooKeeper
+	 * connect string from AJSC properties (with fallbacks), then optionally
+	 * creates and starts the shared consumer cache.
+	 * 
+	 * @param settings property reader (used here only for the port fallback)
+	 * @param metrics metrics sink handed to the consumer cache
+	 * @param curator curator client used to start the cache
+	 * @throws missingReqdSetting
+	 * @throws KafkaConsumerCacheException
+	 * @throws UnknownHostException
+	 */
+	public DMaaPKafkaConsumerFactory(
+			@Qualifier("propertyReader") rrNvReadable settings,
+			@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
+			@Qualifier("curator") CuratorFramework curator)
+			throws missingReqdSetting, KafkaConsumerCacheException,
+			UnknownHostException {
+		/*final String apiNodeId = settings.getString(
+				CambriaConstants.kSetting_ApiNodeIdentifier,
+				InetAddress.getLocalHost().getCanonicalHostName()
+						+ ":"
+						+ settings.getInt(CambriaConstants.kSetting_Port,
+								CambriaConstants.kDefault_Port));*/
+		// Node id: configured value when present, else canonical-hostname:port.
+		 String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+		CambriaConstants.kSetting_ApiNodeIdentifier);
+		if (apiNodeId == null){
+			
+			apiNodeId=InetAddress.getLocalHost().getCanonicalHostName()
+			+ ":"
+			+ settings.getInt(CambriaConstants.kSetting_Port,
+					CambriaConstants.kDefault_Port);
+		}
+		
+		log.info("This Cambria API Node identifies itself as [" + apiNodeId
+				+ "].");
+		final String mode = CambriaConstants.DMAAP;
+		/*fSettings = settings;
+		fZooKeeper = fSettings.getString(kSettings_KafkaZookeeper, settings
+				.getString(CambriaConstants.kSetting_ZkConfigDbServers,
+						CambriaConstants.kDefault_ZkConfigDbServers));*/
+
+		// ZooKeeper connect string: kafka.client.zookeeper property first,
+		// then the config-db servers property, then the compiled-in default.
+		String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
+		if(null==strkSettings_KafkaZookeeper){
+			 strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+			if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
+			
+		}
+		fZooKeeper=  strkSettings_KafkaZookeeper;
+		
+		//final boolean isCacheEnabled = fSettings.getBoolean(
+			//	kSetting_EnableCache, kDefault_IsCacheEnabled);
+		// NOTE(review): the local boolean below appears to shadow the
+		// ConsumerFactory string constant of the same name, so the property
+		// lookup key becomes "true"/"false" (the boolean's string form) rather
+		// than the intended setting name — confirm this ever reads the
+		// configured value.
+		boolean kSetting_EnableCache= kDefault_IsCacheEnabled;
+		String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_EnableCache+"");
+		if(null!=strkSetting_EnableCache)kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
+			
+				final boolean isCacheEnabled = kSetting_EnableCache;
+				
+				
+		fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId, 
+				metrics) : null;
+		if (fCache != null) {
+			fCache.startCache(mode, curator);
+		}
+	}
+
+	/**
+	 * Returns a consumer for (topic, group, id): the cached instance when one
+	 * exists, otherwise creates a fresh Kafka high-level consumer under a
+	 * cross-node Curator lock and caches it.
+	 *
+	 * @throws UnavailableException on cache errors, lock-acquisition failure,
+	 *         or ZooKeeper connectivity problems
+	 */
+	@Override
+	public Consumer getConsumerFor(String topic, String consumerGroupName,
+			String consumerId, int timeoutMs) throws UnavailableException {
+		KafkaConsumer kc;
+
+		try {
+			kc = (fCache != null) ? fCache.getConsumerFor(topic,
+					consumerGroupName, consumerId) : null;
+		} catch (KafkaConsumerCacheException e) {
+			throw new UnavailableException(e);
+		}
+
+		if (kc == null) {
+			
+			final InterProcessMutex ipLock = new InterProcessMutex( ConfigurationReader.getCurator(), "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
+//			final InterProcessMutex fLock = new InterProcessMutex(
+//					ConfigurationReader.getCurator(), "/consumerFactory/"
+//							+ topic + "/" + consumerGroupName + "/"
+//							+ consumerId);
+			boolean locked = false;
+			try {
+			
+				locked = ipLock.acquire(30, TimeUnit.SECONDS);
+				if (!locked) {
+					// FIXME: this seems to cause trouble in some cases. This exception
+					// gets thrown routinely. Possibly a consumer trying multiple servers
+					// at once, producing a never-ending cycle of overlapping locks?
+					// The problem is that it throws and winds up sending a 503 to the
+					// client, which would be incorrect if the client is causing trouble
+					// by switching back and forth.
+					
+					throw new UnavailableException("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + ", " + consumerGroupName + ", " + consumerId + ")");
+				}
+				
+//				if (!fLock.acquire(30, TimeUnit.SECONDS)) {
+//					throw new UnavailableException(
+//							"Could not acquire lock in order to create (topic, group, consumer) = "
+//									+ "(" + topic + ", " + consumerGroupName
+//									+ ", " + consumerId + ")");
+//				}
+
+				// NOTE(review): unguarded fCache dereference — when caching is
+				// disabled (fCache == null) kc is always null and this line
+				// throws NPE, while putConsumerFor below IS null-guarded.
+				// Confirm whether cache-disabled mode is supported here.
+				fCache.signalOwnership(topic, consumerGroupName, consumerId);
+
+				log.info("Creating Kafka consumer for group ["
+						+ consumerGroupName + "], consumer [" + consumerId
+						+ "], on topic [" + topic + "].");
+
+				// group is namespaced by topic so each (group, topic) pair gets
+				// its own Kafka-side group
+				final String fakeGroupName = consumerGroupName + "--" + topic;
+
+				final ConsumerConfig ccc = createConsumerConfig(fakeGroupName,
+						consumerId);
+				final ConsumerConnector cc = kafka.consumer.Consumer
+						.createJavaConsumerConnector(ccc);
+				kc = new KafkaConsumer(topic, consumerGroupName, consumerId, cc);
+
+				if (fCache != null) {
+					fCache.putConsumerFor(topic, consumerGroupName, consumerId,
+							kc);
+				}
+			} catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
+				log.warn("Kafka consumer couldn't connect to ZK.");
+				throw new UnavailableException("Couldn't connect to ZK.");
+			} catch (KafkaConsumerCacheException e) {
+				// cache failure is non-fatal: the consumer still works uncached
+				log.warn("Failed to cache consumer (this may have performance implications): "
+						+ e.getMessage());
+			} catch (Exception e) {
+				throw new UnavailableException(
+						"Error while acquiring consumer factory lock", e);
+			} finally {
+				if ( locked )
+				{
+					try {
+						ipLock.release();
+					} catch (Exception e) {
+						throw new UnavailableException("Error while releasing consumer factory lock", e);
+					}
+				}	
+			}
+		}
+
+		return kc;
+	}
+
+	/** Drops the cached consumer for this tuple, if caching is enabled. */
+	@Override
+	public synchronized void destroyConsumer(String topic,
+			String consumerGroup, String clientId) {
+		if (fCache != null) {
+			fCache.dropConsumer(topic, consumerGroup, clientId);
+		}
+	}
+
+	// NOTE(review): unlike destroyConsumer, the two methods below dereference
+	// fCache without a null check — NPE when caching is disabled; confirm.
+	@Override
+	public synchronized Collection<? extends Consumer> getConsumers() {
+		return fCache.getConsumers();
+	}
+
+	@Override
+	public synchronized void dropCache() {
+		fCache.dropAllConsumers();
+	}
+
+	/**
+	 * Builds the Kafka high-level-consumer config for one consumer: connection
+	 * and identity first, then the internal defaults, then any "kafka."-prefixed
+	 * overrides from the properties file.
+	 */
+	private ConsumerConfig createConsumerConfig(String groupId,
+			String consumerId) {
+		final Properties props = new Properties();
+		props.put("zookeeper.connect", fZooKeeper);
+		props.put("group.id", groupId);
+		props.put("consumer.id", consumerId);
+		//props.put("auto.commit.enable", "false");
+		// additional settings: start with our defaults, then pull in configured
+		// overrides
+		props.putAll(KafkaInternalDefaults);
+		for (String key : KafkaConsumerKeys) {
+			transferSettingIfProvided(props, key, "kafka");
+		}
+
+		return new ConsumerConfig(props);
+	}
+
+	//private final rrNvReadable fSettings;
+	// shared consumer cache; null when caching is disabled
+	private final KafkaConsumerCache fCache;
+
+	// ZooKeeper connect string resolved in the constructor
+	private String fZooKeeper;
+
+	private static final String kSettings_KafkaZookeeper = "kafka.client.zookeeper";
+
+	// baseline consumer settings loaded from the properties file at startup
+	private static final HashMap<String, String> KafkaInternalDefaults = new HashMap<String, String>();
+
+	/**
+	 * Loads baseline consumer settings (consumer timeout, zookeeper timeouts,
+	 * auto-commit, fetch size, etc.) from the message-router properties file
+	 * into KafkaInternalDefaults. Any failure (including a missing properties
+	 * map) is logged and leaves the defaults map unchanged.
+	 */
+	public static void populateKafkaInternalDefaultsMap() {
+			//@Qualifier("propertyReader") rrNvReadable setting) {
+		try {
+			
+			HashMap<String, String> map1= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
+        				
+			KafkaInternalDefaults.put("consumer.timeout.ms",
+							//	AJSCPropertiesMap.get(CambriaConstants.msgRtr_prop, "consumer.timeout.ms"));
+			map1.get( "consumer.timeout.ms"));
+			
+			KafkaInternalDefaults.put("zookeeper.connection.timeout.ms",
+					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.connection.timeout.ms"));
+					map1.get("zookeeper.connection.timeout.ms"));
+			KafkaInternalDefaults.put("zookeeper.session.timeout.ms",
+					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.session.timeout.ms"));
+			map1.get("zookeeper.session.timeout.ms"));
+			KafkaInternalDefaults.put("zookeeper.sync.time.ms",
+				//	AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.sync.time.ms"));
+			map1.get( "zookeeper.sync.time.ms"));
+			KafkaInternalDefaults.put("auto.commit.interval.ms",
+					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.interval.ms"));
+			map1.get( "auto.commit.interval.ms"));
+			KafkaInternalDefaults.put("fetch.message.max.bytes",
+					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "fetch.message.max.bytes"));
+			map1.get("fetch.message.max.bytes"));
+			KafkaInternalDefaults.put("auto.commit.enable",
+			//		AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.enable"));
+			map1.get("auto.commit.enable"));
+		} catch (Exception e) {
+			log.error("Failed to load Kafka Internal Properties.", e);
+		}
+	}
+
+	// consumer settings that may be overridden via "kafka."-prefixed properties
+	private static final String KafkaConsumerKeys[] = { "socket.timeout.ms",
+			"socket.receive.buffer.bytes", "fetch.message.max.bytes",
+			"auto.commit.interval.ms", "queued.max.message.chunks",
+			"rebalance.max.retries", "fetch.min.bytes", "fetch.wait.max.bytes",
+			"rebalance.backoff.ms", "refresh.leader.backoff.ms",
+			"auto.offset.reset", "consumer.timeout.ms",
+			"zookeeper.session.timeout.ms", "zookeeper.connection.timeout.ms",
+			"zookeeper.sync.time.ms" };
+
+	/** Joins a setting key with its prefix: {@code prefix + "." + key}. */
+	private static String makeLongKey(String key, String prefix) {
+		return prefix + "." + key;
+	}
+
+	/**
+	 * Copies the value of the prefixed property (e.g. "kafka.<key>") into the
+	 * target properties under the bare key, when the property is configured.
+	 */
+	private void transferSettingIfProvided(Properties target, String key,
+			String prefix) {
+		String keyVal= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,makeLongKey(key, prefix));
+	
+	//	if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
+		if (null!=keyVal) {
+	//		final String val = fSettings
+		//			.getString(makeLongKey(key, prefix), "");
+			log.info("Setting [" + key + "] to " + keyVal + ".");
+			target.put(key, keyVal);
+		}
+	}
+
+	}
+
+
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
new file mode 100644
index 0000000..9d53ef2
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
@@ -0,0 +1,462 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.exception.ZkNoNodeException;
+//import org.apache.log4j.Logger;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.metabroker.Broker;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaAclUtils;
+import com.att.nsa.security.NsaApiKey;
+
+import kafka.admin.AdminUtils;
+import kafka.utils.ZKStringSerializer$;
+
+/**
+ * class performing all topic operations
+ * 
+ * @author author
+ *
+ */
+
+public class DMaaPKafkaMetaBroker implements Broker {
+
+	//private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
+	// NOTE(review): logger is obtained for ConfigurationReader.class, not
+	// DMaaPKafkaMetaBroker.class — log lines from this broker are attributed
+	// to the wrong class; likely a copy-paste slip.
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+	
+
+	/**
+	 * DMaaPKafkaMetaBroker constructor: captures the ZK client, the config db,
+	 * and the "/topics" base path for per-topic metadata.
+	 * 
+	 * @param settings property reader (currently unused here)
+	 * @param zk ZooKeeper client used to list/check Kafka topics
+	 * @param configDb config db holding Cambria's own topic metadata
+	 */
+	public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings,
+			@Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) {
+		//fSettings = settings;
+		fZk = zk;
+		fCambriaConfig = configDb;
+		fBaseTopicData = configDb.parse("/topics");
+	}
+
+	/**
+	 * Lists every topic known to Kafka's ZK tree, wrapping each as a
+	 * KafkaTopic. Returns an empty list on a fresh cluster with no topics node.
+	 */
+	@Override
+	public List<Topic> getAllTopics() throws ConfigDbException {
+		log.info("Retrieving list of all the topics.");
+		final LinkedList<Topic> result = new LinkedList<Topic>();
+		try {
+			log.info("Retrieving all topics from root: " + zkTopicsRoot);
+			final List<String> topics = fZk.getChildren(zkTopicsRoot);
+			for (String topic : topics) {
+				result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
+			}
+
+			// NOTE(review): dataObj below is built and then discarded — dead
+			// code unless a side effect was intended.
+			JSONObject dataObj = new JSONObject();
+			dataObj.put("topics", new JSONObject());
+
+			for (String topic : topics) {
+				dataObj.getJSONObject("topics").put(topic, new JSONObject());
+			}
+		} catch (ZkNoNodeException excp) {
+			// very fresh kafka doesn't have any topics or a topics node
+			log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp);
+		}
+		return result;
+	}
+
+	/**
+	 * Looks up one topic; returns null when Kafka's ZK tree has no such topic.
+	 */
+	@Override
+	public Topic getTopic(String topic) throws ConfigDbException {
+		if (fZk.exists(zkTopicsRoot + "/" + topic)) {
+			return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic);
+		}
+		// else: no such topic in kafka
+		return null;
+	}
+
+	/**
+	 * Static helper building a KafkaTopic view over the config db.
+	 * 
+	 * @param db config db holding topic metadata
+	 * @param base base path of topic metadata ("/topics")
+	 * @param topic topic name
+	 * @return a KafkaTopic loaded from the config db
+	 * @throws ConfigDbException
+	 */
+	public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException {
+		return new KafkaTopic(topic, db, base);
+	}
+
+	/**
+	 * Creates the topic in Kafka (via ZK + AdminUtils) and then records
+	 * Cambria's own metadata entry for it. Validates replica count (1..3) and
+	 * partition count (>= 1) up front.
+	 */
+	@Override
+	public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
+			boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
+		log.info("Creating topic: " + topic);
+		try {
+			log.info("Check if topic [" + topic + "] exist.");
+			// first check for existence "our way"
+			final Topic t = getTopic(topic);
+			if (t != null) {
+				log.info("Could not create topic [" + topic + "]. Topic Already exists.");
+				// NOTE(review): "Alreay" typo in the client-visible message.
+				throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists.");
+			}
+		} catch (ConfigDbException e1) {
+			log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1);
+			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+					"Couldn't check topic data in config db.");
+		}
+
+		// we only allow 3 replicas. (If we don't test this, we get weird
+		// results from the cluster,
+		// so explicit test and fail.)
+		if (replicas < 1 || replicas > 3) {
+			log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3.");
+			throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+					"The replica count must be between 1 and 3.");
+		}
+		if (partitions < 1) {
+			log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1.");
+			throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1.");
+		}
+
+		// create via kafka
+		try {
+			ZkClient zkClient = null;
+			try {
+				log.info("Loading zookeeper client for creating topic.");
+				// FIXME: use of this scala module$ thing is a goofy hack to
+				// make Kafka aware of the
+				// topic creation. (Otherwise, the topic is only partially
+				// created in ZK.)
+				zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+						ZKStringSerializer$.MODULE$);
+
+				log.info("Zookeeper client loaded successfully. Creating topic.");
+				AdminUtils.createTopic(zkClient, topic, partitions, replicas, new Properties());
+			} catch (kafka.common.TopicExistsException e) {
+				log.error("Topic [" + topic + "] could not be created. " + e.getMessage(), e);
+				throw new TopicExistsException(topic);
+			} catch (ZkNoNodeException e) {
+				log.error("Topic [" + topic + "] could not be created. The Kafka cluster is not setup.", e);
+				// Kafka throws this when the server isn't running (and perhaps
+				// hasn't ever run)
+				throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+						"The Kafka cluster is not setup.");
+			} catch (kafka.admin.AdminOperationException e) {
+				// Kafka throws this when the server isn't running (and perhaps
+				// hasn't ever run)
+				log.error("The Kafka cluster can't handle your request. Talk to the administrators: " + e.getMessage(),
+						e);
+				throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+						"The Kafka cluster can't handle your request. Talk to the administrators.");
+			} finally {
+				log.info("Closing zookeeper connection.");
+				if (zkClient != null)
+					zkClient.close();
+			}
+
+			log.info("Creating topic entry for topic: " + topic);
+			// underlying Kafka topic created. now setup our API info
+			return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
+		} catch (ConfigDbException excp) {
+			log.error("Failed to create topic data. Talk to the administrators: " + excp.getMessage(), excp);
+			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+					"Failed to create topic data. Talk to the administrators.");
+		}
+	}
+
+	/**
+	 * Deletes the underlying Kafka topic via AdminUtils (does not remove
+	 * Cambria's own metadata entry — see the trailing commented-out code).
+	 */
+	@Override
+	public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException {
+		log.info("Deleting topic: " + topic);
+		ZkClient zkClient = null;
+		try {
+			log.info("Loading zookeeper client for topic deletion.");
+			// FIXME: use of this scala module$ thing is a goofy hack to make
+			// Kafka aware of the
+			// topic creation. (Otherwise, the topic is only partially created
+			// in ZK.)
+			zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+					ZKStringSerializer$.MODULE$);
+
+			log.info("Zookeeper client loaded successfully. Deleting topic.");
+			AdminUtils.deleteTopic(zkClient, topic);
+		// NOTE(review): catching TopicExistsException on a DELETE path is odd
+		// (copied from createTopic?) — confirm intent.
+		} catch (kafka.common.TopicExistsException e) {
+			log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
+			throw new TopicExistsException(topic);
+		} catch (ZkNoNodeException e) {
+			log.error("Failed to delete topic [" + topic + "]. The Kafka cluster is not setup." + e.getMessage(), e);
+			// Kafka throws this when the server isn't running (and perhaps
+			// hasn't ever run)
+			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, "The Kafka cluster is not setup.");
+		} catch (kafka.admin.AdminOperationException e) {
+			// Kafka throws this when the server isn't running (and perhaps
+			// hasn't ever run)
+			log.error("The Kafka cluster can't handle your request. Talk to the administrators." + e.getMessage(), e);
+			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
+					"The Kafka cluster can't handle your request. Talk to the administrators.");
+		} finally {
+			log.info("Closing zookeeper connection.");
+			if (zkClient != null)
+				zkClient.close();
+		}
+
+		// throw new UnsupportedOperationException ( "We can't programmatically
+		// delete Kafka topics yet." );
+	}
+
+	//private final rrNvReadable fSettings;
+	private final ZkClient fZk;
+	private final ConfigDb fCambriaConfig;
+	private final ConfigPath fBaseTopicData;
+
+	// Kafka's topic registry path in ZooKeeper
+	private static final String zkTopicsRoot = "/brokers/topics";
+	// empty-ACL sentinel used when an owned topic has no explicit ACL stored
+	private static final JSONObject kEmptyAcl = new JSONObject();
+
+	/**
+	 * Stores the Cambria metadata entry (owner, description, txenabled) for a
+	 * topic and returns the resulting KafkaTopic.
+	 * 
+	 * @param name topic name
+	 * @param desc topic description
+	 * @param owner owning API key
+	 * @param transactionEnabled whether transactions are enabled for the topic
+	 * @return the KafkaTopic re-read from the config db
+	 * @throws ConfigDbException
+	 */
+	public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled)
+			throws ConfigDbException {
+		return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled);
+	}
+
+	/**
+	 * Static variant: serializes {owner, description, txenabled} to JSON,
+	 * stores it under basePath/name, and returns the KafkaTopic view.
+	 * 
+	 * @param db config db
+	 * @param basePath base path of topic metadata
+	 * @param name topic name
+	 * @param desc topic description
+	 * @param owner owning API key
+	 * @param transactionEnabled whether transactions are enabled for the topic
+	 * @return the KafkaTopic re-read from the config db
+	 * @throws ConfigDbException
+	 */
+	public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner,
+			boolean transactionEnabled) throws ConfigDbException {
+		final JSONObject o = new JSONObject();
+		o.put("owner", owner);
+		o.put("description", desc);
+		o.put("txenabled", transactionEnabled);
+		db.store(basePath.getChild(name), o.toString());
+		return new KafkaTopic(name, db, basePath);
+	}
+
+	/**
+	 * Kafka-backed Topic implementation: holds the topic's owner, description,
+	 * and reader/writer ACLs (loaded from the config db), and performs the
+	 * permit/deny read-write operations by updating those ACLs.
+	 * 
+	 * @author author
+	 *
+	 */
+	public static class KafkaTopic implements Topic {
+		/**
+		 * Constructor: loads the topic's JSON metadata from the config db
+		 * (treating a missing entry as "{}") and derives owner, description,
+		 * transaction flag, and ACLs.
+		 * 
+		 * @param name topic name
+		 * @param configdb config db
+		 * @param baseTopic base path of topic metadata
+		 * @throws ConfigDbException
+		 */
+		public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException {
+			fName = name;
+			fConfigDb = configdb;
+			fBaseTopicData = baseTopic;
+
+			String data = fConfigDb.load(fBaseTopicData.getChild(fName));
+			if (data == null) {
+				data = "{}";
+			}
+
+			final JSONObject o = new JSONObject(data);
+			fOwner = o.optString("owner", "");
+			fDesc = o.optString("description", "");
+			fTransactionEnabled = o.optBoolean("txenabled", false);// default
+																	// value is
+																	// false
+			// if this topic has an owner, it needs both read/write ACLs. If there's no
+						// owner (or it's empty), null is okay -- this is for existing or implicitly
+						// created topics.
+						JSONObject readers = o.optJSONObject ( "readers" );
+						if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
+						fReaders = fromJson ( readers );
+
+						JSONObject writers = o.optJSONObject ( "writers" );
+						if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
+						fWriters = fromJson ( writers );
+		}
+		 /**
+		  * Deserializes an ACL from its JSON form: each entry of the "allowed"
+		  * array becomes a user in the NsaAcl. A null input yields an empty ACL.
+		  */
+		 private NsaAcl fromJson(JSONObject o) {
+				NsaAcl acl = new NsaAcl();
+				if (o != null) {
+					JSONArray a = o.optJSONArray("allowed");
+					if (a != null) {
+						for (int i = 0; i < a.length(); ++i) {
+							String user = a.getString(i);
+							acl.add(user);
+						}
+					}
+				}
+				return acl;
+			}
+		@Override
+		public String getName() {
+			return fName;
+		}
+
+		@Override
+		public String getOwner() {
+			return fOwner;
+		}
+
+		@Override
+		public String getDescription() {
+			return fDesc;
+		}
+
+		@Override
+		public NsaAcl getReaderAcl() {
+			return fReaders;
+		}
+
+		@Override
+		public NsaAcl getWriterAcl() {
+			return fWriters;
+		}
+
+		@Override
+		public void checkUserRead(NsaApiKey user) throws AccessDeniedException  {
+			NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user );
+		}
+
+		@Override
+		public void checkUserWrite(NsaApiKey user) throws AccessDeniedException  {
+			NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user );
+		}
+
+		@Override
+		public void permitWritesFromUser(String pubId, NsaApiKey asUser)
+				throws ConfigDbException, AccessDeniedException {
+			updateAcl(asUser, false, true, pubId);
+		}
+
+		@Override
+		public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException {
+			updateAcl(asUser, false, false, pubId);
+		}
+
+		@Override
+		public void permitReadsByUser(String consumerId, NsaApiKey asUser)
+				throws ConfigDbException, AccessDeniedException {
+			updateAcl(asUser, true, true, consumerId);
+		}
+
+		@Override
+		public void denyReadsByUser(String consumerId, NsaApiKey asUser)
+				throws ConfigDbException, AccessDeniedException {
+			updateAcl(asUser, true, false, consumerId);
+		}
+
+		/**
+		 * Adds or removes {@code key} on the reader or writer ACL (as checked
+		 * by NsaAclUtils for {@code asUser}) and persists the full metadata
+		 * record back to the config db.
+		 *
+		 * @param asUser the user requesting the change (must be permitted)
+		 * @param reader true to change the reader ACL, false for the writer ACL
+		 * @param add true to add the key, false to remove it
+		 * @param key the publisher/consumer id being permitted or denied
+		 */
+		private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key)
+				throws ConfigDbException, AccessDeniedException{
+			try
+			{
+				final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add );
+	
+				// we have to assume we have current data, or load it again. for the expected use
+				// case, assuming we can overwrite the data is fine.
+				final JSONObject o = new JSONObject ();
+				o.put ( "owner", fOwner );
+				o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) );
+				o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) );
+				fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () );
+				
+				log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName );
+	
+			}
+			// NOTE(review): the two catch blocks below simply rethrow — they
+			// add nothing and could be removed.
+			catch ( ConfigDbException x )
+			{
+				throw x;
+			}
+			catch ( AccessDeniedException x )
+			{
+				throw x;
+			}
+			
+		}
+
+		/** Serializes an ACL to JSON, mapping a null ACL to null. */
+		private JSONObject safeSerialize(NsaAcl acl) {
+			return acl == null ? null : acl.serialize();
+		}
+
+		private final String fName;
+		private final ConfigDb fConfigDb;
+		private final ConfigPath fBaseTopicData;
+		private final String fOwner;
+		private final String fDesc;
+		private final NsaAcl fReaders;
+		private final NsaAcl fWriters;
+		private boolean fTransactionEnabled;
+
+		public boolean isTransactionEnabled() {
+			return fTransactionEnabled;
+		}
+
+		@Override
+		public Set<String> getOwners() {
+			final TreeSet<String> owners = new TreeSet<String> ();
+			owners.add ( fOwner );
+			return owners;
+		}
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java
new file mode 100644
index 0000000..3c3aa6d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java
@@ -0,0 +1,232 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import com.att.nsa.apiServer.metrics.cambria.DMaaPMetricsSender;
+import com.att.nsa.cambria.CambriaApiVersionInfo;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.impl.CdmConstant;
+import com.att.nsa.metrics.impl.CdmCounter;
+import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl;
+import com.att.nsa.metrics.impl.CdmMovingAverage;
+import com.att.nsa.metrics.impl.CdmRateTicker;
+import com.att.nsa.metrics.impl.CdmSimpleMetric;
+import com.att.nsa.metrics.impl.CdmStringConstant;
+import com.att.nsa.metrics.impl.CdmTimeSince;
+
+/*@Component("dMaaPMetricsSet")*/
+/**
+ * Metrics related information
+ * 
+ * @author author
+ *
+ */
+public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
+
+	// server identity and lifetime
+	private final CdmStringConstant fVersion;
+	private final CdmConstant fStartTime;
+	private final CdmTimeSince fUpTime;
+
+	// events received from publishers: running total plus 1min/10min/1hr rates
+	private final CdmCounter fRecvTotal;
+	private final CdmRateTicker fRecvEpsInstant;
+	private final CdmRateTicker fRecvEpsShort;
+	private final CdmRateTicker fRecvEpsLong;
+
+	// events delivered to consumers: running total plus 1min/10min/1hr rates
+	private final CdmCounter fSendTotal;
+	private final CdmRateTicker fSendEpsInstant;
+	private final CdmRateTicker fSendEpsShort;
+	private final CdmRateTicker fSendEpsLong;
+
+	// kafka consumer cache behavior counters
+	private final CdmCounter fKafkaConsumerCacheMiss;
+	private final CdmCounter fKafkaConsumerCacheHit;
+	private final CdmCounter fKafkaConsumerClaimed;
+	private final CdmCounter fKafkaConsumerTimeout;
+
+	// sends per receive, derived on demand from the two totals
+	private final CdmSimpleMetric fFanOutRatio;
+
+	// per-path metrics, registered lazily the first time a path is reported.
+	// NOTE(review): plain HashMaps — concurrent onRouteComplete calls would
+	// race; confirm callers are single-threaded before relying on this.
+	private final HashMap<String, CdmRateTicker> fPathUseRates;
+	private final HashMap<String, CdmMovingAverage> fPathAvgs;
+
+	// drives the periodic metrics send started by setupCambriaSender()
+	private final ScheduledExecutorService fScheduler;
+
+	/**
+	 * Builds the registry and registers every standard Cambria metric.
+	 * (The previously-declared fSettings field was never read and has been removed.)
+	 *
+	 * @param cs server settings; not read here, retained for wiring compatibility
+	 */
+	public DMaaPMetricsSet(rrNvReadable cs) {
+		fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
+		super.putItem("version", fVersion);
+
+		final long startTime = System.currentTimeMillis();
+		final Date d = new Date(startTime);
+		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
+		fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text);
+		super.putItem("startTime", fStartTime);
+
+		fUpTime = new CdmTimeSince("seconds since start");
+		super.putItem("upTime", fUpTime);
+
+		fRecvTotal = new CdmCounter("Total events received since start");
+		super.putItem("recvTotalEvents", fRecvTotal);
+
+		fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+		super.putItem("recvEpsInstant", fRecvEpsInstant);
+
+		fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+		super.putItem("recvEpsShort", fRecvEpsShort);
+
+		fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+		super.putItem("recvEpsLong", fRecvEpsLong);
+
+		fSendTotal = new CdmCounter("Total events sent since start");
+		super.putItem("sendTotalEvents", fSendTotal);
+
+		fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
+		super.putItem("sendEpsInstant", fSendEpsInstant);
+
+		fSendEpsShort = new CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
+		super.putItem("sendEpsShort", fSendEpsShort);
+
+		fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
+		super.putItem("sendEpsLong", fSendEpsLong);
+
+		fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses");
+		super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss);
+
+		fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits");
+		super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit);
+
+		fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed");
+		super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed);
+
+		fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout");
+		super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout);
+
+		// ratio computed from the totals at read time; guards divide-by-zero
+		fFanOutRatio = new CdmSimpleMetric() {
+			@Override
+			public String getRawValueString() {
+				return getRawValue().toString();
+			}
+
+			@Override
+			public Number getRawValue() {
+				final double s = fSendTotal.getValue();
+				final double r = fRecvTotal.getValue();
+				return r == 0.0 ? 0.0 : s / r;
+			}
+
+			@Override
+			public String summarize() {
+				return getRawValueString() + " sends per recv";
+			}
+		};
+		super.putItem("fanOut", fFanOutRatio);
+
+		// these are added to the metrics catalog as they're discovered
+		fPathUseRates = new HashMap<String, CdmRateTicker>();
+		fPathAvgs = new HashMap<String, CdmMovingAverage>();
+
+		fScheduler = Executors.newScheduledThreadPool(1);
+	}
+
+	/**
+	 * Starts the periodic metrics publication to the DMaaP metrics topic.
+	 */
+	@Override
+	public void setupCambriaSender() {
+		DMaaPMetricsSender.sendPeriodically(fScheduler, this, "cambria.apinode.metrics.dmaap");
+	}
+
+	/**
+	 * Records one completed request on the given path: bumps the path's call
+	 * rate and folds the duration into its moving average, registering the
+	 * per-path metrics on first sight of the path.
+	 *
+	 * @param name the route/path identifier
+	 * @param durationMs how long the request took, in milliseconds
+	 */
+	@Override
+	public void onRouteComplete(String name, long durationMs) {
+		CdmRateTicker ticker = fPathUseRates.get(name);
+		if (ticker == null) {
+			ticker = new CdmRateTicker("calls/min on path " + name, 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS);
+			fPathUseRates.put(name, ticker);
+			super.putItem("pathUse_" + name, ticker);
+		}
+		ticker.tick();
+
+		CdmMovingAverage durs = fPathAvgs.get(name);
+		if (durs == null) {
+			durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES);
+			fPathAvgs.put(name, durs);
+			super.putItem("pathDurationMs_" + name, durs);
+		}
+		durs.tick(durationMs);
+	}
+
+	/**
+	 * Records events arriving from publishers (counted as "recv" metrics).
+	 *
+	 * @param amount number of events; ignored when not positive
+	 */
+	@Override
+	public void publishTick(int amount) {
+		if (amount > 0) {
+			fRecvTotal.bumpBy(amount);
+			fRecvEpsInstant.tick(amount);
+			fRecvEpsShort.tick(amount);
+			fRecvEpsLong.tick(amount);
+		}
+	}
+
+	/**
+	 * Records events delivered to consumers (counted as "send" metrics).
+	 *
+	 * @param amount number of events; ignored when not positive
+	 */
+	@Override
+	public void consumeTick(int amount) {
+		if (amount > 0) {
+			fSendTotal.bumpBy(amount);
+			fSendEpsInstant.tick(amount);
+			fSendEpsShort.tick(amount);
+			fSendEpsLong.tick(amount);
+		}
+	}
+
+	/** Counts one kafka consumer cache miss. */
+	@Override
+	public void onKafkaConsumerCacheMiss() {
+		fKafkaConsumerCacheMiss.bump();
+	}
+
+	/** Counts one kafka consumer cache hit. */
+	@Override
+	public void onKafkaConsumerCacheHit() {
+		fKafkaConsumerCacheHit.bump();
+	}
+
+	/** Counts one kafka consumer claim. */
+	@Override
+	public void onKafkaConsumerClaimed() {
+		fKafkaConsumerClaimed.bump();
+	}
+
+	/** Counts one kafka consumer timeout. */
+	@Override
+	public void onKafkaConsumerTimeout() {
+		fKafkaConsumerTimeout.bump();
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java
new file mode 100644
index 0000000..ce257d4
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java
@@ -0,0 +1,139 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import java.security.Key;
+
+//import org.apache.log4-j.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.EncryptingLayer;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.EncryptingApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import com.att.nsa.util.rrConvertor;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class DMaaPNsaApiDb {
+
+	// zookeeper-backed config db holding the API key records
+	private DMaaPZkConfigDb cdb;
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class);
+
+	/**
+	 * Constructs the API db wrapper.
+	 *
+	 * @param settings server settings; not read here, retained for Spring wiring compatibility
+	 * @param cdb the zookeeper config db backing the key store
+	 */
+	@Autowired
+	public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) {
+		this.setCdb(cdb);
+	}
+
+	/**
+	 * Builds the API key database: encrypted when both a key and an init
+	 * vector are configured, unencrypted when neither is, and an error when
+	 * exactly one of the pair is present.
+	 *
+	 * <p>The key and iv are read from the "cambria.secureConfig.key" and
+	 * "cambria.secureConfig.iv" properties of MsgRtrApi.properties; the
+	 * settings argument is no longer consulted.
+	 *
+	 * @param settings server settings (unused; retained for interface stability)
+	 * @param cdb the config db the key database persists into
+	 * @return an encrypted or unencrypted NsaApiDb depending on configuration
+	 * @throws ConfigDbException on a config db problem
+	 * @throws missingReqdSetting when only one of the key/iv pair is configured
+	 */
+	public static NsaApiDb<NsaSimpleApiKey> buildApiKeyDb(
+			rrNvReadable settings, ConfigDb cdb) throws ConfigDbException,
+			missingReqdSetting {
+		// Cambria uses an encrypted api key db when both secrets are present
+		final String keyBase64 = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(
+				CambriaConstants.msgRtr_prop, "cambria.secureConfig.key");
+		final String initVectorBase64 = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(
+				CambriaConstants.msgRtr_prop, "cambria.secureConfig.iv");
+
+		// if neither value was provided, don't encrypt the api key db
+		if (keyBase64 == null && initVectorBase64 == null) {
+			log.info("This server is configured to use an unencrypted API key database. See the settings documentation.");
+			return new BaseNsaApiDbImpl<NsaSimpleApiKey>(cdb,
+					new NsaSimpleApiKeyFactory());
+		} else if (keyBase64 == null) {
+			// exactly one of the pair was provided; that's a misconfiguration
+			throw new missingReqdSetting("cambria.secureConfig.key");
+		} else if (initVectorBase64 == null) {
+			throw new missingReqdSetting("cambria.secureConfig.iv");
+		} else {
+			log.info("This server is configured to use an encrypted API key database.");
+			final Key key = EncryptingLayer.readSecretKey(keyBase64);
+			final byte[] iv = rrConvertor.base64Decode(initVectorBase64);
+			return new EncryptingApiDbImpl<NsaSimpleApiKey>(cdb,
+					new NsaSimpleApiKeyFactory(), key, iv);
+		}
+	}
+
+	/**
+	 * @return the zookeeper config db backing this API db
+	 */
+	public DMaaPZkConfigDb getCdb() {
+		return cdb;
+	}
+
+	/**
+	 * @param cdb the zookeeper config db to use
+	 */
+	public void setCdb(DMaaPZkConfigDb cdb) {
+		this.cdb = cdb;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java
new file mode 100644
index 0000000..590ecd6
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java
@@ -0,0 +1,45 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Created for Zookeeper client which will read configuration and settings parameter
+ * @author author
+ *
+ */
+public class DMaaPZkClient extends ZkClient {
+
+	/**
+	 * Opens a zookeeper client against the connection string resolved by
+	 * {@link ConfigurationReader#getMainZookeeperConnectionString()}.
+	 *
+	 * @param settings property reader injected by Spring; NOTE(review): not
+	 *        read here — the connection string comes from ConfigurationReader
+	 */
+	public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) {
+		super(ConfigurationReader.getMainZookeeperConnectionString());
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java
new file mode 100644
index 0000000..8fe96e9
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.beans;
+
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.configs.confimpl.ZkConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+//import com.att.nsa.configs.confimpl.ZkConfigDb;
+/**
+ * Provide the zookeeper config db connection 
+ * @author author
+ *
+ */
+public class DMaaPZkConfigDb extends ZkConfigDb {
+	/**
+	 * Builds the zookeeper-backed config db from the connection string and
+	 * config-db root resolved by ConfigurationReader.
+	 *
+	 * @param zk the DMaaP zookeeper client bean; NOTE(review): not used here —
+	 *        the superclass connects from the resolved string itself
+	 * @param settings property reader bean; NOTE(review): not read in this
+	 *        constructor
+	 */
+	public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk,
+			@Qualifier("propertyReader") rrNvReadable settings) {
+		
+		super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot());
+		
+	}
+	
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/LogDetails.java b/src/main/java/com/att/nsa/cambria/beans/LogDetails.java
new file mode 100644
index 0000000..5a195e9
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/LogDetails.java
@@ -0,0 +1,214 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package com.att.nsa.cambria.beans;
+
+import java.util.Date;
+
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.utils.Utils;
+
+/**
+ * @author author
+ *
+ */
+
+public class LogDetails {
+
+	private String publisherId;
+	private String topicId;
+	private String subscriberGroupId;
+	private String subscriberId;
+	private String publisherIp;
+	private String messageBatchId;
+	private String messageSequence;
+	private String messageTimestamp;
+	private String consumeTimestamp;
+	// timestamp component used as the first segment of the transaction id
+	private String transactionIdTs;
+	private String serverIp;
+
+	private long messageLengthInBytes;
+	private long totalMessageCount;
+
+	private boolean transactionEnabled;
+
+	/**
+	 * Carries per-message logging details for transaction-enabled topics.
+	 */
+	public LogDetails() {
+		super();
+	}
+
+	/**
+	 * Builds the transaction id as ts::publisherIp::batchId::sequence,
+	 * joined with {@link CambriaConstants#TRANSACTION_ID_SEPARATOR}.
+	 *
+	 * @return the composed transaction id
+	 */
+	public String getTransactionId() {
+		// append fields directly instead of concatenating first, so the
+		// builder actually avoids intermediate strings
+		return new StringBuilder()
+				.append(transactionIdTs)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(publisherIp)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(messageBatchId)
+				.append(CambriaConstants.TRANSACTION_ID_SEPARATOR)
+				.append(messageSequence)
+				.toString();
+	}
+
+	public String getPublisherId() {
+		return publisherId;
+	}
+
+	public void setPublisherId(String publisherId) {
+		this.publisherId = publisherId;
+	}
+
+	public String getTopicId() {
+		return topicId;
+	}
+
+	public void setTopicId(String topicId) {
+		this.topicId = topicId;
+	}
+
+	public String getSubscriberGroupId() {
+		return subscriberGroupId;
+	}
+
+	public void setSubscriberGroupId(String subscriberGroupId) {
+		this.subscriberGroupId = subscriberGroupId;
+	}
+
+	public String getSubscriberId() {
+		return subscriberId;
+	}
+
+	public void setSubscriberId(String subscriberId) {
+		this.subscriberId = subscriberId;
+	}
+
+	public String getPublisherIp() {
+		return publisherIp;
+	}
+
+	public void setPublisherIp(String publisherIp) {
+		this.publisherIp = publisherIp;
+	}
+
+	public String getMessageBatchId() {
+		return messageBatchId;
+	}
+
+	/**
+	 * Formats the numeric batch id via Utils before storing it; see also the
+	 * String overload which stores the value verbatim.
+	 *
+	 * @param messageBatchId the numeric batch sequence id
+	 */
+	public void setMessageBatchId(Long messageBatchId) {
+		this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId);
+	}
+
+	public String getMessageSequence() {
+		return messageSequence;
+	}
+
+	public void setMessageSequence(String messageSequence) {
+		this.messageSequence = messageSequence;
+	}
+
+	public String getMessageTimestamp() {
+		return messageTimestamp;
+	}
+
+	public void setMessageTimestamp(String messageTimestamp) {
+		this.messageTimestamp = messageTimestamp;
+	}
+
+	/**
+	 * @return the current time formatted by Utils — computed at call time,
+	 *         not a stored field
+	 */
+	public String getPublishTimestamp() {
+		return Utils.getFormattedDate(new Date());
+	}
+
+	public String getConsumeTimestamp() {
+		return consumeTimestamp;
+	}
+
+	public void setConsumeTimestamp(String consumeTimestamp) {
+		this.consumeTimestamp = consumeTimestamp;
+	}
+
+	public long getMessageLengthInBytes() {
+		return messageLengthInBytes;
+	}
+
+	public void setMessageLengthInBytes(long messageLengthInBytes) {
+		this.messageLengthInBytes = messageLengthInBytes;
+	}
+
+	public long getTotalMessageCount() {
+		return totalMessageCount;
+	}
+
+	public void setTotalMessageCount(long totalMessageCount) {
+		this.totalMessageCount = totalMessageCount;
+	}
+
+	public boolean isTransactionEnabled() {
+		return transactionEnabled;
+	}
+
+	public void setTransactionEnabled(boolean transactionEnabled) {
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	public String getTransactionIdTs() {
+		return transactionIdTs;
+	}
+
+	public void setTransactionIdTs(String transactionIdTs) {
+		this.transactionIdTs = transactionIdTs;
+	}
+
+	/**
+	 * Formats all publish-side details as a single bracketed string for logging.
+	 *
+	 * @return the formatted log entry
+	 */
+	public String getPublisherLogDetails() {
+		// the original concatenated each "label=" + value before appending,
+		// which defeated the StringBuilder; append the pieces separately
+		final StringBuilder buffer = new StringBuilder();
+		buffer.append("[publisherId=").append(publisherId);
+		buffer.append(", topicId=").append(topicId);
+		buffer.append(", messageTimestamp=").append(messageTimestamp);
+		buffer.append(", publisherIp=").append(publisherIp);
+		buffer.append(", messageBatchId=").append(messageBatchId);
+		buffer.append(", messageSequence=").append(messageSequence);
+		buffer.append(", messageLengthInBytes=").append(messageLengthInBytes);
+		buffer.append(", transactionEnabled=").append(transactionEnabled);
+		buffer.append(", transactionId=").append(getTransactionId());
+		buffer.append(", publishTimestamp=").append(getPublishTimestamp());
+		buffer.append(", serverIp=").append(getServerIp()).append("]");
+		return buffer.toString();
+	}
+
+	public String getServerIp() {
+		return serverIp;
+	}
+
+	public void setServerIp(String serverIp) {
+		this.serverIp = serverIp;
+	}
+
+	/**
+	 * Stores an already-formatted batch id verbatim.
+	 *
+	 * @param messageBatchId the preformatted batch id
+	 */
+	public void setMessageBatchId(String messageBatchId) {
+		this.messageBatchId = messageBatchId;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/beans/TopicBean.java b/src/main/java/com/att/nsa/cambria/beans/TopicBean.java
new file mode 100644
index 0000000..3303c07
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/beans/TopicBean.java
@@ -0,0 +1,155 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package com.att.nsa.cambria.beans;
+
+import java.io.Serializable;
+
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * @author author
+ *
+ */
+@XmlRootElement
+public class TopicBean implements Serializable {
+
+	private static final long serialVersionUID = -8620390377775457949L;
+	private String topicName;
+	private String topicDescription;
+
+	private int partitionCount = 1; // default when the caller doesn't set one
+	private int replicationCount = 1; // default when the caller doesn't set one
+
+	private boolean transactionEnabled;
+
+	/**
+	 * No-arg constructor for JAXB/serialization; all fields take their defaults.
+	 */
+	public TopicBean() {
+		super();
+	}
+
+	/**
+	 * Constructs a fully-populated topic bean.
+	 *
+	 * @param topicName the topic name
+	 * @param topicDescription the free-form topic description
+	 * @param partitionCount number of partitions
+	 * @param replicationCount replication factor
+	 * @param transactionEnabled whether transaction-id logging is enabled
+	 */
+	public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount,
+			boolean transactionEnabled) {
+		super();
+		this.topicName = topicName;
+		this.topicDescription = topicDescription;
+		this.partitionCount = partitionCount;
+		this.replicationCount = replicationCount;
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	/**
+	 * @return the topic name
+	 */
+	public String getTopicName() {
+		return topicName;
+	}
+
+	/**
+	 * @param topicName the topic name to set
+	 */
+	public void setTopicName(String topicName) {
+		this.topicName = topicName;
+	}
+
+
+	/**
+	 * @return the number of partitions
+	 */
+	public int getPartitionCount() {
+		return partitionCount;
+	}
+
+	/**
+	 * @param partitionCount the number of partitions to set
+	 */
+	public void setPartitionCount(int partitionCount) {
+		this.partitionCount = partitionCount;
+	}
+	
+	/**
+	 * @return the replication factor
+	 */
+	public int getReplicationCount() {
+		return replicationCount;
+	}
+	
+	/**
+	 * @param replicationCount the replication factor to set
+	 */
+	public void setReplicationCount(int replicationCount) {
+		this.replicationCount = replicationCount;
+	}
+	
+	/**
+	 * @return true when transaction-id logging is enabled for this topic
+	 */
+	public boolean isTransactionEnabled() {
+		return transactionEnabled;
+	}
+	
+	/**
+	 * @param transactionEnabled whether transaction-id logging is enabled
+	 */
+	public void setTransactionEnabled(boolean transactionEnabled) {
+		this.transactionEnabled = transactionEnabled;
+	}
+
+	/**
+	 * @return the free-form topic description
+	 */
+	public String getTopicDescription() {
+		return topicDescription;
+	}
+	/**
+	 * @param topicDescription the free-form topic description to set
+	 */
+	public void setTopicDescription(String topicDescription) {
+		this.topicDescription = topicDescription;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java b/src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java
new file mode 100644
index 0000000..019fa38
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java
@@ -0,0 +1,126 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.constants;
+
+import org.apache.coyote.http11.Http11NioProtocol;
+
+import com.att.nsa.cambria.utils.Utils;
+
+/**
+ * This is the constant files for all the property or parameters.
+ * @author author
+ *
+ */
+/**
+ * Constant names and default values for Cambria settings. All members are
+ * implicitly public static final, so those modifiers are omitted throughout.
+ */
+public interface CambriaConstants {
+
+	String CAMBRIA = "Cambria";
+	String DMAAP = "DMaaP";
+
+	// zookeeper layout
+	String kDefault_ZkRoot = "/fe3c/cambria";
+	String kSetting_ZkConfigDbRoot = "config.zk.root";
+	String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
+
+	// property file consulted by AJSCPropertiesMap lookups
+	String msgRtr_prop = "MsgRtrApi.properties";
+
+	String kBrokerType = "broker.type";
+
+	/**
+	 * value to use to signal kafka broker type.
+	 */
+	String kBrokerType_Kafka = "kafka";
+	String kBrokerType_Memory = "memory";
+	String kSetting_AdminSecret = "authentication.adminSecret";
+
+	String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier";
+
+	/**
+	 * value to use to signal max empty poll per minute
+	 */
+	String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+	double kDefault_MaxEmptyPollsPerMinute = 10.0;
+
+	String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
+	long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
+
+	String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
+	int kDefault_RateLimitWindowLength = 5;
+
+	String kConfig = "c";
+
+	String kSetting_Port = "cambria.service.port";
+	/**
+	 * value to use to signal default port
+	 */
+	int kDefault_Port = 3904;
+
+	String kSetting_MaxThreads = "tomcat.maxthreads";
+	int kDefault_MaxThreads = -1;
+
+	String kSetting_ZkConfigDbServers = "config.zk.servers";
+
+	/**
+	 * value to indicate localhost port number
+	 */
+	String kDefault_ZkConfigDbServers = "localhost:2181";
+
+	/**
+	 * value to use to signal Session time out
+	 */
+	String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout";
+	int kDefault_ZkSessionTimeoutMs = 20 * 1000;
+
+	/**
+	 * value to use to signal connection time out
+	 */
+	String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout";
+	int kDefault_ZkConnectionTimeoutMs = 5 * 1000;
+
+	String TRANSACTION_ID_SEPARATOR = "::";
+
+	/**
+	 * value to use to signal there's no timeout on the consumer request.
+	 * NOTE(review): despite the name, this is 10000 (presumably ms), not an
+	 * unbounded wait — confirm against the consumer code before relying on it.
+	 */
+	int kNoTimeout = 10000;
+
+	/**
+	 * value to use to signal no limit in the number of messages returned.
+	 */
+	int kNoLimit = 0;
+
+	/**
+	 * value to use to signal that the caller wants the next set of events
+	 */
+	int kNextOffset = -1;
+
+	/**
+	 * value to use to signal there's no filter on the response stream.
+	 */
+	String kNoFilter = "";
+
+	// Added for Metric publish
+	int kStdCambriaServicePort = 3904;
+	String kBasePath = "/events/";
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java b/src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java
new file mode 100644
index 0000000..7558b25
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java
@@ -0,0 +1,42 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+
+import com.att.nsa.cambria.CambriaApiException;
+
+public class DMaaPAccessDeniedException extends CambriaApiException{
+	
+	
+	
+	public DMaaPAccessDeniedException(ErrorResponse errRes) {
+		super(errRes);
+		
+	}
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = 1L;
+
+	
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
new file mode 100644
index 0000000..8838a49
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
@@ -0,0 +1,94 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.nsa.cambria.CambriaApiException;
+
+/**
+ * Exception Mapper class to handle
+ * CambriaApiException 
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
+
+private ErrorResponse errRes;
+
+//private static final Logger LOGGER = Logger.getLogger(DMaaPCambriaExceptionMapper.class);
+private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
+	
+	@Autowired
+	private DMaaPErrorMessages msgs;
+	
+	public DMaaPCambriaExceptionMapper() {
+		super();
+		LOGGER.info("Cambria Exception Mapper Created..");
+	}
+	
+	@Override
+	public Response toResponse(CambriaApiException ex) {
+
+		LOGGER.info("Reached Cambria Exception Mapper..");
+		
+		/**
+		 * Cambria Generic Exception
+		 */
+		if(ex instanceof CambriaApiException)
+		{
+			
+			errRes = ex.getErrRes();
+			if(errRes!=null) {
+				
+				return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+			}
+			else
+			{
+				return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
+			            .build();
+			}
+			
+			
+		}
+		else
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
+			return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
+		}
+		
+	}
+
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java b/src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java
new file mode 100644
index 0000000..eb9be06
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java
@@ -0,0 +1,239 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+/**
+ * This Class reads the error message properties
+ * from the properties file
+ * @author author
+ *
+ */
+@Component
+public class DMaaPErrorMessages {
+
+	@Value("${resource.not.found}")
+	private String notFound;
+	
+	@Value("${server.unavailable}")
+	private String serverUnav;
+	
+	@Value("${http.method.not.allowed}")
+	private String methodNotAllowed;
+	
+	@Value("${incorrect.request.json}")
+	private String badRequest;
+	
+	@Value("${network.time.out}")
+	private String nwTimeout;
+	
+	@Value("${get.topic.failure}")
+	private String topicsfailure;
+	
+	@Value("${not.permitted.access.1}")
+	private String notPermitted1;
+	
+	@Value("${not.permitted.access.2}")
+	private String notPermitted2;
+	
+	@Value("${get.topic.details.failure}")
+	private String topicDetailsFail;
+	
+	@Value("${create.topic.failure}")
+	private String createTopicFail;
+	
+	@Value("${delete.topic.failure}")
+	private String deleteTopicFail;
+	
+	@Value("${incorrect.json}")
+	private String incorrectJson;
+	
+	@Value("${consume.msg.error}")
+	private String consumeMsgError;
+	
+	@Value("${publish.msg.error}")
+	private String publishMsgError;
+	
+	
+	@Value("${publish.msg.count}")
+	private String publishMsgCount;
+	
+	
+	@Value("${authentication.failure}")
+	private String authFailure;
+	@Value("${msg_size_exceeds}")
+	private String msgSizeExceeds;
+	
+	
+	@Value("${topic.not.exist}")
+	private String topicNotExist;
+	
+	public String getMsgSizeExceeds() {
+		return msgSizeExceeds;
+	}
+
+	public void setMsgSizeExceeds(String msgSizeExceeds) {
+		this.msgSizeExceeds = msgSizeExceeds;
+	}
+
+	public String getNotFound() {
+		return notFound;
+	}
+
+	public void setNotFound(String notFound) {
+		this.notFound = notFound;
+	}
+
+	public String getServerUnav() {
+		return serverUnav;
+	}
+
+	public void setServerUnav(String serverUnav) {
+		this.serverUnav = serverUnav;
+	}
+
+	public String getMethodNotAllowed() {
+		return methodNotAllowed;
+	}
+
+	public void setMethodNotAllowed(String methodNotAllowed) {
+		this.methodNotAllowed = methodNotAllowed;
+	}
+
+	public String getBadRequest() {
+		return badRequest;
+	}
+
+	public void setBadRequest(String badRequest) {
+		this.badRequest = badRequest;
+	}
+
+	public String getNwTimeout() {
+		return nwTimeout;
+	}
+
+	public void setNwTimeout(String nwTimeout) {
+		this.nwTimeout = nwTimeout;
+	}
+
+	public String getNotPermitted1() {
+		return notPermitted1;
+	}
+
+	public void setNotPermitted1(String notPermitted1) {
+		this.notPermitted1 = notPermitted1;
+	}
+
+	public String getNotPermitted2() {
+		return notPermitted2;
+	}
+
+	public void setNotPermitted2(String notPermitted2) {
+		this.notPermitted2 = notPermitted2;
+	}
+
+	public String getTopicsfailure() {
+		return topicsfailure;
+	}
+
+	public void setTopicsfailure(String topicsfailure) {
+		this.topicsfailure = topicsfailure;
+	}
+
+	public String getTopicDetailsFail() {
+		return topicDetailsFail;
+	}
+
+	public void setTopicDetailsFail(String topicDetailsFail) {
+		this.topicDetailsFail = topicDetailsFail;
+	}
+
+	public String getCreateTopicFail() {
+		return createTopicFail;
+	}
+
+	public void setCreateTopicFail(String createTopicFail) {
+		this.createTopicFail = createTopicFail;
+	}
+
+	public String getIncorrectJson() {
+		return incorrectJson;
+	}
+
+	public void setIncorrectJson(String incorrectJson) {
+		this.incorrectJson = incorrectJson;
+	}
+
+	public String getDeleteTopicFail() {
+		return deleteTopicFail;
+	}
+
+	public void setDeleteTopicFail(String deleteTopicFail) {
+		this.deleteTopicFail = deleteTopicFail;
+	}
+
+	public String getConsumeMsgError() {
+		return consumeMsgError;
+	}
+
+	public void setConsumeMsgError(String consumeMsgError) {
+		this.consumeMsgError = consumeMsgError;
+	}
+
+	public String getPublishMsgError() {
+		return publishMsgError;
+	}
+
+	public void setPublishMsgError(String publishMsgError) {
+		this.publishMsgError = publishMsgError;
+	}
+
+	public String getPublishMsgCount() {
+		return publishMsgCount;
+	}
+
+	public String getAuthFailure() {
+		return authFailure;
+	}
+
+	public void setAuthFailure(String authFailure) {
+		this.authFailure = authFailure;
+	}
+
+	public void setPublishMsgCount(String publishMsgCount) {
+		this.publishMsgCount = publishMsgCount;
+	}
+
+	public String getTopicNotExist() {
+		return topicNotExist;
+	}
+
+	public void setTopicNotExist(String topicNotExist) {
+		this.topicNotExist = topicNotExist;
+	}
+	
+	
+	
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java b/src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java
new file mode 100644
index 0000000..4011112
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java
@@ -0,0 +1,93 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+
+/**
+ * Define the Error Response Codes for MR
+ * using this enumeration
+ * @author author
+ *
+ */
public enum DMaaPResponseCode {

	/**
	 * GENERIC
	 */
	RESOURCE_NOT_FOUND(3001),
	SERVER_UNAVAILABLE(3002),
	METHOD_NOT_ALLOWED(3003),
	// NOTE(review): 1004 deviates from the 3xxx generic range; preserved as-is
	// because clients may depend on the published value.
	GENERIC_INTERNAL_ERROR(1004),
	/**
	 * AAF
	 */
	INVALID_CREDENTIALS(4001),
	ACCESS_NOT_PERMITTED(4002),
	UNABLE_TO_AUTHORIZE(4003),
	/**
	 * PUBLISH AND SUBSCRIBE
	 */
	MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001),
	UNABLE_TO_PUBLISH(5002),
	INCORRECT_BATCHING_FORMAT(5003),
	MSG_SIZE_EXCEEDS_MSG_LIMIT(5004),
	INCORRECT_JSON(5005),
	CONN_TIMEOUT(5006),
	PARTIAL_PUBLISH_MSGS(5007),
	CONSUME_MSG_ERROR(5008),
	PUBLISH_MSG_ERROR(5009),
	RETRIEVE_TRANSACTIONS(5010),
	RETRIEVE_TRANSACTIONS_DETAILS(5011),
	TOO_MANY_REQUESTS(5012),

	RATE_LIMIT_EXCEED(301),

	/**
	 * TOPICS
	 */
	GET_TOPICS_FAIL(6001),
	GET_TOPICS_DETAILS_FAIL(6002),
	CREATE_TOPIC_FAIL(6003),
	DELETE_TOPIC_FAIL(6004),
	GET_PUBLISHERS_BY_TOPIC(6005),
	GET_CONSUMERS_BY_TOPIC(6006),
	PERMIT_PUBLISHER_FOR_TOPIC(6007),
	REVOKE_PUBLISHER_FOR_TOPIC(6008),
	PERMIT_CONSUMER_FOR_TOPIC(6009),
	REVOKE_CONSUMER_FOR_TOPIC(6010),
	GET_CONSUMER_CACHE(6011),
	DROP_CONSUMER_CACHE(6012),
	GET_METRICS_ERROR(6013),
	GET_BLACKLIST(6014),
	ADD_BLACKLIST(6015),
	REMOVE_BLACKLIST(6016),
	TOPIC_NOT_IN_AAF(6017);

	/** Numeric MR error code rendered in error responses; immutable per constant. */
	private final int responseCode;

	DMaaPResponseCode(final int code) {
		responseCode = code;
	}

	/**
	 * @return the numeric MR response code for this constant
	 */
	public int getResponseCode() {
		return responseCode;
	}

}
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java b/src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java
new file mode 100644
index 0000000..59ede30
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java
@@ -0,0 +1,137 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.InternalServerErrorException;
+import javax.ws.rs.NotAllowedException;
+import javax.ws.rs.NotAuthorizedException;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.ServiceUnavailableException;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.HttpStatus;
+//import org.apache.log4j.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Exception Mapper class to handle
+ * Jersey Exceptions
+ * @author author
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
+	
+	//private static final Logger LOGGER = Logger
+		//	.getLogger(DMaaPWebExceptionMapper.class);
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
+	private ErrorResponse errRes;
+	
+	@Autowired
+	private DMaaPErrorMessages msgs;
+	
+	public DMaaPWebExceptionMapper() {
+		super();
+		LOGGER.info("WebException Mapper Created..");
+	}
+
+	@Override
+	public Response toResponse(WebApplicationException ex) {
+		
+		LOGGER.info("Reached WebException Mapper");
+		
+		/**
+		 * Resource Not Found
+		 */
+		if(ex instanceof NotFoundException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
+			
+			LOGGER.info(errRes.toString());
+			
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+			
+		}
+		
+		if(ex instanceof InternalServerErrorException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+			
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+			
+		}
+		
+		if(ex instanceof NotAuthorizedException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
+			
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+		}
+		
+		if(ex instanceof BadRequestException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
+			
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+		}
+		if(ex instanceof NotAllowedException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed());
+			
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+		}
+		
+		if(ex instanceof ServiceUnavailableException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+			
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+		            .build();
+		}
+		
+		
+		return Response.serverError().build();
+	}
+
+	
+
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java b/src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java
new file mode 100644
index 0000000..d62d5a8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java
@@ -0,0 +1,135 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.exception;
+import org.json.JSONObject;
+/**
+ * Represents the Error Response Object 
+ * that is rendered as a JSON object when
+ * an exception or error occurs on MR Rest Service.
+ * @author author
+ *
+ */
+//@XmlRootElement
+public class ErrorResponse {
+	
+	private int httpStatusCode;
+	private int mrErrorCode;
+    private String errorMessage;
+    private String helpURL;
+    private String statusTs;
+    private String topic;
+    private String publisherId;
+    private String publisherIp;
+    private String subscriberId;
+    private String subscriberIp;
+	
+
+	public ErrorResponse(int httpStatusCode, int mrErrorCode,
+			String errorMessage, String helpURL, String statusTs, String topic,
+			String publisherId, String publisherIp, String subscriberId,
+			String subscriberIp) {
+		super();
+		this.httpStatusCode = httpStatusCode;
+		this.mrErrorCode = mrErrorCode;
+		this.errorMessage = errorMessage;
+		this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
+		this.statusTs = statusTs;
+		this.topic = topic;
+		this.publisherId = publisherId;
+		this.publisherIp = publisherIp;
+		this.subscriberId = subscriberId;
+		this.subscriberIp = subscriberIp;
+	}
+
+	public ErrorResponse(int httpStatusCode, int mrErrorCode,
+			String errorMessage) {
+		super();
+		this.httpStatusCode = httpStatusCode;
+		this.mrErrorCode = mrErrorCode;
+		this.errorMessage = errorMessage;
+		this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
+		
+	}
+	
+	public int getHttpStatusCode() {
+		return httpStatusCode;
+	}
+
+	public void setHttpStatusCode(int httpStatusCode) {
+		this.httpStatusCode = httpStatusCode;
+	}
+	
+	public int getMrErrorCode() {
+		return mrErrorCode;
+	}
+
+
+	public void setMrErrorCode(int mrErrorCode) {
+		this.mrErrorCode = mrErrorCode;
+	}
+
+	
+	public String getErrorMessage() {
+		return errorMessage;
+	}
+
+	public void setErrorMessage(String errorMessage) {
+		this.errorMessage = errorMessage;
+	}
+
+	public String getHelpURL() {
+		return helpURL;
+	}
+
+	public void setHelpURL(String helpURL) {
+		this.helpURL = helpURL;
+	}
+
+	@Override
+	public String toString() {
+		return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode
+				+ "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\""
+				+ errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\""
+				+ ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\""
+				+ ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\""
+				+ ", \"subscriberIp\":\""+subscriberIp+"\"}";
+	}
+	
+	public String getErrMapperStr1() {
+		return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage="
+				+ errorMessage + ", helpURL=" + helpURL + "]";
+	}
+
+	
+	
+	public JSONObject getErrMapperStr() {
+		JSONObject o = new JSONObject();
+		o.put("status", getHttpStatusCode());
+		o.put("mrstatus", getMrErrorCode());
+		o.put("message", getErrorMessage());
+		o.put("helpURL", getHelpURL());
+		return o;
+	}
+	
+    
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java b/src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java
new file mode 100644
index 0000000..9fbfee8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.listener;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * This is the Cambria servlet context listener, which publishes and unpublishes the app's DME2 endpoints on startup and shutdown.
+ * @author author
+ *
+ */
+public class CambriaServletContextListener implements ServletContextListener {
+	
+	DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+//	private static Logger log = Logger.getLogger(CambriaServletContextListener.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
+	
+
+	@Override
+	
+	/**
+	 * contextDestroyed() loads unpublished end points
+	 * @param arg0
+	 */
+	public void contextDestroyed(ServletContextEvent arg0) {
+		log.info("CambriaServletContextListener contextDestroyed");
+		
+		loader.unPublishEndPoints();
+	}
+
+	@Override
+	/**
+	 * contextInitialized() loads published end points
+	 * @param arg0
+	 */
+	public void contextInitialized(ServletContextEvent arg0) {
+		log.info("CambriaServletContextListener contextInitialized");
+		loader.publishEndPoints();
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java b/src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java
new file mode 100644
index 0000000..20871e5
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java
@@ -0,0 +1,123 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.listener;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import com.att.aft.dme2.api.DME2Exception;
+import com.att.aft.dme2.api.DME2Manager;
+import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.service.impl.EventsServiceImpl;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class DME2EndPointLoader {
+
+	private String latitude;
+	private String longitude;
+	private String version;
+	private String serviceName;
+	private String env;
+	private String routeOffer;
+	private String hostName;
+	private String port;
+	private String contextPath;
+	private String protocol;
+	private String serviceURL;
+	private static DME2EndPointLoader loader = new DME2EndPointLoader();
+//	private static final Logger LOG = LoggerFactory.getLogger(EventsServiceImpl.class);
+	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+	private DME2EndPointLoader() {
+	}
+
+	public static DME2EndPointLoader getInstance() {
+		return loader;
+	}
+
+	/**
+	 * publishing endpoints
+	 */
+	public void publishEndPoints() {
+
+		try {
+			InputStream input = this.getClass().getResourceAsStream("/endpoint.properties");
+			Properties props = new Properties();
+			props.load(input);
+
+			latitude = props.getProperty("Latitude");
+			longitude = props.getProperty("Longitude");
+			version = props.getProperty("Version");
+			serviceName = props.getProperty("ServiceName");
+			env = props.getProperty("Environment");
+			routeOffer = props.getProperty("RouteOffer");
+			hostName = props.getProperty("HostName");
+			port = props.getProperty("Port");
+			contextPath = props.getProperty("ContextPath");
+			protocol = props.getProperty("Protocol");
+
+			System.setProperty("AFT_LATITUDE", latitude);
+			System.setProperty("AFT_LONGITUDE", longitude);
+			System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+			serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/"
+					+ "routeOffer=" + routeOffer;
+
+			DME2Manager manager = new DME2Manager("testEndpointPublish", props);
+			manager.setClientCredentials("sh301n", "");
+			DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+			// Publish API takes service name, context path, hostname, port and
+			// protocol as args
+			svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol);
+
+		} catch (IOException | DME2Exception e) {
+			LOG.error("Failed due to :" + e);
+		}
+
+	}
+/**
+ * unpublishing endpoints
+ */
+	public void unPublishEndPoints() {
+
+		DME2Manager manager;
+		try {
+			System.setProperty("AFT_LATITUDE", latitude);
+			System.setProperty("AFT_LONGITUDE", longitude);
+			System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
+
+			manager = DME2Manager.getDefaultInstance();
+			DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
+			svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port));
+		} catch (DME2Exception e) {
+			LOG.error("Failed due to DME2Exception" + e);
+		}
+
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/metabroker/Broker.java b/src/main/java/com/att/nsa/cambria/metabroker/Broker.java
new file mode 100644
index 0000000..8c1fff5
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metabroker/Broker.java
@@ -0,0 +1,92 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metabroker;
+
+import java.util.List;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * A broker interface to manage metadata around topics: listing, lookup,
+ * creation and deletion.
+ * 
+ * @author author
+ *
+ */
+public interface Broker {
+	/**
+	 * Thrown when topic creation is requested for a name that already exists.
+	 * 
+	 * @author author
+	 *
+	 */
+	public class TopicExistsException extends Exception {
+		/**
+		 * Builds the exception message from the conflicting topic name.
+		 * 
+		 * @param topicName name of the topic that already exists
+		 */
+		public TopicExistsException(String topicName) {
+			super("Topic " + topicName + " exists.");
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Get all topics in the underlying broker.
+	 * 
+	 * @return the list of known topics
+	 * @throws ConfigDbException if the backing config store cannot be read
+	 */
+	List<Topic> getAllTopics() throws ConfigDbException;
+
+	/**
+	 * Get a specific topic from the underlying broker.
+	 * 
+	 * @param topic the topic name to look up
+	 * @return a topic, or null
+	 * @throws ConfigDbException if the backing config store cannot be read
+	 */
+	Topic getTopic(String topic) throws ConfigDbException;
+
+	/**
+	 * Create a topic.
+	 * 
+	 * @param topic the topic name
+	 * @param description free-form description supplied by the owner
+	 * @param ownerApiKey API key of the topic owner
+	 * @param partitions number of partitions for the topic
+	 * @param replicas replication factor for the topic
+	 * @param transactionEnabled whether transactions are enabled on the topic
+	 * @return the newly created topic
+	 * @throws TopicExistsException if a topic with this name already exists
+	 * @throws CambriaApiException if the underlying broker rejects the request
+	 */
+	Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+			boolean transactionEnabled) throws TopicExistsException, CambriaApiException;
+
+	/**
+	 * Delete a topic by name.
+	 * 
+	 * @param topic the topic name to delete
+	 */
+	void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/metabroker/Topic.java b/src/main/java/com/att/nsa/cambria/metabroker/Topic.java
new file mode 100644
index 0000000..b53736d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metabroker/Topic.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metabroker;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource;
+/**
+ * This is the interface for a topic and all topic-related operations:
+ * name, owner, description, transaction flag, read/write ACLs and the
+ * corresponding permission checks and grants.
+ * @author author
+ *
+ */
+public interface Topic extends ReadWriteSecuredResource
+{	
+	/**
+	 * Get this topic's name
+	 * @return the topic name
+	 */
+	String getName ();
+
+	/**
+	 * Get the API key of the owner of this topic.
+	 * @return the owner's API key
+	 */
+	String getOwner ();
+
+	/**
+	 * Get a description of the topic, as set by the owner at creation time.
+	 * @return the description, as set at creation time
+	 */
+	String getDescription ();
+	
+	/**
+	 * If the topic is transaction enabled
+	 * @return boolean true/false
+	 */
+	boolean isTransactionEnabled();
+	
+	/**
+	 * Get the ACL for reading on this topic. Can be null.
+	 * @return the reader ACL, or null
+	 */
+	NsaAcl getReaderAcl ();
+
+	/**
+	 * Get the ACL for writing on this topic.  Can be null.
+	 * @return the writer ACL, or null
+	 */
+	NsaAcl getWriterAcl ();
+
+	/**
+	 * Check if this user can read the topic. Throw otherwise. Note that
+	 * user may be null.
+	 * @param user the requesting user, may be null
+	 * @throws AccessDeniedException if the user may not read
+	 */
+	void checkUserRead ( NsaApiKey user ) throws AccessDeniedException;
+
+	/**
+	 * Check if this user can write to the topic. Throw otherwise. Note
+	 * that user may be null.
+	 * @param user the requesting user, may be null
+	 * @throws AccessDeniedException if the user may not write
+	 */
+	void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException;
+
+	/**
+	 * allow the given user to publish
+	 * @param publisherId id of the user being granted write access
+	 * @param asUser the user making the grant
+	 * @throws AccessDeniedException if asUser may not change the ACL
+	 * @throws ConfigDbException if the ACL cannot be persisted
+	 */
+	void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+	/**
+	 * deny the given user from publishing
+	 * @param publisherId id of the user losing write access
+	 * @param asUser the user making the change
+	 * @throws AccessDeniedException if asUser may not change the ACL
+	 * @throws ConfigDbException if the ACL cannot be persisted
+	 */
+	void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+	/**
+	 * allow the given user to read the topic
+	 * @param consumerId id of the user being granted read access
+	 * @param asUser the user making the grant
+	 * @throws AccessDeniedException if asUser may not change the ACL
+	 * @throws ConfigDbException if the ACL cannot be persisted
+	 */
+	void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+
+	/**
+	 * deny the given user from reading the topic
+	 * @param consumerId id of the user losing read access
+	 * @param asUser the user making the change
+	 * @throws AccessDeniedException if asUser may not change the ACL
+	 * @throws ConfigDbException 
+	 */
+	void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
new file mode 100644
index 0000000..7e01fac
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A Cambria batching publisher is a publisher with additional functionality
+ * for managing delayed sends: callers can inspect the count of locally
+ * pending messages and drain them at close time.
+ * 
+ * @author author
+ *
+ */
+public interface CambriaBatchingPublisher extends CambriaPublisher
+{
+	/**
+	 * Get the number of messages accepted by send() that have not yet been
+	 * delivered to the server.
+	 * @return the number of pending messages
+	 */
+	int getPendingMessageCount ();
+
+	/**
+	 * Close this publisher, attempting to send any remaining messages first.
+	 * @param timeout an amount of time to wait for unsent messages to be sent
+	 * @param timeoutUnits the time unit for the timeout arg
+	 * @return a list of any unsent messages after the timeout
+	 * @throws IOException if a communication problem occurs while draining
+	 * @throws InterruptedException if the wait is interrupted
+	 */
+	List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java
new file mode 100644
index 0000000..e80235e
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+//import org.slf4j.Logger;
+
+//
+import com.att.eelf.configuration.EELFLogger;
+//import com.att.eelf.configuration.EELFManager;
+
+/**
+ * Base interface for Cambria client types, providing logging setup,
+ * API-credential management and connection close.
+ * 
+ * @author author
+ *
+ */
+public interface CambriaClient {
+	/**
+	 * An exception at the Cambria layer. This is used when the HTTP transport
+	 * layer returns a success code but the transaction is not completed as
+	 * expected.
+	 */
+	public class CambriaApiException extends Exception {
+		/**
+		 * Creates the exception with a descriptive message.
+		 * @param msg the failure description
+		 */
+		public CambriaApiException(String msg) {
+			super(msg);
+		}
+
+		/**
+		 * Creates the exception with a message and an underlying cause.
+		 * @param msg the failure description
+		 * @param t the underlying cause
+		 */
+		public CambriaApiException(String msg, Throwable t) {
+			super(msg, t);
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Optionally set the Logger to use
+	 * 
+	 * @param log the logger that should receive this client's output
+	 */
+	void logTo(EELFLogger  log);
+
+	/**
+	 * Set the API credentials for this client connection. Subsequent calls
+	 * will include authentication headers.
+	 * 
+	 * @param apiKey the API key identifying the caller
+	 * @param apiSecret the secret associated with the API key
+	 */
+	void setApiCredentials(String apiKey, String apiSecret);
+
+	/**
+	 * Remove API credentials, if any, on this connection. Subsequent calls will
+	 * not include authentication headers.
+	 */
+	void clearApiCredentials();
+
+	/**
+	 * Close this connection. Some client interfaces have additional close
+	 * capability.
+	 */
+	void close();
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java
new file mode 100644
index 0000000..f7c5f89
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+
+/**
+ * This interface provides the fetch mechanism for a consumer.
+ * @author author
+ *
+ */
+public interface CambriaConsumer extends CambriaClient
+{
+	/**
+	 * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call. 
+
+	 * @return a set of messages
+	 * @throws IOException if there's a problem connecting to the server
+	 */
+	Iterable<String> fetch () throws IOException;
+
+	/**
+	 * Fetch a set of messages with an explicit timeout and limit for this call. These values
+	 * override any set in the constructor call.
+	 * 
+	 * @param timeoutMs	The amount of time in milliseconds that the server should keep the connection
+	 * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side).
+	 * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit.
+	 * @return a set of messages
+	 * @throws IOException if there's a problem connecting to the server
+	 */
+	Iterable<String> fetch ( int timeoutMs, int limit ) throws IOException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java
new file mode 100644
index 0000000..1873f7f
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java
@@ -0,0 +1,101 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+import java.io.IOException;
+import java.util.Collection;
+
+/**
+ * A Cambria publishing interface: accepts messages, each optionally tagged
+ * with a partition key, and sends them to the server.
+ * 
+ * @author author
+ *
+ */
+public interface CambriaPublisher extends CambriaClient {
+	/**
+	 * A simple immutable message container pairing a partition key with a
+	 * message body.
+	 */
+	public static class message {
+		/**
+		 * Creates a message for the given partition. A null partition is
+		 * normalized to the empty string; a null message body is rejected.
+		 * 
+		 * @param partition the partition key, may be null
+		 * @param msg the message body, must not be null
+		 */
+		public message(String partition, String msg) {
+			fPartition = partition == null ? "" : partition;
+			fMsg = msg;
+			if (fMsg == null) {
+				throw new IllegalArgumentException("Can't send a null message.");
+			}
+		}
+
+		/**
+		 * Copy constructor.
+		 * 
+		 * @param msg the message to copy
+		 */
+		public message(message msg) {
+			this(msg.fPartition, msg.fMsg);
+		}
+
+		/**
+		 * The partition key; never null (empty string when unspecified).
+		 */
+		public final String fPartition;
+		/**
+		 * The message body; never null.
+		 */
+		public final String fMsg;
+	}
+
+	/**
+	 * Send the given message using the given partition.
+	 * 
+	 * @param partition the partition key, may be null
+	 * @param msg the message body
+	 * @return the number of pending messages
+	 * @throws IOException if the message cannot be handed off for delivery
+	 */
+	int send(String partition, String msg) throws IOException;
+
+	/**
+	 * Send the given message using its partition.
+	 * 
+	 * @param msg the message to send
+	 * @return the number of pending messages
+	 * @throws IOException if the message cannot be handed off for delivery
+	 */
+	int send(message msg) throws IOException;
+
+	/**
+	 * Send the given messages using their partitions.
+	 * 
+	 * @param msgs the messages to send
+	 * @return the number of pending messages
+	 * @throws IOException if the messages cannot be handed off for delivery
+	 */
+	int send(Collection<message> msgs) throws IOException;
+
+	/**
+	 * Close this publisher. It's an error to call send() after close()
+	 */
+	void close();
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
new file mode 100644
index 0000000..8e54ee5
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
@@ -0,0 +1,146 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.http.HttpHost;
+/**
+ * 
+ * @author author
+ *
+ */
+public class CambriaPublisherUtility
+{
+	public static final String kBasePath = "/events/";
+	public static final int kStdCambriaServicePort = 3904;
+/**
+ * 
+ * Translates a string into <code>application/x-www-form-urlencoded</code>
+ * format using a specific encoding scheme.
+ * @param s
+ * @return
+ * 
+ */
+	public static String escape ( String s )
+	{
+		try
+		{
+			return URLEncoder.encode ( s, "UTF-8");
+		}
+		catch ( UnsupportedEncodingException e )
+		{
+			throw new RuntimeException ( e );
+		}
+	}
+/**
+ * 
+ * building url
+ * @param rawTopic
+ * @return
+ */
+	public static String makeUrl ( String rawTopic )
+	{
+		final String cleanTopic = escape ( rawTopic );
+		
+		final StringBuffer url = new StringBuffer().
+			append ( CambriaPublisherUtility.kBasePath ).
+			append ( cleanTopic );
+		return url.toString ();
+	}
+/**
+ * 
+ * building consumerUrl
+ * @param topic
+ * @param rawConsumerGroup
+ * @param rawConsumerId
+ * @return
+ */
+	public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
+	{
+		final String cleanConsumerGroup = escape ( rawConsumerGroup );
+		final String cleanConsumerId = escape ( rawConsumerId );
+		return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
+	}
+
+	/**
+	 * Create a list of HttpHosts from an input list of strings. Input strings have
+	 * host[:port] as format. If the port section is not provided, the default port is used.
+	 * 
+	 * @param hosts
+	 * @return a list of hosts
+	 */
+	public static List<HttpHost> createHostsList(Collection<String> hosts)
+	{
+		final ArrayList<HttpHost> convertedHosts = new ArrayList<HttpHost> ();
+		for ( String host : hosts )
+		{
+			if ( host.length () == 0 ) continue;
+			convertedHosts.add ( hostForString ( host ) );
+		}
+		return convertedHosts;
+	}
+
+	/**
+	 * Return an HttpHost from an input string. Input string has
+	 * host[:port] as format. If the port section is not provided, the default port is used.
+	 * 
+	 * @param hosts
+	 * @return a list of hosts
+	 * if host.length<1 throws IllegalArgumentException
+	 * 
+	 */
+	public static HttpHost hostForString ( String host )
+	{
+		if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." );
+		
+		String hostPart = host;
+		int port = kStdCambriaServicePort;
+
+		final int colon = host.indexOf ( ':' );
+		if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." );
+		if ( colon > 0 )
+		{
+			hostPart = host.substring ( 0, colon ).trim();
+
+			final String portPart = host.substring ( colon + 1 ).trim();
+			if ( portPart.length () > 0 )
+			{
+				try
+				{
+					port = Integer.parseInt ( portPart );
+				}
+				catch ( NumberFormatException x )
+				{
+					throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x );
+				}
+			}
+			// else: use default port on "foo:"
+		}
+
+		return new HttpHost ( hostPart, port );
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
new file mode 100644
index 0000000..98e16f7
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
@@ -0,0 +1,423 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher;
+
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.TreeSet;
+import java.util.UUID;
+
+import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+
+/**
+ * A factory for Cambria clients.<br/>
+ * <br/>
+ * Use caution selecting a consumer creator factory. If the call doesn't accept
+ * a consumer group name, then it creates a consumer that is not restartable.
+ * That is, if you stop your process and start it again, your client will NOT
+ * receive any missed messages on the topic. If you need to ensure receipt of
+ * missed messages, then you must use a consumer that's created with a group
+ * name and ID. (If you create multiple consumer processes using the same group,
+ * load is split across them. Be sure to use a different ID for each instance.)<br/>
+ * <br/>
+ * Publishers
+ * 
+ * @author author
+ */
+public class DMaaPCambriaClientFactory {
+	/**
+	 * Create a stand-alone consumer (no consumer group) with the default
+	 * timeout and no message limit. Such a consumer is NOT re-startable
+	 * across sessions: missed messages are not redelivered after a restart.
+	 * 
+	 * @param hostList
+	 *            A comma separated list of hosts to use to connect to Cambria.
+	 *            You can include port numbers (3904 is the default).
+	 * 
+	 * @param topic
+	 *            The topic to consume
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(String hostList, String topic) {
+		final Collection<String> hosts = DMaaPCambriaConsumerImpl.stringToList(hostList);
+		return createConsumer(hosts, topic);
+	}
+
+	/**
+	 * Create a stand-alone consumer (no consumer group) with the default
+	 * timeout and no message limit; NOT re-startable across sessions.
+	 * 
+	 * @param hostSet
+	 *            The host used in the URL to Cambria. Entries can be
+	 *            "host:port".
+	 * @param topic
+	 *            The topic to consume
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(Collection<String> hostSet,
+			String topic) {
+		// No server-side filter for this variant.
+		final String noFilter = null;
+		return createConsumer(hostSet, topic, noFilter);
+	}
+
+	/**
+	 * Create a stand-alone consumer with server-side filtering, the default
+	 * timeout, and no message limit; NOT re-startable across sessions.
+	 * 
+	 * @param hostSet
+	 *            The host used in the URL to Cambria. Entries can be
+	 *            "host:port".
+	 * @param topic
+	 *            The topic to consume
+	 * @param filter
+	 *            a filter to use on the server side
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(Collection<String> hostSet,
+			String topic, String filter) {
+		// A random group with a fixed id of "0" makes this consumer anonymous.
+		final String randomGroup = UUID.randomUUID().toString();
+		return createConsumer(hostSet, topic, randomGroup, "0", -1, -1,
+				filter, null, null);
+	}
+
+	/**
+	 * Create a group consumer with the default timeout and no message limit.
+	 * Reusing the same group and ID on restart makes the consumer
+	 * re-startable across sessions; multiple processes in one group split
+	 * the load (use a distinct ID per instance).
+	 * 
+	 * @param hostSet
+	 *            The host used in the URL to Cambria. Entries can be
+	 *            "host:port".
+	 * @param topic
+	 *            The topic to consume
+	 * @param consumerGroup
+	 *            The name of the consumer group this consumer is part of
+	 * @param consumerId
+	 *            The unique id of this consumer in its group
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(Collection<String> hostSet,
+			final String topic, final String consumerGroup,
+			final String consumerId) {
+		final int defaultTimeout = -1;
+		final int noLimit = -1;
+		return createConsumer(hostSet, topic, consumerGroup, consumerId,
+				defaultTimeout, noLimit);
+	}
+
+	/**
+	 * Create a group consumer with an explicit timeout and message limit.
+	 * Reusing the same group and ID on restart makes the consumer
+	 * re-startable across sessions.
+	 * 
+	 * @param hostSet
+	 *            The host used in the URL to Cambria. Entries can be
+	 *            "host:port".
+	 * @param topic
+	 *            The topic to consume
+	 * @param consumerGroup
+	 *            The name of the consumer group this consumer is part of
+	 * @param consumerId
+	 *            The unique id of this consumer in its group
+	 * @param timeoutMs
+	 *            The amount of time in milliseconds that the server should keep
+	 *            the connection open while waiting for message traffic. Use -1
+	 *            for default timeout.
+	 * @param limit
+	 *            A limit on the number of messages returned in a single call.
+	 *            Use -1 for no limit.
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(Collection<String> hostSet,
+			final String topic, final String consumerGroup,
+			final String consumerId, int timeoutMs, int limit) {
+		// No filter and no API credentials for this variant.
+		return createConsumer(hostSet, topic, consumerGroup, consumerId,
+				timeoutMs, limit, null, null, null);
+	}
+
+	/**
+	 * Create a group consumer with server-side filtering, an explicit timeout
+	 * and message limit, and optional API credentials. Reusing the same group
+	 * and ID on restart makes the consumer re-startable across sessions.
+	 * 
+	 * @param hostList
+	 *            A comma separated list of hosts to use to connect to Cambria.
+	 *            You can include port numbers (3904 is the default).
+	 * @param topic
+	 *            The topic to consume
+	 * @param consumerGroup
+	 *            The name of the consumer group this consumer is part of
+	 * @param consumerId
+	 *            The unique id of this consumer in its group
+	 * @param timeoutMs
+	 *            The amount of time in milliseconds that the server should keep
+	 *            the connection open while waiting for message traffic. Use -1
+	 *            for default timeout.
+	 * @param limit
+	 *            A limit on the number of messages returned in a single call.
+	 *            Use -1 for no limit.
+	 * @param filter
+	 *            A Highland Park filter expression using only built-in filter
+	 *            components. Use null for "no filter".
+	 * @param apiKey
+	 *            key associated with a user
+	 * @param apiSecret
+	 *            secret of a user
+	 * 
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(String hostList,
+			final String topic, final String consumerGroup,
+			final String consumerId, int timeoutMs, int limit, String filter,
+			String apiKey, String apiSecret) {
+		final Collection<String> hosts = DMaaPCambriaConsumerImpl.stringToList(hostList);
+		return createConsumer(hosts, topic, consumerGroup, consumerId,
+				timeoutMs, limit, filter, apiKey, apiSecret);
+	}
+
+	/**
+	 * Creates a consumer with server-side filtering. The consumer can operate
+	 * in a logical group and is re-startable across sessions when the same
+	 * group and ID are reused. When a mock consumer has been injected via
+	 * $testInject, the mock is returned instead.
+	 * 
+	 * @param hostSet
+	 *            hosts used in the URL to Cambria; entries can be "host:port"
+	 * @param topic
+	 *            the topic to consume
+	 * @param consumerGroup
+	 *            the name of the consumer group this consumer is part of
+	 * @param consumerId
+	 *            the unique id of this consumer within its group
+	 * @param timeoutMs
+	 *            how long, in milliseconds, the server should keep the
+	 *            connection open while waiting for traffic; -1 selects the
+	 *            default timeout
+	 * @param limit
+	 *            maximum number of messages returned per call; -1 for no limit
+	 * @param filter
+	 *            a Highland Park filter expression using only built-in
+	 *            components, or null for "no filter"
+	 * @param apiKey
+	 *            key associated with a user
+	 * @param apiSecret
+	 *            secret associated with a user
+	 * @return a consumer
+	 */
+	public static CambriaConsumer createConsumer(Collection<String> hostSet,
+			final String topic, final String consumerGroup,
+			final String consumerId, int timeoutMs, int limit, String filter,
+			String apiKey, String apiSecret) {
+		// A unit test may have injected a mock consumer; prefer it when present.
+		if (sfMock != null) {
+			return sfMock;
+		}
+		try {
+			return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
+					consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
+		} catch (MalformedURLException e) {
+			// Malformed host entries are a configuration error; surface unchecked.
+			throw new RuntimeException(e);
+		}
+	}
+
+	/*************************************************************************/
+	/*************************************************************************/
+	/*************************************************************************/
+
+	/**
+	 * Creates a publisher that sends each message (or small group of
+	 * messages) immediately. Most applications should favor higher latency
+	 * for much higher message throughput, so this "simple publisher" is
+	 * rarely the right choice.
+	 * 
+	 * @param hostlist
+	 *            the host used in the URL to Cambria; can be "host:port" and
+	 *            can contain multiple comma-separated entries
+	 * @param topic
+	 *            the topic on which to publish messages
+	 * @return a publisher
+	 */
+	public static CambriaBatchingPublisher createSimplePublisher(
+			String hostlist, String topic) {
+		// A batch of one message aged at most 1 ms is effectively immediate.
+		return createBatchingPublisher(hostlist, topic, 1, 1);
+	}
+
+	/**
+	 * Creates a publisher that batches messages without compressing the
+	 * payload. Be sure to close the publisher to send the last batch and
+	 * ensure a clean shutdown.
+	 * 
+	 * @param hostlist
+	 *            the host used in the URL to Cambria; can be "host:port" and
+	 *            can contain multiple comma-separated entries
+	 * @param topic
+	 *            the topic on which to publish messages
+	 * @param maxBatchSize
+	 *            the largest set of messages to batch
+	 * @param maxAgeMs
+	 *            the maximum age of a message waiting in a batch
+	 * 
+	 * @return a publisher
+	 */
+	public static CambriaBatchingPublisher createBatchingPublisher(
+			String hostlist, String topic, int maxBatchSize, long maxAgeMs) {
+		// Compression disabled; delegate to the full overload.
+		return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs, false);
+	}
+
+	/**
+	 * Creates a publisher that batches messages, optionally gzip-compressing
+	 * the payload. Be sure to close the publisher to send the last batch and
+	 * ensure a clean shutdown.
+	 * 
+	 * @param hostlist
+	 *            the host used in the URL to Cambria; can be "host:port" and
+	 *            can contain multiple comma-separated entries
+	 * @param topic
+	 *            the topic on which to publish messages
+	 * @param maxBatchSize
+	 *            the largest set of messages to batch
+	 * @param maxAgeMs
+	 *            the maximum age of a message waiting in a batch
+	 * @param compress
+	 *            whether to gzip-compress the payload
+	 * 
+	 * @return a publisher
+	 */
+	public static CambriaBatchingPublisher createBatchingPublisher(
+			String hostlist, String topic, int maxBatchSize, long maxAgeMs,
+			boolean compress) {
+		// Split the comma-separated host string, then reuse the collection overload.
+		final Collection<String> hosts = DMaaPCambriaConsumerImpl.stringToList(hostlist);
+		return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs, compress);
+	}
+
+	/**
+	 * Creates a publisher that batches messages, optionally gzip-compressing
+	 * the payload. Be sure to close the publisher to send the last batch and
+	 * ensure a clean shutdown.
+	 * 
+	 * @param hostSet
+	 *            hosts used in the URL to Cambria; entries can be
+	 *            "host:port", and multiple entries enable failover
+	 * @param topic
+	 *            the topic on which to publish messages
+	 * @param maxBatchSize
+	 *            the largest set of messages to batch
+	 * @param maxAgeMs
+	 *            the maximum age of a message waiting in a batch
+	 * @param compress
+	 *            whether to gzip-compress the payload
+	 * 
+	 * @return a publisher
+	 */
+	public static CambriaBatchingPublisher createBatchingPublisher(
+			String[] hostSet, String topic, int maxBatchSize, long maxAgeMs,
+			boolean compress) {
+		// Copy the array into a sorted set in one step instead of looping manually.
+		final TreeSet<String> hosts =
+				new TreeSet<String>(java.util.Arrays.asList(hostSet));
+		return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs,
+				compress);
+	}
+
+	/**
+	 * Creates a publisher that batches messages, optionally gzip-compressing
+	 * the payload. Be sure to close the publisher to send the last batch and
+	 * ensure a clean shutdown.
+	 * 
+	 * @param hostSet
+	 *            hosts used in the URL to Cambria; entries can be
+	 *            "host:port", and multiple entries enable failover
+	 * @param topic
+	 *            the topic on which to publish messages
+	 * @param maxBatchSize
+	 *            the largest set of messages to batch
+	 * @param maxAgeMs
+	 *            the maximum age of a message waiting in a batch
+	 * @param compress
+	 *            whether to gzip-compress the payload
+	 * 
+	 * @return a publisher
+	 */
+	public static CambriaBatchingPublisher createBatchingPublisher(
+			Collection<String> hostSet, String topic, int maxBatchSize,
+			long maxAgeMs, boolean compress) {
+		final DMaaPCambriaSimplerBatchPublisher.Builder builder =
+				new DMaaPCambriaSimplerBatchPublisher.Builder();
+		return builder.againstUrls(hostSet)
+				.onTopic(topic)
+				.batchTo(maxBatchSize, maxAgeMs)
+				.compress(compress)
+				.build();
+	}
+
+	/**
+	 * Create an identity manager client to work with API keys.
+	 * 
+	 * @param hostSet
+	 *            A set of hosts to be used in the URL to Cambria. Can be
+	 *            "host:port". Use multiple entries to enable failover.
+	 * @param apiKey
+	 *            Your API key
+	 * @param apiSecret
+	 *            Your API secret
+	 * @return an identity manager
+	 */
+	/*
+	 * public static CambriaIdentityManager createIdentityManager (
+	 * Collection<String> hostSet, String apiKey, String apiSecret ) { final
+	 * CambriaIdentityManager cim = new CambriaMetaClient ( hostSet );
+	 * cim.setApiCredentials ( apiKey, apiSecret ); return cim; }
+	 */
+
+	/**
+	 * Create a topic manager for working with topics.
+	 * 
+	 * @param hostSet
+	 *            A set of hosts to be used in the URL to Cambria. Can be
+	 *            "host:port". Use multiple entries to enable failover.
+	 * @param apiKey
+	 *            Your API key
+	 * @param apiSecret
+	 *            Your API secret
+	 * @return a topic manager
+	 */
+	/*
+	 * public static CambriaTopicManager createTopicManager ( Collection<String>
+	 * hostSet, String apiKey, String apiSecret ) { final CambriaMetaClient tmi
+	 * = new CambriaMetaClient ( hostSet ); tmi.setApiCredentials ( apiKey,
+	 * apiSecret ); return tmi; }
+	 */
+
+	/**
+	 * Inject a consumer. Used to support unit tests.
+	 * 
+	 * @param cc
+	 *            the mock consumer that all subsequent createConsumer calls
+	 *            will return; pass null to restore normal construction
+	 */
+	public static void $testInject(CambriaConsumer cc) {
+		sfMock = cc;
+	}
+
+	// Test-injected consumer; when non-null, createConsumer returns it instead
+	// of building a real DMaaPCambriaConsumerImpl.
+	private static CambriaConsumer sfMock = null;
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
new file mode 100644
index 0000000..bd480e3
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
@@ -0,0 +1,98 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher.impl;
+
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.apiClient.http.CacheUse;
+import com.att.nsa.apiClient.http.HttpClient;
+import com.att.nsa.cambria.constants.CambriaConstants;
+
+/**
+ * Base class shared by the Cambria metrics publisher clients. Wraps an
+ * HttpClient pointed at the standard Cambria service port and holds the
+ * EELF logger used by subclasses.
+ */
+public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria.metrics.publisher.CambriaClient {
+
+	// EELF logger for this client; replaceable at runtime via logTo().
+	private EELFLogger fLog;
+
+	protected CambriaBaseClient(Collection<String> hosts) throws MalformedURLException {
+		this(hosts, null);
+	}
+
+	protected CambriaBaseClient(Collection<String> hosts, String clientSignature) throws MalformedURLException {
+		// Plain HTTP against the standard Cambria port, with client-side caching disabled.
+		super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
+		fLog = EELFManager.getInstance().getLogger(this.getClass().getName());
+	}
+
+	@Override
+	public void close() {
+		// Nothing to release here.
+	}
+
+	/**
+	 * Converts a JSON array of strings into a sorted set.
+	 *
+	 * @param a the array to convert; may be null
+	 * @return a sorted set of the array's strings, or null when a is null
+	 * @throws JSONException if an element is not a string
+	 */
+	protected Set<String> jsonArrayToSet(JSONArray a) throws JSONException {
+		if (a == null) {
+			return null;
+		}
+		final TreeSet<String> strings = new TreeSet<String>();
+		for (int i = 0; i < a.length(); i++) {
+			strings.add(a.getString(i));
+		}
+		return strings;
+	}
+
+	/**
+	 * Redirects this client's logging to the given logger.
+	 *
+	 * @param log the logger to use from now on
+	 */
+	public void logTo(EELFLogger log) {
+		fLog = log;
+	}
+
+	protected EELFLogger getLog() {
+		return fLog;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java
new file mode 100644
index 0000000..1702ec8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher.impl;
+
+/**
+ * Provides the notion of "current time" for the metrics publisher behind a
+ * lazily-created singleton that tests can swap out via register().
+ *
+ * @author author
+ */
+public class Clock {
+
+	// Lazily created singleton; tests may replace it through register().
+	private static Clock sfClock = null;
+
+	/**
+	 * Returns the shared Clock instance, creating it on first use.
+	 *
+	 * @return the singleton clock
+	 */
+	public static synchronized Clock getIt() {
+		if (sfClock == null) {
+			sfClock = new Clock();
+		}
+		return sfClock;
+	}
+
+	/**
+	 * Gets the current time in milliseconds according to the registered clock.
+	 *
+	 * @return the current time in ms
+	 */
+	public static long now() {
+		return getIt().nowImpl();
+	}
+
+	/**
+	 * Reads the wall clock; subclasses may override to supply test time.
+	 *
+	 * @return current time in ms
+	 */
+	protected long nowImpl() {
+		return System.currentTimeMillis();
+	}
+
+	/**
+	 * Protected constructor; instances are obtained through getIt().
+	 */
+	protected Clock() {
+	}
+
+	/**
+	 * Replaces the singleton with the given (typically test) clock.
+	 *
+	 * @param testClock the clock to install
+	 */
+	protected static synchronized void register(Clock testClock) {
+		sfClock = testClock;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
new file mode 100644
index 0000000..adff2a7
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
@@ -0,0 +1,170 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher.impl;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URLEncoder;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+import jline.internal.Log;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import com.att.nsa.apiClient.http.HttpException;
+import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
+import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+/**
+ * Cambria consumer used by the metrics publisher. Fetches messages for a
+ * (topic, consumer group, consumer id) triple over the Cambria REST API.
+ *
+ * @author author
+ */
+public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
+		implements com.att.nsa.cambria.metrics.publisher.CambriaConsumer {
+	private final String fTopic;
+	private final String fGroup;
+	private final String fId;
+	private final int fTimeoutMs;
+	private final int fLimit;
+	private final String fFilter;
+
+	/**
+	 * Creates a consumer for the given topic/group/id.
+	 *
+	 * @param hostPart hosts used to reach Cambria; entries may be "host:port"
+	 * @param topic the topic to consume
+	 * @param consumerGroup the consumer group this consumer belongs to
+	 * @param consumerId the unique id of this consumer within its group
+	 * @param timeoutMs server-side hold time in ms; -1 for the server default
+	 * @param limit maximum messages per fetch; -1 for no limit
+	 * @param filter server-side filter expression, or null for none
+	 * @param apiKey key associated with a user
+	 * @param apiSecret secret associated with a user
+	 * @throws MalformedURLException if a host entry cannot be parsed
+	 */
+	public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
+			final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
+		super(hostPart, topic + "::" + consumerGroup + "::" + consumerId);
+
+		fTopic = topic;
+		fGroup = consumerGroup;
+		fId = consumerId;
+		fTimeoutMs = timeoutMs;
+		fLimit = limit;
+		fFilter = filter;
+
+		setApiCredentials(apiKey, apiSecret);
+	}
+
+	/**
+	 * Splits a comma-separated string into a list of trimmed, non-empty parts.
+	 *
+	 * @param str the string to split; may be null
+	 * @return the parts in order; empty when str is null or has no content
+	 */
+	public static List<String> stringToList(String str) {
+		final LinkedList<String> set = new LinkedList<String>();
+		if (str != null) {
+			final String[] parts = str.trim().split(",");
+			for (String part : parts) {
+				final String trimmed = part.trim();
+				if (trimmed.length() > 0) {
+					set.add(trimmed);
+				}
+			}
+		}
+		return set;
+	}
+
+	@Override
+	public Iterable<String> fetch() throws IOException {
+		// fetch with the timeout and limit set in constructor
+		return fetch(fTimeoutMs, fLimit);
+	}
+
+	@Override
+	public Iterable<String> fetch(int timeoutMs, int limit) throws IOException {
+		final LinkedList<String> msgs = new LinkedList<String>();
+
+		final String urlPath = createUrlPath(timeoutMs, limit);
+
+		getLog().info("UEB GET " + urlPath);
+		try {
+			final JSONObject o = get(urlPath);
+
+			if (o != null) {
+				final JSONArray a = o.getJSONArray("result");
+				if (a != null) {
+					for (int i = 0; i < a.length(); i++) {
+						msgs.add(a.getString(i));
+					}
+				}
+			}
+		} catch (HttpObjectNotFoundException e) {
+			// this can happen if the topic is not yet created. ignore.
+			// Use this client's logger (not jline's) for consistency with the rest of the class.
+			getLog().warn("Failed because topic is not yet created: " + e.getMessage(), e);
+		} catch (JSONException e) {
+			// unexpected response shape
+			reportProblemWithResponse();
+			getLog().warn("Failed due to JSONException: " + e.getMessage(), e);
+		} catch (HttpException e) {
+			throw new IOException(e);
+		}
+
+		return msgs;
+	}
+
+	/**
+	 * Builds the consumer URL path including timeout, limit and filter query
+	 * parameters (each only when set).
+	 *
+	 * @param timeoutMs server-side hold time; omitted when negative
+	 * @param limit max messages per call; omitted when negative
+	 * @return the relative URL path for the fetch request
+	 */
+	protected String createUrlPath(int timeoutMs, int limit) {
+		final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
+		final StringBuilder adds = new StringBuilder();
+		if (timeoutMs > -1) {
+			adds.append("timeout=").append(timeoutMs);
+		}
+
+		if (limit > -1) {
+			if (adds.length() > 0) {
+				adds.append("&");
+			}
+			adds.append("limit=").append(limit);
+		}
+		if (fFilter != null && fFilter.length() > 0) {
+			try {
+				if (adds.length() > 0) {
+					adds.append("&");
+				}
+				adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8"));
+			} catch (UnsupportedEncodingException e) {
+				// UTF-8 is guaranteed by the JVM spec, so this should never happen.
+				getLog().warn("Failed to encode filter: " + e.getMessage(), e);
+			}
+		}
+		if (adds.length() > 0) {
+			url.append("?").append(adds.toString());
+		}
+		return url.toString();
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
new file mode 100644
index 0000000..052cc78
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
@@ -0,0 +1,429 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.metrics.publisher.impl;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.MalformedURLException;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.GZIPOutputStream;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Response;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+/**
+ * Batching Cambria publisher used for metrics. Messages are queued by
+ * send() and a background thread posts them to the Cambria "events" API in
+ * batches, either when the batch is full or when the oldest queued message
+ * exceeds the configured age.
+ * 
+ * @author author
+ *
+ */
+public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
+		implements com.att.nsa.cambria.metrics.publisher.CambriaBatchingPublisher {
+	/**
+	 * Builder for DMaaPCambriaSimplerBatchPublisher: collects urls, topic,
+	 * batch size/age and compression before build().
+	 * 
+	 * @author author
+	 *
+	 */
+	public static class Builder {
+		public Builder() {
+		}
+
+		/**
+		 * Sets the base URLs to publish against.
+		 * 
+		 * @param baseUrls
+		 * @return this builder
+		 * 
+		 */
+		public Builder againstUrls(Collection<String> baseUrls) {
+			fUrls = baseUrls;
+			return this;
+		}
+
+		/**
+		 * Sets the topic to publish on.
+		 * 
+		 * @param topic
+		 * @return this builder
+		 * 
+		 */
+		public Builder onTopic(String topic) {
+			fTopic = topic;
+			return this;
+		}
+
+		/**
+		 * Sets the batch size and maximum batch age.
+		 * 
+		 * @param maxBatchSize
+		 * @param maxBatchAgeMs
+		 * @return this builder
+		 * 
+		 */
+		public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) {
+			fMaxBatchSize = maxBatchSize;
+			fMaxBatchAgeMs = maxBatchAgeMs;
+			return this;
+		}
+
+		/**
+		 * Enables or disables gzip compression of the batch payload.
+		 * 
+		 * @param compress
+		 * @return this builder
+		 */
+		public Builder compress(boolean compress) {
+			fCompress = compress;
+			return this;
+		}
+
+		/**
+		 * Builds the configured publisher.
+		 * 
+		 * @return a new DMaaPCambriaSimplerBatchPublisher
+		 */
+		public DMaaPCambriaSimplerBatchPublisher build() {
+			try {
+				return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
+			} catch (MalformedURLException e) {
+				// Bad URLs are a configuration error; surface unchecked.
+				throw new RuntimeException(e);
+			}
+		}
+
+		private Collection<String> fUrls;
+		private String fTopic;
+		private int fMaxBatchSize = 100;
+		private long fMaxBatchAgeMs = 1000;
+		private boolean fCompress = false;
+	};
+
+	/**
+	 * Queues a single message on the given partition.
+	 * 
+	 * @param partition
+	 * @param msg
+	 */
+	@Override
+	public int send(String partition, String msg) {
+		return send(new message(partition, msg));
+	}
+
+	/**
+	 * Queues a single message.
+	 * 
+	 * @param msg
+	 */
+	@Override
+	public int send(message msg) {
+		final LinkedList<message> list = new LinkedList<message>();
+		list.add(msg);
+		return send(list);
+	}
+
+	/**
+	 * Queues a collection of messages for background sending.
+	 * NOTE(review): fPending is an unbounded queue (constructed with no
+	 * capacity), so send() never blocks and the backlog can grow without
+	 * limit while the server is unreachable.
+	 * 
+	 * @param msgs
+	 */
+	@Override
+	public synchronized int send(Collection<message> msgs) {
+		if (fClosed) {
+			throw new IllegalStateException("The publisher was closed.");
+		}
+
+		for (message userMsg : msgs) {
+			fPending.add(new TimestampedMessage(userMsg));
+		}
+		return getPendingMessageCount();
+	}
+
+	/**
+	 * getPending message count
+	 */
+	@Override
+	public synchronized int getPendingMessageCount() {
+		return fPending.size();
+	}
+
+	/**
+	 * Closes the publisher, attempting to flush all pending messages first.
+	 * 
+	 * @exception InterruptedException
+	 * @exception IOException
+	 */
+	@Override
+	public void close() {
+		try {
+			final List<message> remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
+			if (remains.size() > 0) {
+				getLog().warn("Closing publisher with " + remains.size() + " messages unsent. "
+						+ "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
+			}
+		} catch (InterruptedException e) {
+			getLog().warn("Possible message loss. " + e.getMessage(), e);
+		} catch (IOException e) {
+			getLog().warn("Possible message loss. " + e.getMessage(), e);
+		}
+	}
+
+	/**
+	 * Stops the background sender, then tries to flush the queue until the
+	 * timeout elapses; whatever is still pending is drained and returned.
+	 * 
+	 * @param time
+	 * @param unit
+	 */
+	@Override
+	public List<message> close(long time, TimeUnit unit) throws IOException, InterruptedException {
+		synchronized (this) {
+			fClosed = true;
+
+			// stop the background sender
+			fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
+			fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
+			fExec.shutdown();
+		}
+
+		final long now = Clock.now();
+		final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit);
+		// NOTE(review): now + waitInMs overflows to a negative value when
+		// close() passes Long.MAX_VALUE, making the flush loop below exit
+		// immediately instead of waiting — confirm intended behavior.
+		final long timeoutAtMs = now + waitInMs;
+
+		while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) {
+			send(true);
+			Thread.sleep(250);
+		}
+		// synchronizing the current object
+		synchronized (this) {
+			final LinkedList<message> result = new LinkedList<message>();
+			fPending.drainTo(result);
+			return result;
+		}
+	}
+
+	/**
+	 * Possibly send a batch to the cambria server. This is called by the
+	 * background thread and the close() method
+	 * 
+	 * @param force
+	 *            when true, send regardless of batch size/age thresholds
+	 */
+	private synchronized void send(boolean force) {
+		if (force || shouldSendNow()) {
+			if (!sendBatch()) {
+				getLog().warn("Send failed, " + fPending.size() + " message to send.");
+
+				// note the time for back-off
+				fDontSendUntilMs = sfWaitAfterError + Clock.now();
+			}
+		}
+	}
+
+	/**
+	 * Decides whether a batch is due: the batch is full, or the oldest queued
+	 * message has exceeded the maximum batch age — unless we are inside the
+	 * post-error back-off window.
+	 * 
+	 * @return true when a batch should be sent now
+	 */
+	private synchronized boolean shouldSendNow() {
+		boolean shouldSend = false;
+		if (fPending.size() > 0) {
+			final long nowMs = Clock.now();
+
+			shouldSend = (fPending.size() >= fMaxBatchSize);
+			if (!shouldSend) {
+				final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs;
+				shouldSend = sendAtMs <= nowMs;
+			}
+
+			// however, wait after an error
+			shouldSend = shouldSend && nowMs >= fDontSendUntilMs;
+		}
+		return shouldSend;
+	}
+
+	/**
+	 * Serializes the pending messages in the Cambria "length.length.partition
+	 * message" framing and POSTs them to the local Cambria service.
+	 * 
+	 * @return true when the batch was posted (and the queue cleared)
+	 */
+	private synchronized boolean sendBatch() {
+		// it's possible for this call to be made with an empty list. in this
+		// case, just return.
+		if (fPending.size() < 1) {
+			return true;
+		}
+
+		final long nowMs = Clock.now();
+		final String url = CambriaPublisherUtility.makeUrl(fTopic);
+
+		getLog().info("sending " + fPending.size() + " msgs to " + url + ". Oldest: "
+				+ (nowMs - fPending.peek().timestamp) + " ms");
+
+		try {
+
+			final ByteArrayOutputStream baseStream = new ByteArrayOutputStream();
+			OutputStream os = baseStream;
+			if (fCompress) {
+				os = new GZIPOutputStream(baseStream);
+			}
+			// Frame each message as "<partitionLen>.<msgLen>.<partition><msg>\n".
+			for (TimestampedMessage m : fPending) {
+				os.write(("" + m.fPartition.length()).getBytes());
+				os.write('.');
+				os.write(("" + m.fMsg.length()).getBytes());
+				os.write('.');
+				os.write(m.fPartition.getBytes());
+				os.write(m.fMsg.getBytes());
+				os.write('\n');
+			}
+			os.close();
+
+			final long startMs = Clock.now();
+
+			// code from REST Client Starts
+
+			// final String serverCalculatedSignature = sha1HmacSigner.sign
+			// ("2015-09-21T11:38:19-0700", "iHAxArrj6Ve9JgmHvR077QiV");
+
+			// NOTE(review): a new JAX-RS Client is created for every batch and
+			// never closed — potential resource leak; consider reusing one
+			// client or calling client.close() after the request.
+			Client client = ClientBuilder.newClient();
+			// NOTE(review): metricTopicname is computed but never used below —
+			// the POST always targets fTopic; confirm whether the metrics
+			// topic was meant to be used here.
+			String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+			 if (null==metricTopicname) {
+				 
+        		 metricTopicname="msgrtr.apinode.metrics.dmaap";
+			 }
+			WebTarget target = client
+					.target("http://localhost:" + CambriaConstants.kStdCambriaServicePort);
+			target = target.path("/events/" + fTopic);
+			getLog().info("url : " + target.getUri().toString());
+			// API Key
+
+			Entity<byte[]> data = Entity.entity(baseStream.toByteArray(), "application/cambria");
+
+			Response response = target.request().post(data);
+			// header("X-CambriaAuth",
+			// "2OH46YIWa329QpEF:"+serverCalculatedSignature).
+			// header("X-CambriaDate", "2015-09-21T11:38:19-0700").
+			// post(Entity.json(baseStream.toByteArray()));
+
+			getLog().info("Response received :: " + response.getStatus());
+			getLog().info("Response received :: " + response.toString());
+
+			// code from REST Client Ends
+
+			/*
+			 * final JSONObject result = post ( url, contentType,
+			 * baseStream.toByteArray(), true ); final String logLine =
+			 * "cambria reply ok (" + (Clock.now()-startMs) + " ms):" +
+			 * result.toString (); getLog().info ( logLine );
+			 */
+			// NOTE(review): the response status is not checked before clearing
+			// fPending, so a non-2xx response still discards the batch.
+			fPending.clear();
+			return true;
+		} catch (IllegalArgumentException x) {
+			getLog().warn(x.getMessage(), x);
+		}
+		/*
+		 * catch ( HttpObjectNotFoundException x ) { getLog().warn (
+		 * x.getMessage(), x ); } catch ( HttpException x ) { getLog().warn (
+		 * x.getMessage(), x ); }
+		 */
+		catch (IOException x) {
+			getLog().warn(x.getMessage(), x);
+		}
+		return false;
+	}
+
+	private final String fTopic;
+	private final int fMaxBatchSize;
+	private final long fMaxBatchAgeMs;
+	private final boolean fCompress;
+	private boolean fClosed;
+
+	// Unbounded queue of timestamped messages awaiting send.
+	private final LinkedBlockingQueue<TimestampedMessage> fPending;
+	// Earliest time (ms) at which sending may resume after an error.
+	private long fDontSendUntilMs;
+	// Single-threaded scheduler driving the periodic send(false) calls.
+	private final ScheduledThreadPoolExecutor fExec;
+
+	// Back-off delay (ms) applied after a failed batch send.
+	private static final long sfWaitAfterError = 1000;
+
+	/**
+	 * Creates the publisher and starts the background sender, which checks
+	 * the queue every 50 ms (after an initial 100 ms delay).
+	 * 
+	 * @param hosts
+	 * @param topic
+	 * @param maxBatchSize
+	 * @param maxBatchAgeMs
+	 * @param compress
+	 * @throws MalformedURLException 
+	 */
+	private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
+			long maxBatchAgeMs, boolean compress) throws MalformedURLException {
+
+		super(hosts);
+
+		if (topic == null || topic.length() < 1) {
+			throw new IllegalArgumentException("A topic must be provided.");
+		}
+
+		fClosed = false;
+		fTopic = topic;
+		fMaxBatchSize = maxBatchSize;
+		fMaxBatchAgeMs = maxBatchAgeMs;
+		fCompress = compress;
+
+		fPending = new LinkedBlockingQueue<TimestampedMessage>();
+		fDontSendUntilMs = 0;
+
+		fExec = new ScheduledThreadPoolExecutor(1);
+		fExec.scheduleAtFixedRate(new Runnable() {
+			@Override
+			public void run() {
+				send(false);
+			}
+		}, 100, 50, TimeUnit.MILLISECONDS);
+	}
+
+	/**
+	 * A message paired with the time it was queued, used to decide when a
+	 * batch has aged past fMaxBatchAgeMs.
+	 * 
+	 * @author author
+	 *
+	 */
+	private static class TimestampedMessage extends message {
+		/**
+		 * to store timestamp value
+		 */
+		public final long timestamp;
+
+		/**
+		 * constructor initialize with message
+		 * 
+		 * @param m
+		 * 
+		 */
+		public TimestampedMessage(message m) {
+			super(m);
+			timestamp = Clock.now();
+		}
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java b/src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java
new file mode 100644
index 0000000..85cc902
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java
@@ -0,0 +1,114 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.GZIPInputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.nsa.apiServer.streams.ChunkedInputStream;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.resources.streamReaders.CambriaJsonStreamReader;
+import com.att.nsa.cambria.resources.streamReaders.CambriaRawStreamReader;
+import com.att.nsa.cambria.resources.streamReaders.CambriaStreamReader;
+import com.att.nsa.cambria.resources.streamReaders.CambriaTextStreamReader;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+
+/**
+ * An inbound event set.
+ * 
+ * @author author
+ */
+public class CambriaEventSet {
+	private final reader fReader;
+
+	/**
+	 * constructor initialization
+	 * 
+	 * @param mediaType
+	 * @param originalStream
+	 * @param chunked
+	 * @param defPartition
+	 * @throws CambriaApiException
+	 */
+	public CambriaEventSet(String mediaType, InputStream originalStream,
+			boolean chunked, String defPartition) throws CambriaApiException {
+		InputStream is = originalStream;
+		if (chunked) {
+			is = new ChunkedInputStream(originalStream);
+		}
+
+		if (("application/json").equals(mediaType)) {
+			if (chunked) {
+				throw new CambriaApiException(
+						HttpServletResponse.SC_BAD_REQUEST,
+						"The JSON stream reader doesn't support chunking.");
+			}
+			fReader = new CambriaJsonStreamReader(is, defPartition);
+		} else if (("application/cambria").equals(mediaType)) {
+			fReader = new CambriaStreamReader(is);
+		} else if (("application/cambria-zip").equals(mediaType)) {
+			try {
+				is = new GZIPInputStream(is);
+			} catch (IOException e) {
+				throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
+						"Couldn't read compressed format: " + e);
+			}
+			fReader = new CambriaStreamReader(is);
+		} else if (("text/plain").equals(mediaType)) {
+			fReader = new CambriaTextStreamReader(is, defPartition);
+		} else {
+			fReader = new CambriaRawStreamReader(is, defPartition);
+		}
+	}
+
+	/**
+	 * Get the next message from this event set. Returns null when the end of
+	 * stream is reached. Will block until a message arrives (or the stream is
+	 * closed/broken).
+	 * 
+	 * @return a message, or null
+	 * @throws IOException
+	 * @throws CambriaApiException
+	 */
+	public message next() throws IOException, CambriaApiException {
+		return fReader.next();
+	}
+
+	/**
+	 * 
+	 * @author author
+	 *
+	 */
+	public interface reader {
+		/**
+		 * 
+		 * @return
+		 * @throws IOException
+		 * @throws CambriaApiException
+		 */
+		message next() throws IOException, CambriaApiException;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java b/src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java
new file mode 100644
index 0000000..e519f71
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java
@@ -0,0 +1,516 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.Consumer.Message;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder.StreamWriter;
+import com.att.nsa.cambria.utils.Utils;
+
+
+/**
+ * class used to write the consumed messages
+ * 
+ * @author author
+ *
+ */
+public class CambriaOutboundEventStream implements StreamWriter {
+	private static final int kTopLimit = 1024 * 4;
+
+	/**
+	 * 
+	 * static innerclass it takes all the input parameter for kafka consumer
+	 * like limit, timeout, meta, pretty
+	 * 
+	 * @author author
+	 *
+	 */
+	public static class Builder {
+
+		// Required
+		private final Consumer fConsumer;
+		//private final rrNvReadable fSettings;   // used during write to tweak
+												// format, decide to explicitly
+												// close stream or not
+
+		// Optional
+		private int fLimit;
+		private int fTimeoutMs;
+		private String fTopicFilter;
+		private boolean fPretty;
+		private boolean fWithMeta;
+
+		// private int fOffset;
+		/**
+		 * constructor it initializes all the consumer parameters
+		 * 
+		 * @param c
+		 * @param settings
+		 */
+		public Builder(Consumer c) {
+			this.fConsumer = c;
+			//this.fSettings = settings;
+
+			fLimit = CambriaConstants.kNoTimeout;
+			fTimeoutMs = CambriaConstants.kNoLimit;
+			fTopicFilter = CambriaConstants.kNoFilter;
+			fPretty = false;
+			fWithMeta = false;
+			// fOffset = CambriaEvents.kNextOffset;
+		}
+
+		/**
+		 * 
+		 * constructor initializes with limit
+		 * 
+		 * @param l
+		 *            only l no of messages will be consumed
+		 * @return
+		 */
+		public Builder limit(int l) {
+			this.fLimit = l;
+			return this;
+		}
+
+		/**
+		 * constructor initializes with timeout
+		 * 
+		 * @param t
+		 *            if there is no message to consume, them DMaaP will wait
+		 *            for t time
+		 * @return
+		 */
+		public Builder timeout(int t) {
+			this.fTimeoutMs = t;
+			return this;
+		}
+
+		/**
+		 * constructor initializes with filter
+		 * 
+		 * @param f
+		 *            filter
+		 * @return
+		 */
+		public Builder filter(String f) {
+			this.fTopicFilter = f;
+			return this;
+		}
+
+		/**
+		 * constructor initializes with boolean value pretty
+		 * 
+		 * @param p
+		 *            messages print in new line
+		 * @return
+		 */
+		public Builder pretty(boolean p) {
+			fPretty = p;
+			return this;
+		}
+
+		/**
+		 * constructor initializes with boolean value meta
+		 * 
+		 * @param withMeta,
+		 *            along with messages offset will print
+		 * @return
+		 */
+		public Builder withMeta(boolean withMeta) {
+			fWithMeta = withMeta;
+			return this;
+		}
+
+		// public Builder atOffset ( int pos )
+		// {
+		// fOffset = pos;
+		// return this;
+		// }
+		/**
+		 * method returs object of CambriaOutboundEventStream
+		 * 
+		 * @return
+		 * @throws CambriaApiException
+		 */
+		public CambriaOutboundEventStream build() throws CambriaApiException {
+			return new CambriaOutboundEventStream(this);
+		}
+	}
+
+	@SuppressWarnings("unchecked")
+	/**
+	 * 
+	 * @param builder
+	 * @throws CambriaApiException
+	 * 
+	 */
+	private CambriaOutboundEventStream(Builder builder) throws CambriaApiException {
+		fConsumer = builder.fConsumer;
+		fLimit = builder.fLimit;
+		fTimeoutMs = builder.fTimeoutMs;
+		//fSettings = builder.fSettings;
+		fSent = 0;
+		fPretty = builder.fPretty;
+		fWithMeta = builder.fWithMeta;
+		
+//		if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
+//			fHpAlarmFilter = null;
+//			fHppe = null;
+//		} else {
+//			try {
+//				final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
+//				HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
+//				fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
+//				final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
+//				fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
+//			} catch (HpReaderException e) {
+//				// JSON was okay, but the filter engine says it's bogus
+//				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+//						"Couldn't create filter: " + e.getMessage());
+//			} catch (JSONException e) {
+//				// user sent a bogus JSON object
+//				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+//						"Couldn't parse JSON: " + e.getMessage());
+//			}
+//		}
+	}
+
	/**
	 * Callbacks used by forEachMessage(): onWait while the consumer is idle,
	 * onMessage for each message that passes the filter.
	 *
	 */
	public interface operation {
		/**
		 * Called while waiting for messages; implementations may flush/sleep.
		 * @throws IOException
		 */
		void onWait() throws IOException;

		/**
		 * Called for each delivered message.
		 * @param count number of messages already delivered in this pass
		 * @param msg the message to write
		 * @throws IOException
		 */
		void onMessage(int count, Message msg) throws IOException;
	}
+
	/**
	 * @return the number of messages sent by the most recent write() call
	 */
	public int getSentCount() {
		return fSent;
	}
+
	@Override
	/**
	 * Streams consumed messages to the given output stream as a JSON array.
	 * Delegates iteration to forEachMessage() and, when transactions are
	 * enabled, unwraps each stored payload and logs consumer details.
	 * 
	 * @param os the response output stream
	 * @throws IOException on any write failure
	 */
	public void write(final OutputStream os) throws IOException {
		//final boolean transactionEnabled = topic.isTransactionEnabled();
		//final boolean transactionEnabled = isTransEnabled();
		final boolean transactionEnabled = istransEnable;
		// Open the JSON array; entries are appended comma-separated below.
		os.write('[');

		fSent = forEachMessage(new operation() {
			@Override
			public void onMessage(int count, Message msg) throws IOException, JSONException {

				String message = "";
				JSONObject jsonMessage = null;
				if (transactionEnabled) {
					// With transactions enabled the stored payload is a JSON
					// wrapper; the original client message is under "message".
					jsonMessage = new JSONObject(msg.getMessage());
					message = jsonMessage.getString("message");
				}

				// Comma-separate entries after the first one.
				if (count > 0) {
					os.write(',');
				}

				if (fWithMeta) {
					// Metadata mode: wrap each message with its offset.
					final JSONObject entry = new JSONObject();
					entry.put("offset", msg.getOffset());
					entry.put("message", message);
					os.write(entry.toString().getBytes());
				} else {
					//os.write(message.getBytes());
					// Emit the message as a JSON string value.
					 String jsonString = "";
					if(transactionEnabled){
						jsonString= JSONObject.valueToString(message);
					}else{
						jsonString = JSONObject.valueToString (msg.getMessage());
						}
				 	os.write ( jsonString.getBytes () );
				}

				if (fPretty) {
					os.write('\n');
				}

				// Skip consumer-side logging for the internal metrics topic.
				String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
				if (null==metricTopicname)
           		  metricTopicname="msgrtr.apinode.metrics.dmaap";
           	 
           	 // NOTE(review): topic.getName() assumes setTopic() was called
           	 // before write() — confirm with callers.
           	 if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
				if (transactionEnabled) {
					final String transactionId = jsonMessage.getString("transactionId");
					responseTransactionId = transactionId;

					StringBuilder consumerInfo = new StringBuilder();
					if (null != dmaapContext && null != dmaapContext.getRequest()) {
						final HttpServletRequest request = dmaapContext.getRequest();
						consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
						consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
						consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
						consumerInfo.append(
								"consumerGroup= \"" + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
						consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
					}

					log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transactionId
							+ "\",messageLength= \"" + message.length() + "\",topic= \"" + topic.getName() + "\"]");
				}
           	 }

			}

			@Override
			/**
			 * Flushes pending output and sleeps briefly while waiting for
			 * more messages to arrive.
			 * @throws IOException
			 */
			public void onWait() throws IOException {
				os.flush(); // likely totally unnecessary for a network socket
				try {
					// FIXME: would be good to wait/signal
					Thread.sleep(100);
				} catch (InterruptedException e) {
					// ignore
				}
			}
		});

		// Echo the transaction id back to the client when transactions are on.
		//if (null != dmaapContext && isTransactionEnabled()) {
			if (null != dmaapContext && istransEnable) {
			
			dmaapContext.getResponse().setHeader("transactionId",
					Utils.getResponseTransactionId(responseTransactionId));
		}

		os.write(']');
		os.flush();

		// Close the stream unless the "close.output.stream" property says not to.
		boolean close_out_stream = true;
		String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"close.output.stream");
		if(null!=strclose_out_stream)close_out_stream=Boolean.parseBoolean(strclose_out_stream);
		
		//if (fSettings.getBoolean("close.output.stream", true)) {
				if (close_out_stream) {
			os.close();
		}
	}
+
+	/**
+	 * 
+	 * @param requestURI
+	 * @return
+	 */
+	private String getConsumerGroupFromRequest(String requestURI) {
+		if (null != requestURI && !requestURI.isEmpty()) {
+
+			String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7);
+
+			int startIndex = consumerDetails.indexOf("/") + 1;
+			int endIndex = consumerDetails.lastIndexOf("/");
+			return consumerDetails.substring(startIndex, endIndex);
+		}
+		return null;
+	}
/**
 * Drains messages from the consumer, invoking op.onMessage for each one that
 * passes the filter. Polls until at least one message has been delivered or
 * the timeout elapses, calling op.onWait between empty polls.
 * 
 * @param op callbacks for message delivery and idle waits
 * @return the number of messages delivered
 * @throws IOException
 * @throws JSONException 
 */
	public int forEachMessage(operation op) throws IOException, JSONException {
		// A limit of 0 means "no limit requested"; cap at the server maximum.
		final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);

		int count = 0;
		boolean firstPing = true;

		final long startMs = System.currentTimeMillis();
		final long timeoutMs = fTimeoutMs + startMs;

		// Always run at least one poll (firstPing); then keep polling until a
		// message is delivered or the deadline passes.
		while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
			if (!firstPing) {
				op.onWait();
			}
			firstPing = false;

			Consumer.Message msg = null;
			while (count < effectiveLimit && (msg = fConsumer.nextMessage()) != null) {

				
				String message = "";
			//	if (topic.isTransactionEnabled() || true) {
				if (istransEnable) {
					// As part of DMaaP changes we are wrapping the original
					// message into a json object
					// and then this json object is further wrapped into message
					// object before publishing,
					// so extracting the original message from the message
					// object for matching with filter.
					final JSONObject jsonMessage = new JSONObject(msg.getMessage());
					message = jsonMessage.getString("message");
				} else {
					message = msg.getMessage();
				}

				// If filters are enabled/set, message should be in JSON format
				// for filters to work for
				// otherwise filter will automatically ignore message in
				// non-json format.
				if (filterMatches(message)) {
					op.onMessage(count, msg);
					count++;
				}
			}
		}

		return count;
	}
+
	/**
	 * 
	 * Checks whether filter is initialized
	 */
//	private boolean isFilterInitialized() {
//		return (fHpAlarmFilter != null && fHppe != null);
//	}

	/**
	 * Tests the message against the configured filter.
	 *
	 * NOTE(review): the filter engine below is commented out, so this
	 * currently accepts every message unconditionally.
	 * 
	 * @param msg the (possibly JSON) message body
	 * @return true if the message should be delivered
	 */
	private boolean filterMatches(String msg) {
		boolean result = true;
//		if (isFilterInitialized()) {
//			try {
//				final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
//				result = fHpAlarmFilter.matches(fHppe, e);
//			} catch (JSONException x) {
//				// the msg may not be JSON
//				result = false;
//				log.error("Failed due to " + x.getMessage());
//			} catch (Exception x) {
//				log.error("Error using filter: " + x.getMessage(), x);
//			}
//		}

		return result;
	}
+
	/** @return the per-request DMaaP context, or null if not set */
	public DMaaPContext getDmaapContext() {
		return dmaapContext;
	}

	/** Sets the per-request context used for logging and response headers. */
	public void setDmaapContext(DMaaPContext dmaapContext) {
		this.dmaapContext = dmaapContext;
	}

	/** @return the topic being consumed, or null if not set */
	public Topic getTopic() {
		return topic;
	}

	/** Sets the topic being consumed; write() logging reads its name. */
	public void setTopic(Topic topic) {
		this.topic = topic;
	}
	
	/** Marks whether this stream serves an AAF topic. */
	public void setTopicStyle(boolean aaftopic) {
		this.isAAFTopic = aaftopic;
	}
	
	/** Enables or disables transaction-id handling for this stream. */
	public void setTransEnabled ( boolean transEnable) {
		this.istransEnable = transEnable;
	}
+
+	/*private boolean isTransactionEnabled() {
+		//return topic.isTransactionEnabled();
+		return true; // let metrics creates for all the topics
+	}*/
+
+	private boolean isTransEnabled() {
+		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+		boolean istransidreqd=false;
+		if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) || isAAFTopic){
+			istransidreqd = true; 
+		}
+		
+		return istransidreqd;
+
+	}
+	
	// Immutable read parameters copied from the Builder.
	private final Consumer fConsumer;
	private final int fLimit;
	private final int fTimeoutMs;
	//private final rrNvReadable fSettings;
	private final boolean fPretty;
	private final boolean fWithMeta;
	// Number of messages sent by the most recent write() call.
	private int fSent;
//	private final HpAlarmFilter<HpJsonEvent> fHpAlarmFilter;
//	private final HpProcessingEngine<HpJsonEvent> fHppe;
	// Mutable per-request state, injected via setters before write() is called.
	private DMaaPContext dmaapContext;
	private String responseTransactionId;
	private Topic topic;
	private boolean isAAFTopic = false;
	private boolean istransEnable = false;
	

	//private static final Logger log = Logger.getLogger(CambriaOutboundEventStream.class);
	
	private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
new file mode 100644
index 0000000..9d727ad
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
@@ -0,0 +1,172 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources.streamReaders;
+
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class CambriaJsonStreamReader implements reader {
+	private final JSONTokener fTokens;
+	private final boolean fIsList;
+	private long fCount;
+	private final String fDefPart;
+	public static final String kKeyField = "cambria.partition";
+
+	/**
+	 * 
+	 * @param is
+	 * @param defPart
+	 * @throws CambriaApiException
+	 */
+	public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException {
+		try {
+			fTokens = new JSONTokener(is);
+			fCount = 0;
+			fDefPart = defPart;
+
+			final int c = fTokens.next();
+			if (c == '[') {
+				fIsList = true;
+			} else if (c == '{') {
+				fTokens.back();
+				fIsList = false;
+			} else {
+				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
+			}
+		} catch (JSONException e) {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+		}
+	}
+
+	@Override
+	public message next() throws CambriaApiException {
+		try {
+			if (!fTokens.more()) {
+				return null;
+			}
+
+			final int c = fTokens.next();
+			
+			/*if (c ==','){
+				fCloseCount++;
+				System.out.println("fCloseCount=" + fCloseCount +" fCount "+fCount);
+			}*/
+			if (fIsList) {
+				if (c == ']' || (fCount > 0 && c == 10))
+					return null;
+
+
+				if (fCount > 0 && c != ',' && c!= 10) {
+					throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+							"Expected ',' or closing ']' after last object.");
+				}
+
+				if (fCount == 0 && c != '{' && c!= 10  && c!=32) {
+					throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object.");
+				}
+			} else if (fCount != 0 || c != '{') {
+				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object.");
+			}
+
+			if (c == '{') {
+				fTokens.back();
+			}
+			final JSONObject o = new JSONObject(fTokens);
+			fCount++;
+			return new msg(o);
+		} catch (JSONException e) {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
+
+		}
+	}
+
+	private class msg implements message {
+		private final String fKey;
+		private  String fMsg;
+		private LogDetails logDetails;
+		private boolean transactionEnabled;
+
+		/**
+		 * constructor
+		 * 
+		 * @param o
+		 */
+		//public msg(JSONObject o){}
+		
+		
+		public msg(JSONObject o) {
+			String key = o.optString(kKeyField, fDefPart);
+			if (key == null) {
+				key = "" + System.currentTimeMillis();
+			}
+			fKey = key;
+					
+				fMsg = o.toString().trim();
+			
+		}
+
+		@Override
+		public String getKey() {
+			return fKey;
+		}
+
+		@Override
+		public String getMessage() {
+			return fMsg;
+		}
+
+		@Override
+		public boolean isTransactionEnabled() {
+			return transactionEnabled;
+		}
+
+		@Override
+		public void setTransactionEnabled(boolean transactionEnabled) {
+			this.transactionEnabled = transactionEnabled;
+		}
+
+		@Override
+		public void setLogDetails(LogDetails logDetails) {
+			this.logDetails = logDetails;
+		}
+
+		@Override
+		public LogDetails getLogDetails() {
+			return logDetails;
+		}
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
new file mode 100644
index 0000000..16f6785
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
@@ -0,0 +1,141 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources.streamReaders;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+import com.att.nsa.util.StreamTools;
+
/**
 * 
 * This stream reader reads raw bytes creating a single message.
 * @author author
 *
 */
public class CambriaRawStreamReader implements reader
{
	/**
	 * Constructs a reader that will consume the entire input stream as one message.
	 * @param is the raw input stream
	 * @param defPart default partition key used when none is supplied
	 * @throws CambriaApiException
	 */
	public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException
	{
		fStream = is;
		fDefPart = defPart;
		fClosed = false;
	}

	@Override
	/**
	 * 
	 * next() reads all remaining bytes of the stream as a single message;
	 * subsequent calls return null.
	 * @throws CambriaApiException
	 * 
	 */
	public message next () throws CambriaApiException
	{
		if ( fClosed ) return null;

		try
		{
			final byte[] rawBytes = StreamTools.readBytes ( fStream );
			// The whole stream is one message; mark this reader exhausted.
			fClosed = true;
			return new message ()
			{
				private LogDetails logDetails;
				private boolean transactionEnabled;

				/**
				 * returns boolean value which 
				 * indicates whether transaction is enabled
				 */
				public boolean isTransactionEnabled() {
					return transactionEnabled;
				}

				/**
				 * sets boolean value which 
				 * indicates whether transaction is enabled
				 */
				public void setTransactionEnabled(boolean transactionEnabled) {
					this.transactionEnabled = transactionEnabled;
				}
				
				@Override
				/**
				 * @return the partition key: checks whether fDefPart is null.
				 * If so, returns System.currentTimeMillis() as a string,
				 * otherwise returns the fDefPart value.
				 */
				public String getKey ()
				{
					return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
				}

				@Override
				/**
				 * returns the message in String type object
				 * NOTE(review): decodes with the platform default charset —
				 * confirm UTF-8 is not required here.
				 */
				public String getMessage ()
				{
					return new String ( rawBytes );
				}

				/**
				 * set log details in logDetails variable
				 */
				@Override
				public void setLogDetails(LogDetails logDetails) {
					this.logDetails = logDetails;
				}

				@Override
				/**
				 * get the log details
				 */
				public LogDetails getLogDetails() {
					return this.logDetails;
				}
			};
		}
		catch ( IOException e )
		{
			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
		}
	}
	
	// the underlying stream; read exactly once in next()
	private final InputStream fStream;
	// default partition key, may be null
	private final String fDefPart;
	// true once the single message has been produced
	private boolean fClosed;
	//private String transactionId;
}
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
new file mode 100644
index 0000000..38359f0
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
@@ -0,0 +1,229 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources.streamReaders;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ * Read an optionally chunked stream in the Cambria app format. This format
+ * allows for speedier server-side message parsing than pure JSON. It's looks
+ * like:<br/>
+ * <br/>
+ * &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;<br/>
+ * <br/>
+ * Whitespace before/after each entry is ignored, so messages can be delivered
+ * with newlines between them, or not.
+ * 
+ * @author author
+ *
+ */
+public class CambriaStreamReader implements reader {
+	/**
+	 * Constructs a reader over the given input stream. Data is consumed
+	 * lazily as next() is called.
+	 * 
+	 * @param senderStream the inbound 'application/cambria' formatted stream
+	 * @throws CambriaApiException
+	 */
+	public CambriaStreamReader(InputStream senderStream) throws CambriaApiException {
+		fStream = senderStream;
+	}
+
+	@Override
+	/**
+	 * Reads the next &lt;keyLen&gt;.&lt;msgLen&gt;.&lt;key&gt;&lt;message&gt;
+	 * entry from the stream.
+	 * 
+	 * @return the next message, or null at end of stream
+	 * @throws IOException if the underlying stream fails
+	 * @throws CambriaApiException if the entry is malformed
+	 */
+	public message next() throws IOException, CambriaApiException {
+		final int keyLen = readLength();
+		if (keyLen == -1)
+			return null;
+
+		final int msgLen = readLength();
+		final String keyPart = readString(keyLen);
+		final String msgPart = readString(msgLen);
+
+		return new msg(keyPart, msgPart);
+	}
+
+	/**
+	 * A simple key/message pair with optional log details and a
+	 * transaction-enabled flag.
+	 */
+	private static class msg implements message {
+		/**
+		 * @param key the partition key; when empty, the current time is used
+		 *            so messages can be delivered in any order instead of all
+		 *            being forced into the empty-string partition
+		 * @param msg the message body
+		 */
+		public msg(String key, String msg) {
+			if (key.length() < 1) {
+				key = "" + System.currentTimeMillis();
+			}
+
+			fKey = key;
+			fMsg = msg;
+		}
+
+		@Override
+		/**
+		 * @return the partition key
+		 */
+		public String getKey() {
+			return fKey;
+		}
+
+		@Override
+		/**
+		 * @return the message body
+		 */
+		public String getMessage() {
+			return fMsg;
+		}
+
+		private final String fKey;
+		private final String fMsg;
+		private LogDetails logDetails;
+		private boolean transactionEnabled;
+
+		/**
+		 * @return whether transaction support is enabled for this message
+		 */
+		public boolean isTransactionEnabled() {
+			return transactionEnabled;
+		}
+
+		/**
+		 * Enables or disables transaction support for this message.
+		 */
+		public void setTransactionEnabled(boolean transactionEnabled) {
+			this.transactionEnabled = transactionEnabled;
+		}
+
+		@Override
+		/**
+		 * Stores the log details for this message.
+		 */
+		public void setLogDetails(LogDetails logDetails) {
+			this.logDetails = logDetails;
+		}
+
+		@Override
+		/**
+		 * @return the log details, or null if none were set
+		 */
+		public LogDetails getLogDetails() {
+			return this.logDetails;
+		}
+	}
+
+	private final InputStream fStream;
+
+	// Upper bound on a parsed length field. This limit prevents the server
+	// from spinning on a long string of digits delivered with
+	// 'application/cambria' as the format. It must stay large enough for the
+	// maximum message size (currently 1MB, the default Kafka limit).
+	private static final int kMaxCambriaLength = 4 * 1000 * 1024;
+
+	/**
+	 * Reads a decimal length field terminated by '.', skipping leading
+	 * whitespace.
+	 * 
+	 * @return the parsed length, or -1 at end of stream
+	 * @throws IOException if the underlying stream fails
+	 * @throws CambriaApiException if the field is too long or not '.'-terminated
+	 */
+	private int readLength() throws IOException, CambriaApiException {
+		// always ignore leading whitespace
+		int c = fStream.read();
+		while (Character.isWhitespace(c)) {
+			c = fStream.read();
+		}
+
+		if (c == -1) {
+			return -1;
+		}
+
+		int result = 0;
+		while (Character.isDigit(c)) {
+			result = (result * 10) + (c - '0');
+			if (result > kMaxCambriaLength) {
+				// this case previously reported "Expected . after length.",
+				// which hid the actual problem from the client
+				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+						"Length field exceeds maximum of " + kMaxCambriaLength + ".");
+			}
+			c = fStream.read();
+		}
+
+		if (c != '.') {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
+		}
+
+		return result;
+	}
+
+	/**
+	 * Reads exactly len bytes from the stream, giving up at end of stream or
+	 * once the read deadline passes.
+	 * 
+	 * @param len the number of bytes to read
+	 * @return the bytes decoded as a String (platform default charset --
+	 *         NOTE(review): consider specifying an explicit charset such as
+	 *         UTF-8 to avoid platform-dependent decoding)
+	 * @throws IOException if the underlying stream fails
+	 * @throws CambriaApiException if fewer than len bytes could be read
+	 */
+	private String readString(int len) throws IOException, CambriaApiException {
+		final byte[] buffer = new byte[len];
+
+		final long startMs = System.currentTimeMillis();
+		final long timeoutMs = startMs + 30000; // FIXME configurable
+
+		int readTotal = 0;
+		while (readTotal < len) {
+			final int read = fStream.read(buffer, readTotal, len - readTotal);
+			if (read == -1 || System.currentTimeMillis() > timeoutMs) {
+				// end of stream, or the deadline passed before len bytes arrived
+				break;
+			}
+			readTotal += read;
+		}
+
+		if (readTotal < len) {
+			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+					"End of stream while reading " + len + " bytes");
+		}
+
+		return new String(buffer);
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
new file mode 100644
index 0000000..2b76a61
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
@@ -0,0 +1,140 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.resources.streamReaders;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import javax.servlet.http.HttpServletResponse;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+
+/**
+ * This stream reader just pulls single lines. It uses the default partition if provided. If
+ * not, the key is the current time, which does not guarantee ordering.
+ * 
+ * @author author
+ *
+ */
+public class CambriaTextStreamReader implements reader
+{
+	/**
+	 * Builds a line-oriented reader over the given stream.
+	 *
+	 * @param is the inbound stream of newline-separated messages
+	 * @param defPart the default partition key, or null to key by current time
+	 * @throws CambriaApiException
+	 */
+	public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException
+	{
+		fReader = new BufferedReader ( new InputStreamReader ( is ) );
+		fDefPart = defPart;
+	}
+
+	@Override
+	/**
+	 * Pulls the next line from the stream and wraps it as a message.
+	 *
+	 * @return the next message, or null at end of stream
+	 * @throws CambriaApiException if the underlying read fails
+	 */
+	public message next () throws CambriaApiException
+	{
+		final String textLine;
+		try
+		{
+			textLine = fReader.readLine ();
+		}
+		catch ( IOException e )
+		{
+			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
+		}
+
+		if ( textLine == null )
+		{
+			return null;
+		}
+
+		return new message ()
+		{
+			private LogDetails logDetails;
+			private boolean transactionEnabled;
+
+			/**
+			 * Reports whether transaction support is enabled for this message.
+			 * @return the transaction-enabled flag
+			 */
+			public boolean isTransactionEnabled() {
+				return transactionEnabled;
+			}
+
+			/**
+			 * Enables or disables transaction support for this message.
+			 */
+			public void setTransactionEnabled(boolean transactionEnabled) {
+				this.transactionEnabled = transactionEnabled;
+			}
+
+			@Override
+			/**
+			 * Returns the partition key: the default partition when one was
+			 * supplied, otherwise the current time in milliseconds.
+			 */
+			public String getKey ()
+			{
+				return fDefPart != null ? fDefPart : "" + System.currentTimeMillis ();
+			}
+
+			@Override
+			/**
+			 * Returns the message body (one line of input).
+			 * @return the line read from the stream
+			 */
+			public String getMessage ()
+			{
+				return textLine;
+			}
+
+			@Override
+			/**
+			 * Stores the log details for this message.
+			 */
+			public void setLogDetails(LogDetails logDetails) {
+				this.logDetails = logDetails;
+			}
+
+			@Override
+			/**
+			 * Returns the log details, or null if none were set.
+			 */
+			public LogDetails getLogDetails() {
+				return this.logDetails;
+			}
+		};
+	}
+	
+	private final BufferedReader fReader;
+	private final String fDefPart;
+}
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java b/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java
new file mode 100644
index 0000000..fa4fe17
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java
@@ -0,0 +1,39 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security;
+
+import javax.servlet.http.HttpServletRequest;
+
+import com.att.nsa.cambria.CambriaApiException;
+
+
+
+
+/**
+ * 
+ * @author author
+ *
+ */
+public interface DMaaPAAFAuthenticator {
+	/**
+	 * Checks whether the requesting user holds the given AAF role.
+	 * @param req the inbound request to authenticate
+	 * @param role the AAF role required for the operation
+	 * @return true if the request's user is in the role
+	 */
+	boolean aafAuthentication( HttpServletRequest req , String role);
+	/**
+	 * Builds the AAF permission string for a resource (e.g. a topic name)
+	 * and an action.
+	 * @param permission the resource being accessed
+	 * @param action the action being performed
+	 * @return the AAF permission string
+	 * @throws CambriaApiException if the permission cannot be derived
+	 */
+	String aafPermissionString(String permission, String action) throws CambriaApiException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
new file mode 100644
index 0000000..97ca1fd
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security;
+
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.http.HttpStatus;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.utils.Utils;
+
+
+/**
+ * 
+ * @author author
+ *
+ */
+public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
+
+	/**
+	 * Checks whether the requesting user holds the given AAF role.
+	 *
+	 * @param req the inbound request
+	 * @param role the AAF role to test
+	 * @return true if the container reports the user is in the role
+	 */
+	@Override
+	public boolean aafAuthentication(HttpServletRequest req, String role) {
+		return req.isUserInRole(role);
+	}
+
+	/**
+	 * Builds the AAF permission string for a topic and action:
+	 * "&lt;namespace&gt;.mr.topic|:topic.&lt;topicName&gt;|&lt;action&gt;".
+	 *
+	 * The namespace is taken from the topic name when it looks fully
+	 * qualified (contains a '.' and either "com.att" or "org"); otherwise it
+	 * falls back to the configured "defaultNSforUEB" property, then to
+	 * "com.att.dmaap.mr.ueb".
+	 *
+	 * @param topicName the topic being accessed
+	 * @param action the action being performed
+	 * @return the AAF permission string
+	 * @throws CambriaApiException declared for interface compatibility
+	 */
+	@Override
+	public String aafPermissionString(String topicName, String action) throws CambriaApiException {
+		String nameSpace;
+		// NOTE(review): contains("org") matches "org" anywhere in the topic
+		// name, not only a leading "org." package prefix -- confirm intended.
+		if (topicName.contains(".") && (topicName.contains("com.att") || topicName.contains("org"))) {
+			nameSpace = topicName.substring(0, topicName.lastIndexOf("."));
+		} else {
+			nameSpace = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+					"defaultNSforUEB");
+			if (null == nameSpace) {
+				nameSpace = "com.att.dmaap.mr.ueb";
+			}
+		}
+
+		return nameSpace + ".mr.topic|:topic." + topicName + "|" + action;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java b/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java
new file mode 100644
index 0000000..5e7073d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security;
+
+import javax.servlet.http.HttpServletRequest;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.security.NsaApiKey;
+
+
+/**
+ * An interface for authenticating an inbound request.
+ * @author author
+ *
+ * @param <K> NsaApiKey
+ */
+public interface DMaaPAuthenticator<K extends NsaApiKey> {
+
+	/**
+	 * Qualify a request as possibly using the authentication method that this class implements.
+	 * @param req the inbound web request
+	 * @return true if the request might be authenticated by this class
+	 */
+	boolean qualify ( HttpServletRequest req );
+	
+	/**
+	 * Check whether a request is authentic. If it is, return the API key. If not, return null.
+	 * @param req An inbound web request
+	 * @return the API key for an authentic request, or null
+	 */
+	K isAuthentic ( HttpServletRequest req );
+	/**
+	 * Authenticate the request held by the given context. If it is authentic, return the API key; otherwise return null.
+	 * @param ctx the request context
+	 * @return the API key for an authenticated request, or null
+	 */
+	K authenticate ( DMaaPContext ctx );
+	
+	/**
+	 * Register an additional authenticator for this implementation to consult.
+	 * @param a the authenticator to add
+	 */
+	void addAuthenticator(DMaaPAuthenticator<K> a);
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java b/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java
new file mode 100644
index 0000000..eb2a483
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java
@@ -0,0 +1,135 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security;
+
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.NsaAuthenticator;
+import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+/**
+ * 
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+	// Authenticators consulted in order; the first that qualifies and
+	// authenticates a request wins.
+	private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
+
+	// Default size of the window within which a signed request is accepted.
+	// NOTE(review): this was widened far beyond the original 10 minutes "for
+	// testing purpose"; restore 1000L * 60 * 10 before production use.
+	// The 'L' suffix matters: the original all-int product (6,000,000,000)
+	// silently overflowed int to 1,705,032,704 before widening to long.
+	private static final long kDefaultRequestTimeWindow = 1000L * 60 * 10 * 10 * 10 * 10 * 10;
+
+	/**
+	 * Construct the security manager against an API key database, using the
+	 * default request time window.
+	 * 
+	 * @param db
+	 *            the API key db
+	 */
+	public DMaaPAuthenticatorImpl(NsaApiDb<K> db) {
+		this(db, kDefaultRequestTimeWindow);
+	}
+
+	/**
+	 * Construct the security manager against an API key database with a
+	 * specific request time window size.
+	 * 
+	 * @param db
+	 *            the API key db
+	 * @param authTimeWindowMs
+	 *            the size of the time window for request authentication
+	 */
+	public DMaaPAuthenticatorImpl(NsaApiDb<K> db, long authTimeWindowMs) {
+		fAuthenticators = new LinkedList<>();
+		fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, authTimeWindowMs));
+	}
+
+	/**
+	 * Authenticate a user's request. This method returns the API key if the
+	 * user is authentic, null otherwise.
+	 * 
+	 * @param ctx the request context
+	 * @return an api key record, or null
+	 */
+	public K authenticate(DMaaPContext ctx) {
+		final HttpServletRequest req = ctx.getRequest();
+		for (DMaaPAuthenticator<K> a : fAuthenticators) {
+			if (a.qualify(req)) {
+				final K k = a.isAuthentic(req);
+				if (k != null)
+					return k;
+			}
+			// else: this request doesn't look right to the authenticator
+		}
+		return null;
+	}
+
+	/**
+	 * Get the user associated with the incoming request, or null if the user
+	 * is not authenticated.
+	 * 
+	 * @param ctx the request context
+	 * @return the authenticated user's API key, or null
+	 */
+	public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) {
+		final DMaaPAuthenticator<NsaSimpleApiKey> m = ctx.getConfigReader().getfSecurityManager();
+		return m.authenticate(ctx);
+	}
+
+	/**
+	 * This aggregate authenticator never qualifies requests itself; only its
+	 * delegates do.
+	 * 
+	 * @param req the inbound request
+	 * @return false always
+	 */
+	public boolean qualify(HttpServletRequest req) {
+		return false;
+	}
+
+	/**
+	 * This aggregate authenticator never authenticates requests directly; use
+	 * authenticate(DMaaPContext) instead.
+	 * 
+	 * @param req the inbound request
+	 * @return null always
+	 */
+	public K isAuthentic(HttpServletRequest req) {
+		return null;
+	}
+
+	/**
+	 * Appends another authenticator to the list of delegates.
+	 * 
+	 * @param a the authenticator to add
+	 */
+	public void addAuthenticator(DMaaPAuthenticator<K> a) {
+		this.fAuthenticators.add(a);
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
new file mode 100644
index 0000000..c2a41cc
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security.impl;
+
+import javax.servlet.http.HttpServletRequest;
+
+
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.security.DMaaPAuthenticator;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.authenticators.MechIdAuthenticator;
+//import com.att.nsa.security.db.NsaApiDb;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * An authenticator for AT&T MechIds.
+ * 
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPMechIdAuthenticator <K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
+
+	/**
+	 * MechId authentication is not implemented yet, so no request can
+	 * qualify for this authenticator.
+	 * @param req HttpServletRequest
+	 * @return false always
+	 */
+	@Override
+	public boolean qualify (HttpServletRequest req) {
+		return false;
+	}
+
+	/**
+	 * Not implemented; records the attempt in the auth log and rejects it.
+	 * @param req the inbound request
+	 * @return null always (no API key)
+	 */
+	@Override
+	public K isAuthentic (HttpServletRequest req) {
+		final String clientAddr = req.getRemoteAddr();
+		writeAuthLog ( "MechId auth is not yet implemented.", clientAddr );
+		return null;
+	}
+
+	/**
+	 * Writes a single auth-trail entry tagged with the caller's address.
+	 */
+	private static void writeAuthLog ( String msg, String remoteAddr )
+	{
+		log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
+	}
+
+	/**
+	 * Not implemented yet.
+	 * @param ctx DMaaP context
+	 * @return null always
+	 */
+	@Override
+	public K authenticate(DMaaPContext ctx) {
+		return null;
+	}
+
+	/**
+	 * Not implemented yet; this authenticator keeps no delegate list.
+	 */
+	@Override
+	public void addAuthenticator(DMaaPAuthenticator<K> a) {
+		// no-op
+	}
+
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
new file mode 100644
index 0000000..fdcf7c1
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
@@ -0,0 +1,291 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.security.impl;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import javax.servlet.http.HttpServletRequest;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.security.DMaaPAuthenticator;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.db.NsaApiDb;
+
+/**
+ * This authenticator handles an AWS-like authentication, originally used by the
+ * Cambria server (the API server for UEB).
+ * 
+ * @author author
+ *
+ * @param <K>
+ */
+public class DMaaPOriginalUebAuthenticator<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
+	/**
+	 * constructor initialization
+	 * 
+	 * @param db API key database used to look up client keys
+	 * @param requestTimeWindowMs maximum allowed skew (ms) between client and server clocks
+	 */
+	public DMaaPOriginalUebAuthenticator(NsaApiDb<K> db, long requestTimeWindowMs) {
+		fDb = db;
+		fRequestTimeWindowMs = requestTimeWindowMs;
+	}
+
+	/**
+	 * A request qualifies for this authenticator when it carries an
+	 * X-CambriaAuth or X-Auth header.
+	 * @param req the inbound request
+	 * @return true if an auth header is present
+	 */
+	@Override
+	public boolean qualify(HttpServletRequest req) {
+		// accept anything that comes in with X-(Cambria)Auth in the header
+		final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+		return xAuth != null;
+	}
+
+	/**
+	 * Extracts the auth, date and nonce headers from the request and delegates
+	 * to {@link #authenticate(String, String, String, String, String)}.
+	 * 
+	 * @param req the inbound request
+	 * @return the authenticated API key record, or null
+	 */
+	public K isAuthentic(HttpServletRequest req) {
+		final String remoteAddr = req.getRemoteAddr();
+		// Cambria originally used "Cambria..." headers, but as the API key
+		// system is now more general, we take either form.
+		final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
+		final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" });
+
+		final String httpDate = req.getHeader("Date");
+
+		final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" });
+		return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce);
+	}
+
+	/**
+	 * Authenticate a user's request. This method returns the API key if the
+	 * user is authentic, null otherwise. The auth header must be
+	 * "apiKey:signature", the client date must fall within the configured
+	 * window of server time, and the signature must be the SHA1-HMAC of
+	 * "date[:nonce]" under the key's secret.
+	 * 
+	 * @param remoteAddr client address, used only for logging
+	 * @param xAuth "apiKey:signature" header value, or null
+	 * @param xDate X-Date header value (preferred over httpDate), or null
+	 * @param httpDate standard Date header value, or null
+	 * @param nonce optional nonce folded into the signed content, or null
+	 * @return an api key record, or null
+	 */
+	public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) {
+		if (xAuth == null) {
+			authLog("No X-Auth header on request", remoteAddr);
+			return null;
+		}
+		
+		final String[] xAuthParts = xAuth.split(":");
+		if (xAuthParts.length != 2) {
+			authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+			return null;
+		}
+
+		// get the api key and signature
+		final String clientApiKey = xAuthParts[0];
+		final String clientApiHash = xAuthParts[1];
+		if (clientApiKey.length() == 0 || clientApiHash.length() == 0) {
+			authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
+			return null;
+		}
+		// if the user provided X-Date, use that. Otherwise, go for Date
+		final String dateString = xDate != null ? xDate : httpDate;
+		final Date clientDate = getClientDate(dateString);
+		if (clientDate == null) {
+			authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr);
+			return null;
+		}
+		// check the time range to defeat replay of old signed requests
+		final long nowMs = System.currentTimeMillis();
+		final long diffMs = Math.abs(nowMs - clientDate.getTime());
+		if (diffMs > fRequestTimeWindowMs) {
+			authLog("Client date is not in acceptable range of server date. Client:" + clientDate.getTime()
+					+ ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr);
+			return null;
+		}
+		K apiRecord;
+		try {
+			apiRecord = fDb.loadApiKey(clientApiKey);
+			if (apiRecord == null) {
+				authLog("No such API key " + clientApiKey, remoteAddr);
+				return null;
+			}
+		} catch (ConfigDbException e) {
+			authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr);
+			return null;
+		}
+		// make the signed content: date, optionally followed by ":nonce"
+		final StringBuilder sb = new StringBuilder();
+		sb.append(dateString);
+		if (nonce != null) {
+			sb.append(":");
+			sb.append(nonce);
+		}
+		final String signedContent = sb.toString();
+		// now check the signed date string
+		final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret());
+		// constant-time comparison so signature checking doesn't leak timing information
+		if (serverCalculatedSignature == null
+				|| !java.security.MessageDigest.isEqual(
+						serverCalculatedSignature.getBytes(java.nio.charset.StandardCharsets.UTF_8),
+						clientApiHash.getBytes(java.nio.charset.StandardCharsets.UTF_8))) {
+			authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".",
+					remoteAddr);
+			return null;
+		}
+		authLog("authenticated " + apiRecord.getKey(), remoteAddr);
+		return apiRecord;
+	}
+
+	/**
+	 * Get the first value of the first existing header from the headers list
+	 * 
+	 * @param req the inbound request
+	 * @param headers header names to try, in order of preference
+	 * @return a header value, or null if none exist
+	 */
+	private static String getFirstHeader(HttpServletRequest req, String[] headers) {
+		for (String header : headers) {
+			final String result = req.getHeader(header);
+			if (result != null)
+				return result;
+		}
+		return null;
+	}
+
+	/**
+	 * Parse the date string into a Date using one of the supported date
+	 * formats.
+	 * 
+	 * @param dateString the raw header value, may be null
+	 * @return a date, or null if no supported format matched
+	 */
+	private static Date getClientDate(String dateString) {
+		if (dateString == null) {
+			return null;
+		}
+
+		// try each supported format until one parses
+		Date result = null;
+		for (String dateFormat : kDateFormats) {
+			final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US);
+			if (!dateFormat.contains("z") && !dateFormat.contains("Z")) {
+				// formats without an explicit zone are interpreted as GMT
+				parser.setTimeZone(TIMEZONE_GMT);
+			}
+
+			try {
+				result = parser.parse(dateString);
+				break;
+			} catch (ParseException e) {
+				// presumably wrong format; try the next one
+			}
+		}
+		return result;
+	}
+
+	// common auth-trail log line, tagged with the caller's remote address
+	private static void authLog(String msg, String remoteAddr) {
+		log.info("AUTH-LOG(" + remoteAddr + "): " + msg);
+	}
+
+	private final NsaApiDb<K> fDb;
+	private final long fRequestTimeWindowMs;
+
+	private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT");
+	
+	// Accepted client date formats, tried in order.
+	private static final String kDateFormats[] =
+		{
+		    // W3C date format (RFC 3339).
+		    "yyyy-MM-dd'T'HH:mm:ssz",
+		    "yyyy-MM-dd'T'HH:mm:ssXXX",		// as of Java 7, reqd to handle colon in TZ offset
+
+		    // Preferred HTTP date format (RFC 1123).
+		    "EEE, dd MMM yyyy HH:mm:ss zzz",
+
+		    // simple unix command line 'date' format
+		    "EEE MMM dd HH:mm:ss z yyyy",
+
+		    // Common date format (RFC 822).
+		    "EEE, dd MMM yy HH:mm:ss z",
+		    "EEE, dd MMM yy HH:mm z",
+		    "dd MMM yy HH:mm:ss z",
+		    "dd MMM yy HH:mm z",
+
+			// Obsoleted HTTP date format (ANSI C asctime() format).
+		    "EEE MMM dd HH:mm:ss yyyy",
+
+		    // Obsoleted HTTP date format (RFC 1036).
+		    "EEEE, dd-MMM-yy HH:mm:ss zzz",
+		};
+
+	// logger declaration
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class);
+
+	/**
+	 * Context-based authentication is not implemented for this authenticator;
+	 * callers use {@link #isAuthentic(HttpServletRequest)} instead.
+	 * @param ctx DMaaP context
+	 * @return null (always)
+	 */
+	@Override
+	public K authenticate(DMaaPContext ctx) {
+		return null;
+	}
+
+	/**
+	 * Currently a no-op; authenticator chaining is not supported here.
+	 * @param a authenticator to add
+	 */
+	public void addAuthenticator ( DMaaPAuthenticator<K> a )
+	{
+		// no-op: chaining not supported
+	}
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/service/AdminService.java b/src/main/java/com/att/nsa/cambria/service/AdminService.java
new file mode 100644
index 0000000..6f0d9cf
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/AdminService.java
@@ -0,0 +1,83 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.json.JSONException;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * @author author
+ *
+ */
+public interface AdminService {
+	/**
+	 * Shows the consumer cache for this node.
+	 * 
+	 * @param dMaaPContext request/response context
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller is not authorized
+	 */
+	void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException;
+
+	/**
+	 * Drops (clears) the consumer cache.
+	 * 
+	 * @param dMaaPContext request/response context
+	 * @throws JSONException on failure building the JSON response
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller is not authorized
+	 */
+	void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException;
+	
+	
+	/**
+	 * Get list of blacklisted ips 
+	 * @param dMaaPContext context
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller is not authorized
+	 */
+	void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException;
+	
+	/**
+	 * Add ip to blacklist
+	 * @param dMaaPContext context
+	 * @param ip the IP address to blacklist
+	 * @throws IOException on failure writing the response
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws AccessDeniedException if the caller is not authorized
+	 */
+	void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
+	
+	/**
+	 * Remove ip from blacklist
+	 * @param dMaaPContext context
+	 * @param ip the IP address to remove from the blacklist
+	 * @throws IOException on failure writing the response
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws AccessDeniedException if the caller is not authorized
+	 */
+	void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/ApiKeysService.java b/src/main/java/com/att/nsa/cambria/service/ApiKeysService.java
new file mode 100644
index 0000000..6fc9c0d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/ApiKeysService.java
@@ -0,0 +1,105 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+
+import com.att.nsa.cambria.beans.ApiKeyBean;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+
+/**
+ * Declaring all the method in interface that is mainly used for authentication
+ * purpose.
+ *
+ *
+ */
+
+public interface ApiKeysService {
+	/**
+	 * This method declaration for getting all ApiKeys that have been generated
+	 * on the server.
+	 * 
+	 * @param dmaapContext request/response context
+	 * @throws ConfigDbException on failure reading the key database
+	 * @throws IOException on failure writing the response
+	 */
+
+	public void getAllApiKeys(DMaaPContext dmaapContext)
+			throws ConfigDbException, IOException;
+
+	/**
+	 * Getting information about a specific ApiKey
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param apikey the key to look up
+	 * @throws ConfigDbException on failure reading the key database
+	 * @throws IOException on failure writing the response
+	 */
+
+	public void getApiKey(DMaaPContext dmaapContext, String apikey)
+			throws ConfigDbException, IOException;
+
+	/**
+	 * This method is used to create a particular ApiKey
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param nsaApiKey bean carrying the details for the new key
+	 * @throws KeyExistsException if a key with the same id already exists
+	 * @throws ConfigDbException on failure writing the key database
+	 * @throws IOException on failure writing the response
+	 */
+
+	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
+			throws KeyExistsException, ConfigDbException, IOException;
+
+	/**
+	 * This method is used to update an ApiKey that was already generated on
+	 * the server.
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param apikey the key to update
+	 * @param nsaApiKey bean carrying the updated details
+	 * @throws ConfigDbException on failure writing the key database
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller may not modify this key
+	 * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException 
+	 */
+	public void updateApiKey(DMaaPContext dmaapContext, String apikey,
+			ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException
+			;
+
+	/**
+	 * This method is used to delete a specific ApiKey
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param apikey the key to delete
+	 * @throws ConfigDbException on failure writing the key database
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller may not delete this key
+	 */
+
+	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
+			throws ConfigDbException, IOException,AccessDeniedException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/EventsService.java b/src/main/java/com/att/nsa/cambria/service/EventsService.java
new file mode 100644
index 0000000..477538d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/EventsService.java
@@ -0,0 +1,75 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public interface EventsService {
+	/**
+	 * Retrieves events from the given topic for the given consumer group and
+	 * client id.
+	 * 
+	 * @param ctx request/response context
+	 * @param topic topic to consume from
+	 * @param consumerGroup consumer group id
+	 * @param clientId client (consumer instance) id
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws AccessDeniedException if the caller may not consume this topic
+	 * @throws UnavailableException if a consumer cannot be obtained
+	 * @throws CambriaApiException on an API-level error
+	 * @throws IOException on failure writing the response
+	 */
+	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+			throws ConfigDbException, TopicExistsException,UnavailableException,
+			CambriaApiException, IOException,AccessDeniedException;
+
+	/**
+	 * Publishes the message stream to the given topic.
+	 * 
+	 * @param ctx request/response context
+	 * @param topic topic to publish to
+	 * @param msg stream of messages to publish
+	 * @param defaultPartition partition used when a message carries none
+	 * @param requestTime receive timestamp of the request
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws AccessDeniedException if the caller may not publish to this topic
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 * @throws IOException on failure reading the stream or writing the response
+	 * @throws missingReqdSetting if a required server setting is absent
+	 */
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+					CambriaApiException, IOException,missingReqdSetting;
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/MMService.java b/src/main/java/com/att/nsa/cambria/service/MMService.java
new file mode 100644
index 0000000..5c14674
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/MMService.java
@@ -0,0 +1,68 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.json.JSONException;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * Contains the logic for executing calls to the Mirror Maker agent tool.
+ * 
+ * @author <a href="mailto:"></a>
+ *
+ * @since May 25, 2016
+ */
+
+public interface MMService {
+
+	/*
+	 * this method calls the add white list method of a Mirror Maker agent API
+	 */
+	public void addWhiteList();
+	
+	/*
+	 * this method calls the remove white list method of a Mirror Maker agent API
+	 */
+	public void removeWhiteList();
+	
+	/*
+	 * This method calls the list white list method of a Mirror Maker agent API
+	 */
+	public void listWhiteList();
+	
+	/**
+	 * Subscribes to the given topic on behalf of the consumer group/client and
+	 * returns the consumed payload as a string.
+	 *
+	 * @param ctx request/response context
+	 * @param topic topic to consume from
+	 * @param consumerGroup consumer group id
+	 * @param clientId client (consumer instance) id
+	 * @return the consumed message payload
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws AccessDeniedException if the caller may not consume this topic
+	 * @throws UnavailableException if a consumer cannot be obtained
+	 * @throws CambriaApiException on an API-level error
+	 * @throws IOException on failure writing the response
+	 */
+	public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException, 
+		AccessDeniedException, UnavailableException, CambriaApiException, IOException;
+	
+	/**
+	 * Publishes the message stream to the given topic.
+	 *
+	 * @param ctx request/response context
+	 * @param topic topic to publish to
+	 * @param msg stream of messages to publish
+	 * @param defaultPartition partition used when a message carries none
+	 * @param requestTime receive timestamp of the request
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws AccessDeniedException if the caller may not publish to this topic
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 * @throws IOException on failure reading the stream or writing the response
+	 * @throws missingReqdSetting if a required server setting is absent
+	 */
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException, missingReqdSetting;
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/MetricsService.java b/src/main/java/com/att/nsa/cambria/service/MetricsService.java
new file mode 100644
index 0000000..6b11682
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/MetricsService.java
@@ -0,0 +1,54 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+/**
+ * @author 
+ *
+ */
+import java.io.IOException;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+
+/**
+ * 
+ * @author author
+ *
+ */
+public interface MetricsService {
+	/**
+	 * Writes all available metrics to the response.
+	 * 
+	 * @param ctx request/response context
+	 * @throws IOException on failure writing the response
+	 */
+	public void get(DMaaPContext ctx) throws IOException;
+
+	/**
+	 * Writes the single named metric to the response.
+	 * 
+	 * @param ctx request/response context
+	 * @param name the metric name to look up
+	 * @throws IOException on failure writing the response
+	 * @throws CambriaApiException if the metric cannot be served (e.g. unknown name)
+	 */
+	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/TopicService.java b/src/main/java/com/att/nsa/cambria/service/TopicService.java
new file mode 100644
index 0000000..9ed39af
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/TopicService.java
@@ -0,0 +1,176 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+
+import org.json.JSONException;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.TopicBean;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * interface provide all the topic related operations
+ * 
+ * @author author
+ *
+ */
+public interface TopicService {
+	/**
+	 * method fetch details of all the topics
+	 * 
+	 * @param dmaapContext request/response context
+	 * @throws JSONException on failure building the JSON response
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws IOException on failure writing the response
+	 */
+	void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
+
+	/**
+	 * method fetch details of all the topics, including additional detail
+	 * beyond {@link #getTopics(DMaaPContext)}
+	 * 
+	 * @param dmaapContext request/response context
+	 * @throws JSONException on failure building the JSON response
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws IOException on failure writing the response
+	 */
+	void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
+
+	/**
+	 * method fetch details of a specific topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic to look up
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws IOException on failure writing the response
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+	void getTopic(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException;
+
+	/**
+	 * method used to create the topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicBean bean carrying the new topic's settings
+	 * @throws CambriaApiException on an API-level error
+	 * @throws TopicExistsException if a topic with this name already exists
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller may not create topics
+	 * @throws JSONException 
+	 */
+
+	void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
+			throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException;
+
+	/**
+	 * method used to delete the topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic to delete
+	 * @throws IOException on failure writing the response
+	 * @throws AccessDeniedException if the caller may not delete this topic
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws CambriaApiException on an API-level error
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+
+	void deleteTopic(DMaaPContext dmaapContext, String topicName)
+			throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException;
+
+	/**
+	 * method provides list of all the publishers associated with a topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @throws IOException on failure writing the response
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+	void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws IOException, ConfigDbException, TopicExistsException;
+
+	/**
+	 * method provides details of all the consumers associated with a specific
+	 * topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @throws IOException on failure writing the response
+	 * @throws ConfigDbException on failure reading the config store
+	 * @throws TopicExistsException if the topic does not exist
+	 */
+	void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws IOException, ConfigDbException, TopicExistsException;
+
+	/**
+	 * method grants publishing rights on a specific topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @param producerId id of the producer being granted access
+	 * @throws AccessDeniedException if the caller may not change this topic's ACL
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws IOException on failure writing the response
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 */
+	void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+	/**
+	 * method denies a specific publisher's rights on a topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @param producerId id of the producer being denied access
+	 * @throws AccessDeniedException if the caller may not change this topic's ACL
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws IOException on failure writing the response
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 */
+	void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+	/**
+	 * method grants consuming rights to a specific user on a topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @param consumerId id of the consumer being granted access
+	 * @throws AccessDeniedException if the caller may not change this topic's ACL
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws IOException on failure writing the response
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 */
+	void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+	/**
+	 * method denies a particular user's consuming rights on a topic
+	 * 
+	 * @param dmaapContext request/response context
+	 * @param topicName name of the topic
+	 * @param consumerId id of the consumer being denied access
+	 * @throws AccessDeniedException if the caller may not change this topic's ACL
+	 * @throws ConfigDbException on failure updating the config store
+	 * @throws IOException on failure writing the response
+	 * @throws TopicExistsException if the topic does not exist
+	 * @throws CambriaApiException on an API-level error
+	 */
+	void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/TransactionService.java b/src/main/java/com/att/nsa/cambria/service/TransactionService.java
new file mode 100644
index 0000000..109b2c8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/TransactionService.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * Read-side service contract for DMaaP transaction records.
+ *
+ * @author author
+ *
+ */
+public interface TransactionService {
+	/**
+	 * Processes/verifies the given transaction record.
+	 *
+	 * @param trnObj the transaction record to check
+	 */
+	void checkTransaction(TransactionObj trnObj);
+
+	/**
+	 * Handles retrieval of all known transaction objects for the given
+	 * request context.
+	 *
+	 * @param dmaapContext request/response context for this call
+	 * @throws ConfigDbException if the transaction store cannot be read
+	 * @throws IOException on response/IO failure
+	 */
+	void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException;
+
+	/**
+	 * Handles retrieval of a single transaction object, looked up by id,
+	 * for the given request context.
+	 *
+	 * @param dmaapContext request/response context for this call
+	 * @param transactionId id of the transaction record to fetch
+	 * @throws ConfigDbException if the transaction store cannot be read
+	 * @throws JSONException if the record cannot be rendered as JSON
+	 * @throws IOException on response/IO failure
+	 */
+	void getTransactionObj(DMaaPContext dmaapContext, String transactionId)
+			throws ConfigDbException, JSONException, IOException;
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/UIService.java b/src/main/java/com/att/nsa/cambria/service/UIService.java
new file mode 100644
index 0000000..b6555fe
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/UIService.java
@@ -0,0 +1,91 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package com.att.nsa.cambria.service;
+
+import java.io.IOException;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.configs.ConfigDbException;
+
+import kafka.common.TopicExistsException;
+
+/**
+ * Service contract backing the Cambria UI pages (hello page, API key and
+ * topic listings/details rendered through templates).
+ *
+ * @author author
+ *
+ */
+public interface UIService {
+	/**
+	 * Returning template of hello page.
+	 * 
+	 * @param dmaapContext request/response context for this call
+	 * @throws IOException on response/IO failure
+	 */
+	void hello(DMaaPContext dmaapContext) throws IOException;
+
+	/**
+	 * Fetching list of all api keys and returning in a templated form for
+	 * display
+	 * 
+	 * @param dmaapContext request/response context for this call
+	 * @throws ConfigDbException if the API key store cannot be read
+	 * @throws IOException on response/IO failure
+	 */
+	void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException,
+			IOException;
+
+	/**
+	 * Fetching details of an api key in a templated form for display
+	 * 
+	 * @param dmaapContext request/response context for this call
+	 * @param apiKey key whose details are to be displayed
+	 * @throws Exception on any lookup or rendering failure
+	 */
+	void getApiKey(DMaaPContext dmaapContext, final String apiKey)
+			throws Exception;
+
+	/**
+	 * Fetching list of all the topics and returning in a templated form for
+	 * display
+	 * 
+	 * @param dmaapContext request/response context for this call
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException on response/IO failure
+	 */
+	void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException,
+			IOException;
+
+	/**
+	 * Fetching details of a topic in a templated form for display
+	 * 
+	 * @param dmaapContext request/response context for this call
+	 * @param topic name of the topic to display
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException on response/IO failure
+	 * @throws TopicExistsException propagated from the topic lookup
+	 */
+	void getTopic(DMaaPContext dmaapContext, final String topic)
+			throws ConfigDbException, IOException, TopicExistsException;
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java
new file mode 100644
index 0000000..2585ab5
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java
@@ -0,0 +1,188 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Set;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import com.att.nsa.cambria.service.AdminService;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * Admin-only operations: inspecting and clearing the consumer cache, and
+ * managing the IP blacklist. Every operation first requires the caller to
+ * authenticate as the "admin" API key.
+ *
+ * @author author
+ *
+ */
+@Component
+public class AdminServiceImpl implements AdminService {
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);
+
+	/**
+	 * Responds with the current consumer cache: one entry per cached
+	 * consumer carrying its name, creation time and last-access time.
+	 *
+	 * @param dMaaPContext context
+	 * @throws IOException ex
+	 * @throws AccessDeniedException when the caller is not the admin key
+	 */
+	@Override
+	public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
+		adminAuthenticate(dMaaPContext);
+
+		final JSONArray consumerList = new JSONArray();
+		for (Consumer c : getConsumerFactory(dMaaPContext).getConsumers()) {
+			final JSONObject entry = new JSONObject();
+			entry.put("name", c.getName());
+			entry.put("created", c.getCreateTimeMs());
+			entry.put("accessed", c.getLastAccessMs());
+			consumerList.put(entry);
+		}
+
+		final JSONObject result = new JSONObject();
+		result.put("consumers", consumerList);
+		log.info("========== AdminServiceImpl: getConsumerCache: " + consumerList.toString() + "===========");
+		DMaaPResponseBuilder.respondOk(dMaaPContext, result);
+	}
+
+	/**
+	 * Drops every entry from the consumer cache and confirms with an HTML
+	 * success message.
+	 *
+	 * @param dMaaPContext context
+	 * @throws JSONException ex
+	 * @throws IOException ex
+	 * @throws AccessDeniedException when the caller is not the admin key
+	 */
+	@Override
+	public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
+		adminAuthenticate(dMaaPContext);
+		getConsumerFactory(dMaaPContext).dropCache();
+		DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
+	}
+
+	/**
+	 * Convenience accessor for the consumer factory held by this request's
+	 * configuration reader.
+	 *
+	 * @param dMaaPContext context
+	 * @return ConsumerFactory obj
+	 */
+	private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
+		return dMaaPContext.getConfigReader().getfConsumerFactory();
+	}
+
+	/**
+	 * Convenience accessor for the IP blacklist held by this request's
+	 * configuration reader.
+	 *
+	 * @param dMaaPContext context
+	 * @return blacklist obj
+	 */
+	private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
+		return dMaaPContext.getConfigReader().getfIpBlackList();
+	}
+
+	/**
+	 * Responds with the set of blacklisted IPs as a JSON array.
+	 */
+	@Override
+	public void getBlacklist(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
+		adminAuthenticate(dMaaPContext);
+
+		final JSONArray ips = setToJsonArray(getIpBlacklist(dMaaPContext).asSet());
+		DMaaPResponseBuilder.respondOk(dMaaPContext, new JSONObject().put("blacklist", ips));
+	}
+
+	/**
+	 * Adds the given IP to the blacklist and responds with 204/no-content.
+	 */
+	@Override
+	public void addToBlacklist(DMaaPContext dMaaPContext, String ip) throws IOException, ConfigDbException, AccessDeniedException {
+		adminAuthenticate(dMaaPContext);
+
+		getIpBlacklist(dMaaPContext).add(ip);
+		DMaaPResponseBuilder.respondOkNoContent(dMaaPContext);
+	}
+
+	/**
+	 * Removes the given IP from the blacklist and responds with 204/no-content.
+	 */
+	@Override
+	public void removeFromBlacklist(DMaaPContext dMaaPContext, String ip) throws IOException, ConfigDbException, AccessDeniedException {
+		adminAuthenticate(dMaaPContext);
+
+		getIpBlacklist(dMaaPContext).remove(ip);
+		DMaaPResponseBuilder.respondOkNoContent(dMaaPContext);
+	}
+
+	/**
+	 * Rejects the request unless the authenticated user is the "admin" key.
+	 *
+	 * @param dMaaPContext context
+	 * @throws AccessDeniedException when unauthenticated or not "admin"
+	 */
+	private static void adminAuthenticate(DMaaPContext dMaaPContext) throws AccessDeniedException {
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
+		if (user == null || !user.getKey().equals("admin")) {
+			throw new AccessDeniedException();
+		}
+	}
+
+	/** Converts a set of fields into a JSON array (null-tolerant). */
+	public static JSONArray setToJsonArray(Set<?> fields) {
+		return collectionToJsonArray(fields);
+	}
+
+	/** Converts any collection into a JSON array; a null collection yields an empty array. */
+	public static JSONArray collectionToJsonArray(Collection<?> fields) {
+		final JSONArray array = new JSONArray();
+		if (fields != null) {
+			for (Object field : fields) {
+				array.put(field);
+			}
+		}
+		return array;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java
new file mode 100644
index 0000000..637d2fb
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java
@@ -0,0 +1,326 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.springframework.stereotype.Service;
+
+//import com.att.nsa.apiServer.util.Emailer;
+import com.att.nsa.cambria.utils.Emailer;
+import com.att.nsa.cambria.beans.ApiKeyBean;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import com.att.nsa.cambria.service.ApiKeysService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+/**
+ * Implementation of the ApiKeysService, this will provide the below operations,
+ * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
+ * 
+ * @author author
+ */
+@Service
+public class ApiKeysServiceImpl implements ApiKeysService {
+
+	//private Logger log = Logger.getLogger(ApiKeysServiceImpl.class.toString());
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString());
+	/**
+	 * This method will provide all the ApiKeys present in kafka server.
+	 * 
+	 * @param dmaapContext
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 */
+	public void getAllApiKeys(DMaaPContext dmaapContext)
+			throws ConfigDbException, IOException {
+
+		ConfigurationReader configReader = dmaapContext.getConfigReader();
+
+		log.info("configReader : " + configReader.toString());
+
+		final JSONObject result = new JSONObject();
+		final JSONArray keys = new JSONArray();
+		result.put("apiKeys", keys);
+
+		NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();
+
+		for (String key : apiDb.loadAllKeys()) {
+			keys.put(key);
+		}
+		log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : "
+				+ keys.toString() + "===========");
+		DMaaPResponseBuilder.respondOk(dmaapContext, result);
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param apikey
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 */
+	@Override
+	public void getApiKey(DMaaPContext dmaapContext, String apikey)
+			throws ConfigDbException, IOException {
+
+		String errorMsg = "Api key name is not mentioned.";
+		int errorCode = HttpStatusCodes.k400_badRequest;
+		
+		if (null != apikey) {
+			NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext)
+					.loadApiKey(apikey);
+			
+		
+			if (null != simpleApiKey) {
+				JSONObject result = simpleApiKey.asJsonObject();
+				DMaaPResponseBuilder.respondOk(dmaapContext, result);
+				log.info("========== ApiKeysServiceImpl: getApiKey : "
+						+ result.toString() + "===========");
+				return;
+			} else {
+				errorMsg = "Api key [" + apikey + "] does not exist.";
+				errorCode = HttpStatusCodes.k404_notFound;
+				log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. "
+						+ "===========");
+				DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+						errorMsg);
+				throw new IOException();
+			}
+		}
+
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param nsaApiKey
+	 * @throws KeyExistsException
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 */
+	@Override
+	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
+			throws KeyExistsException, ConfigDbException, IOException {
+
+		log.debug("TopicService: : createApiKey....");
+		
+		
+			String contactEmail = nsaApiKey.getEmail();
+			final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
+			 String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
+			 if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false";
+			 
+	     // if ((contactEmail == null) || (contactEmail.length() == 0))
+			 if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true")    &&  !emailProvided   )
+	      {
+	        DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
+	        return;
+	      }
+		
+
+	  
+	  
+		final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+		String apiKey = nsaApiKey.getKey();
+		String sharedSecret = nsaApiKey.getSharedSecret();
+		final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
+				sharedSecret);
+
+		if (null != key) {
+
+			if (null != nsaApiKey.getEmail()) {
+				key.setContactEmail(nsaApiKey.getEmail());
+			}
+
+			if (null != nsaApiKey.getDescription()) {
+				key.setDescription(nsaApiKey.getDescription());
+			}
+
+			log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
+					+ key.toString() + "=====");
+			apiKeyDb.saveApiKey(key);
+			// email out the secret to validate the email address
+			if ( emailProvided )
+			{
+				String body = "\n" + "Your email address was provided as the creator of new API key \""
+				+ apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
+				+ " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -"
+				+ " the API key is useless without the information below, which has been provided "
+				+ "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
+				+ sharedSecret + "\n\n" + "Note that it's normal to share the API key"
+				+ " (" + apiKey + "). " 			
+				+ "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
+				+ "However, you should NOT share the API key's secret. " + "The API key is associated with your"
+				+ " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
+				+ "share the secret, someone else can use the API key " + "to access proprietary data with your "
+				+ "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";
+	
+		        Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
+		        em.send(contactEmail, "New API Key", body);
+			}
+			log.debug("TopicService: : sending response.");
+	
+			JSONObject o = key.asJsonObject();
+			
+			o.put ( NsaSimpleApiKey.kApiSecretField,
+					emailProvided ?
+						"Emailed to " + contactEmail + "." :
+						key.getSecret ()
+				);
+			DMaaPResponseBuilder.respondOk(dmaapContext,
+					o);
+	        /*o.put("secret", "Emailed to " + contactEmail + ".");
+			DMaaPResponseBuilder.respondOk(dmaapContext,
+					o); */
+			return;
+		} else {
+			log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
+			DMaaPResponseBuilder.respondWithError(dmaapContext,
+					HttpStatusCodes.k500_internalServerError,
+					"Failed to create api key.");
+			throw new KeyExistsException(apiKey);
+		}
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param apikey
+	 * @param nsaApiKey
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws AccessDeniedException
+	 */
+	@Override
+	public void updateApiKey(DMaaPContext dmaapContext, String apikey,
+			ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException {
+
+		String errorMsg = "Api key name is not mentioned.";
+		int errorCode = HttpStatusCodes.k400_badRequest;
+
+		if (null != apikey) {
+			final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+			final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+			boolean shouldUpdate = false;
+
+			if (null != key) {
+				final NsaApiKey user = DMaaPAuthenticatorImpl
+						.getAuthenticatedUser(dmaapContext);
+
+				if (user == null || !user.getKey().equals(key.getKey())) {
+					throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
+				}
+
+				if (null != nsaApiKey.getEmail()) {
+					key.setContactEmail(nsaApiKey.getEmail());
+					shouldUpdate = true;
+				}
+
+				if (null != nsaApiKey.getDescription()) {
+					key.setDescription(nsaApiKey.getDescription());
+					shouldUpdate = true;
+				}
+
+				if (shouldUpdate) {
+					apiKeyDb.saveApiKey(key);
+				}
+
+				log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
+						+ key.toString() + "=========");
+				DMaaPResponseBuilder.respondOk(dmaapContext,
+						key.asJsonObject());
+				return;
+			}
+		} else {
+			errorMsg = "Api key [" + apikey + "] does not exist.";
+			errorCode = HttpStatusCodes.k404_notFound;
+			DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+					errorMsg);
+			log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
+			throw new IOException();
+		}
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param apikey
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws AccessDeniedException
+	 */
+	@Override
+	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
+			throws ConfigDbException, IOException, AccessDeniedException {
+
+		String errorMsg = "Api key name is not mentioned.";
+		int errorCode = HttpStatusCodes.k400_badRequest;
+
+		if (null != apikey) {
+			final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
+			final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
+
+			if (null != key) {
+
+				final NsaApiKey user = DMaaPAuthenticatorImpl
+						.getAuthenticatedUser(dmaapContext);
+				if (user == null || !user.getKey().equals(key.getKey())) {
+					throw new AccessDeniedException("You don't own the API key.");
+				}
+
+				apiKeyDb.deleteApiKey(key);
+				log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
+				DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+						"Api key [" + apikey + "] deleted successfully.");
+				return;
+			}
+		} else {
+			errorMsg = "Api key [" + apikey + "] does not exist.";
+			errorCode = HttpStatusCodes.k404_notFound;
+			DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
+					errorMsg);
+			log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
+			throw new IOException();
+		}
+	}
+
+	/**
+	 * 
+	 * @param dmaapContext
+	 * @return
+	 */
+	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+		ConfigurationReader configReader = dmaapContext.getConfigReader();
+		return configReader.getfApiKeyDb();
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java
new file mode 100644
index 0000000..cdbf57b
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java
@@ -0,0 +1,153 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.util.Set;
+import java.util.TreeSet;
+
+import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
+import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
+import com.att.nsa.cambria.transaction.DMaaPTransactionObjDB;
+import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.nsa.configs.ConfigDb;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.ConfigPath;
+
+/**
+ * Persistent storage for Transaction objects built over an abstract config db.
+ * Each transaction is stored as a child entry under a single base path.
+ *
+ * @author author
+ *
+ * @param <K> the concrete transaction-object type
+ */
+public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {
+
+	private static final String kStdRootPath = "/transaction";
+
+	private final ConfigDb fDb;
+	private final ConfigPath fBasePath;
+	private final DMaaPTransactionFactory<K> fKeyFactory;
+
+	/** Resolves the config-db path for a given transaction id. */
+	private ConfigPath makePath(String transactionId) {
+		return fBasePath.getChild(transactionId);
+	}
+
+	/**
+	 * Constructs a transaction db over the given config db at the standard
+	 * root location.
+	 *
+	 * @param db backing config database
+	 * @param keyFactory factory for creating/deserializing transaction objects
+	 * @throws ConfigDbException if the base path cannot be created
+	 */
+	public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
+		this(db, kStdRootPath, keyFactory);
+	}
+
+	/**
+	 * Constructs a transaction db over the given config db using the given
+	 * root location, creating the base path if it does not yet exist.
+	 *
+	 * @param db backing config database
+	 * @param rootPath root path under which transactions are stored
+	 * @param keyFactory factory for creating/deserializing transaction objects
+	 * @throws ConfigDbException if the base path cannot be created
+	 */
+	public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
+			throws ConfigDbException {
+		fDb = db;
+		fBasePath = db.parse(rootPath);
+		fKeyFactory = keyFactory;
+
+		if (!fDb.exists(fBasePath)) {
+			fDb.store(fBasePath, "");
+		}
+	}
+
+	/**
+	 * Creates and persists a new transaction object for the given id.
+	 *
+	 * @param id the new transaction's id
+	 * @return the newly created transaction record
+	 * @throws KeyExistsException if a record for this id already exists
+	 * @throws ConfigDbException if the store cannot be written
+	 */
+	public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
+		final ConfigPath entryPath = makePath(id);
+		if (fDb.exists(entryPath)) {
+			throw new KeyExistsException(id);
+		}
+
+		// Build a fresh record, persist it, then hand it back.
+		final K created = fKeyFactory.makeNewTransactionId(id);
+		fDb.store(entryPath, created.serialize());
+		return created;
+	}
+
+	/**
+	 * Persists changes to an existing transaction record. The record must
+	 * already exist (via createTransactionObj) and be a TransactionObj.
+	 *
+	 * @param trnObj the transaction record to save
+	 * @throws ConfigDbException if the store cannot be written
+	 * @throws IllegalStateException if the record is unknown to this database
+	 */
+	@Override
+	public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
+		final ConfigPath entryPath = makePath(trnObj.getId());
+		final boolean known = fDb.exists(entryPath) && trnObj instanceof TransactionObj;
+		if (!known) {
+			throw new IllegalStateException(trnObj.getId() + " is not known to this database");
+		}
+		fDb.store(entryPath, ((TransactionObj) trnObj).serialize());
+	}
+
+	/**
+	 * Loads a transaction record by its id.
+	 *
+	 * @param transactionId id to look up
+	 * @return the transaction record, or null when no entry exists
+	 * @throws ConfigDbException if the store cannot be read
+	 */
+	@Override
+	public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
+		final String serialized = fDb.load(makePath(transactionId));
+		return (serialized == null) ? null : fKeyFactory.makeNewTransactionObj(serialized);
+	}
+
+	/**
+	 * Loads the ids of all transactions known to this database. (This could
+	 * be expensive.)
+	 *
+	 * @return a sorted set of all transaction ids
+	 * @throws ConfigDbException if the store cannot be read
+	 */
+	public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
+		final TreeSet<String> names = new TreeSet<>();
+		for (ConfigPath child : fDb.loadChildrenNames(fBasePath)) {
+			names.add(child.getName());
+		}
+		return names;
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
new file mode 100644
index 0000000..3386f19
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
@@ -0,0 +1,788 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.backends.Publisher;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.resources.CambriaEventSet;
+import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import com.att.nsa.cambria.service.EventsService;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.cambria.utils.Utils;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+import kafka.producer.KeyedMessage;
+
+/**
+ * This class provides the functionality to publish and subscribe messages to
+ * kafka
+ * 
+ * @author author
+ *
+ */
+@Service
+public class EventsServiceImpl implements EventsService {
+	//private static final Logger LOG = Logger.getLogger(EventsServiceImpl.class);
+	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+
+	private static final String BATCH_LENGTH = "event.batch.length";
+	private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+	@Autowired
+	private DMaaPErrorMessages errorMessages;
+
+	//@Value("${metrics.send.cambria.topic}")
+	//private String metricsTopic;
+	
+	/**
+	 * @param ctx
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 * @throws ConfigDbException,
+	 *             TopicExistsException, AccessDeniedException,
+	 *             UnavailableException, CambriaApiException, IOException
+	 * 
+	 * 
+	 */
+	@Override
+	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+			CambriaApiException, IOException,DMaaPAccessDeniedException {
+		final long startTime = System.currentTimeMillis();
+		final HttpServletRequest req = ctx.getRequest();
+
+		boolean isAAFTopic=false;
+		// was this host blacklisted?
+		final String remoteAddr = Utils.getRemoteAddress(ctx);;
+		if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
+		{
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
+				"] is blacklisted. Please contact the cluster management team."
+					,null,Utils.getFormattedDate(new Date()),topic,
+					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+		
+		
+		int limit = CambriaConstants.kNoLimit;
+		if (req.getParameter("limit") != null) {
+			limit = Integer.parseInt(req.getParameter("limit"));
+		}
+
+		int timeoutMs= CambriaConstants.kNoTimeout;
+		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"timeout");
+		if(strtimeoutMS!=null)timeoutMs=Integer.parseInt(strtimeoutMS);
+		//int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", CambriaConstants.kNoTimeout);
+		if (req.getParameter("timeout") != null) {
+			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+		}
+
+		// By default no filter is applied if filter is not passed as a
+		// parameter in the request URI
+		String topicFilter = CambriaConstants.kNoFilter;
+		if (null != req.getParameter("filter")) {
+			topicFilter = req.getParameter("filter");
+		}
+	// pretty: print each message on a new line
+		String prettyval="0";
+		String strPretty=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"pretty");
+		if (null!=strPretty)prettyval=strPretty;
+		
+		String metaval="0";
+		String strmeta=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"meta");
+		if (null!=strmeta)metaval=strmeta;
+		
+		final boolean pretty = rrConvertor
+				.convertToBooleanBroad(prettyval);
+		// withMeta to print offset along with message
+		final boolean withMeta = rrConvertor
+				.convertToBooleanBroad(metaval);
+		
+		
+		/*final boolean pretty = rrConvertor
+				.convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("pretty", "0"));
+		// withMeta to print offset along with message
+		final boolean withMeta = rrConvertor
+				.convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("meta", "0"));
+*/
+		final LogWrap logger = new LogWrap ( topic, consumerGroup, clientId);
+		logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter);
+
+		// is this user allowed to read this topic?
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+		
+		if (metatopic == null) {
+			// no such topic.
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, 
+					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), 
+					errorMessages.getTopicNotExist()+"-[" + topic + "]",null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+		String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+		if (null==metricTopicname)
+   		 metricTopicname="msgrtr.apinode.metrics.dmaap";
+		
+		 if(null==ctx.getRequest().getHeader("Authorization")&& !topic.equalsIgnoreCase(metricTopicname))
+		{	
+			if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))){
+			// check permissions
+			metatopic.checkUserRead(user);	
+			}
+		}
+		// if headers are not provided then user will be null
+		 if(user == null && null!=ctx.getRequest().getHeader("Authorization"))
+		{
+			// the topic name will be sent by the client
+//			String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			String permission = aaf.aafPermissionString(topic, "sub");
+			if(!aaf.aafAuthentication(ctx.getRequest(), permission))
+			{
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+						errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,null,null,
+						clientId,ctx.getRequest().getRemoteHost());
+				LOG.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+				
+			}
+			isAAFTopic = true;
+		}
+		Consumer c = null;
+		try {
+			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+			final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
+			rl.onCall(topic, consumerGroup, clientId);
+
+			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+
+		/*	final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c,
+					ctx.getConfigReader().getSettings()).timeout(timeoutMs).limit(limit).filter(topicFilter)
+							.pretty(pretty).withMeta(withMeta)
+							// .atOffset(topicOffset)
+							.build();*/
+			final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs).limit(limit).filter(topicFilter)
+					.pretty(pretty).withMeta(withMeta).build();
+			coes.setDmaapContext(ctx);
+			coes.setTopic(metatopic);
+			if( isTransEnabled() || isAAFTopic ){
+				coes.setTransEnabled(true);
+			}else{
+			coes.setTransEnabled(false);
+			}
+			coes.setTopicStyle(isAAFTopic);
+            
+			DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+			DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
+
+			// No IOException thrown during respondOkWithStream, so commit the
+			// new offsets to all the brokers
+			c.commitOffsets();
+			final int sent = coes.getSentCount();
+
+			metricsSet.consumeTick(sent);
+			rl.onSend(topic, consumerGroup, clientId, sent);
+
+			final long elapsedMs = System.currentTimeMillis() - startTime;
+			logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset());
+
+		} catch (UnavailableException excp) {
+			logger.warn(excp.getMessage(), excp);
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
+					errorMessages.getServerUnav()+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+			
+		} catch (CambriaApiException excp) {
+			logger.warn(excp.getMessage(), excp);
+			throw excp;
+		} catch (Exception excp) {
+			logger.warn("Couldn't respond to client, closing cambria consumer", excp);
+			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
+					"Couldn't respond to client, closing cambria consumer"+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
+					clientId,ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		} finally {
+			// If no cache, close the consumer now that we're done with it.
+			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+			String strkSetting_EnableCache=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,ConsumerFactory.kSetting_EnableCache);
+			if(null!=strkSetting_EnableCache) kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
+			//if (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache,	ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) {
+			if (!kSetting_EnableCache && (c != null)) {
+				c.close();
+
+			}
+		}
+	}
+
+	/**
+	 * @throws missingReqdSetting 
+	 * 
+	 */
+	@Override
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+			CambriaApiException, IOException, missingReqdSetting,DMaaPAccessDeniedException {
+
+		// is this user allowed to write to this topic?
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+		boolean isAAFTopic=false;
+		
+			// was this host blacklisted?
+				final String remoteAddr = Utils.getRemoteAddress(ctx);
+				
+				if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
+				{
+					
+					ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+							DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
+						"] is blacklisted. Please contact the cluster management team."
+							,null,Utils.getFormattedDate(new Date()),topic,
+							Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+							null,null);
+					LOG.info(errRes.toString());
+					throw new CambriaApiException(errRes);
+				}
+				
+				  String topicNameStd = null;
+	               
+	               //	topicNameStd= ctx.getConfigReader().getSettings().getString("enforced.topic.name.AAF");
+	            	topicNameStd= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
+	            	String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
+	            	 if (null==metricTopicname)
+	            		 metricTopicname="msgrtr.apinode.metrics.dmaap";
+	                boolean topicNameEnforced=false;
+	                if (null != topicNameStd && topic.startsWith(topicNameStd)  )
+	                {
+	                	topicNameEnforced = true;
+	                }
+		
+	               //Here check if the user has rights to publish on the topic
+	               //( This will be called when no auth is added or when UEB API Key Authentication is used)
+	               //checkUserWrite(user) method will throw an error when there is no Auth header added or when the
+	               //user has no publish rights
+	                
+				if(null != metatopic &&  null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) && null==ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) 
+				{
+					metatopic.checkUserWrite(user);
+				}
+
+	
+				
+				// if headers are not provided then user will be null
+		 if(topicNameEnforced || (user == null && null!=ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)))
+		{
+			// the topic name will be sent by the client
+						// String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
+						DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+						String permission = aaf.aafPermissionString(topic, "pub");
+						if(!aaf.aafAuthentication(ctx.getRequest(), permission))
+						{
+							ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
+									DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+									errorMessages.getNotPermitted1()+" publish "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,
+									Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+									null,null);
+							LOG.info(errRes.toString());
+							throw new DMaaPAccessDeniedException(errRes);
+						}
+						isAAFTopic=true;
+		}	
+		 
+		final HttpServletRequest req = ctx.getRequest();
+
+		// check for chunked input
+		boolean chunked = false;
+		if (null != req.getHeader(TRANSFER_ENCODING)) {
+			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+		}
+		// get the media type, or set it to a generic value if it wasn't
+		// provided
+		String mediaType = req.getContentType();
+		if (mediaType == null || mediaType.length() == 0) {
+			mediaType = MimeTypes.kAppGenericBinary;
+		}
+
+		if (mediaType.contains("charset=UTF-8")) {
+			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+		}
+		
+		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+		boolean istransidreqd=false;
+		if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")){
+			istransidreqd = true; 
+		}
+		
+		if (isAAFTopic || istransidreqd ) {
+			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+		}
+		else
+		{
+			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+		}
+			
+
+	}
+
+	/**
+	 * 
+	 * @param ctx
+	 * @param topic
+	 * @param msg
+	 * @param defaultPartition
+	 * @param chunked
+	 * @param mediaType
+	 * @throws ConfigDbException
+	 * @throws AccessDeniedException
+	 * @throws TopicExistsException
+	 * @throws CambriaApiException
+	 * @throws IOException
+	 */
+	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
+			boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+					CambriaApiException, IOException {
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+		
+		long maxEventBatch=1024 * 16;
+		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
+		if(null!=batchlen)maxEventBatch=Long.parseLong(batchlen);
+		
+		// long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		try {
+			// for each message...
+			Publisher.message m = null;
+			while ((m = events.next()) != null) {
+				// add the message to the batch
+				batch.add(m);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						m.getMessage());
+				kms.add(data);
+				// check if the batch is full
+				final int sizeNow = batch.size();
+				if (sizeNow > maxEventBatch) {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					count += sizeNow;
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+				kms.clear();
+				batch.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			DMaaPResponseBuilder.respondOk(ctx, response);
+
+		} catch (Exception excp) {
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg=null;
+			if(excp instanceof CambriaApiException) {
+				 status = ((CambriaApiException) excp).getStatus();
+				 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				 JSONObject errObject = new JSONObject(jsonTokener);
+				 errorMsg = (String) errObject.get("message");
+					
+			}
+			ErrorResponse errRes = new ErrorResponse(status, 
+					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
+					errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
+					null,ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+			
+			
+		}
+	}
+
+	/**
+	 * 
+	 * @param ctx
+	 * @param inputStream
+	 * @param topic
+	 * @param partitionKey
+	 * @param requestTime
+	 * @param chunked
+	 * @param mediaType
+	 * @throws ConfigDbException
+	 * @throws AccessDeniedException
+	 * @throws TopicExistsException
+	 * @throws IOException
+	 * @throws CambriaApiException
+	 */
+	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+					throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+					CambriaApiException {
+
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+		long maxEventBatch =  1024 * 16;
+		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
+			if(null!=evenlen)maxEventBatch=Long.parseLong(evenlen);
+		//final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		Publisher.message m = null;
+		int messageSequence = 1;
+		Long batchId = 1L;
+		final boolean transactionEnabled = true;
+		int publishBatchCount=0;
+		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+		//LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
+		try {
+			// for each message...
+			batchId=DMaaPContext.getBatchID();
+			
+			String responseTransactionId = null;
+			
+			while ((m = events.next()) != null) {
+			
+				//LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
+				
+
+				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+						transactionEnabled);
+				messageSequence++;
+
+				// add the message to the batch
+				batch.add(m);
+				
+				responseTransactionId = m.getLogDetails().getTransactionId();
+				
+				JSONObject jsonObject = new JSONObject();
+				jsonObject.put("message", m.getMessage());
+				jsonObject.put("transactionId", responseTransactionId);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						jsonObject.toString());
+				kms.add(data);
+
+				// check if the batch is full
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					String startTime = sdf.format(new Date());
+					LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
+					try {
+						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+						//transactionLogs(batch);
+						for (message msg : batch) {
+							LogDetails logDetails = msg.getLogDetails();
+							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+						}
+					} catch (Exception excp) {
+						
+						int status = HttpStatus.SC_NOT_FOUND;
+						String errorMsg=null;
+						if(excp instanceof CambriaApiException) {
+							 status = ((CambriaApiException) excp).getStatus();
+							 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+							 JSONObject errObject = new JSONObject(jsonTokener);
+							 errorMsg = (String) errObject.get("message");
+						}
+						ErrorResponse errRes = new ErrorResponse(status, 
+								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
+								"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+ "."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,
+								null,Utils.getFormattedDate(new Date()),topic,
+								Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+								null,null);
+						LOG.info(errRes.toString());
+						throw new CambriaApiException(errRes);
+					}
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					publishBatchCount=sizeNow;
+					count += sizeNow;
+					//batchId++;
+					String endTime = sdf.format(new Date());
+					LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
+							+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
+					batchId=DMaaPContext.getBatchID();
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				String startTime = sdf.format(new Date());
+				LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
+				try {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					//transactionLogs(batch);
+					for (message msg : batch) {
+						LogDetails logDetails = msg.getLogDetails();
+						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+					}
+				} catch (Exception excp) {
+					int status = HttpStatus.SC_NOT_FOUND;
+					String errorMsg=null;
+					if(excp instanceof CambriaApiException) {
+						 status = ((CambriaApiException) excp).getStatus();
+						 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+						 JSONObject errObject = new JSONObject(jsonTokener);
+						 errorMsg = (String) errObject.get("message");
+					}
+					
+					ErrorResponse errRes = new ErrorResponse(status, 
+							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
+							"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+ errorMessages.getPublishMsgCount()+count+"."+errorMsg,
+							null,Utils.getFormattedDate(new Date()),topic,
+							Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+							null,null);
+					LOG.info(errRes.toString());
+					throw new CambriaApiException(errRes);
+				}
+				kms.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+				//batchId++;
+				String endTime = sdf.format(new Date());
+				publishBatchCount=sizeNow;
+				LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
+						+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			if (null != responseTransactionId) {
+				ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
+			}
+			
+			// build a response
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			DMaaPResponseBuilder.respondOk(ctx, response);
+			
+		} catch (Exception excp) {
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg=null;
+			if(excp instanceof CambriaApiException) {
+				 status = ((CambriaApiException) excp).getStatus();
+				 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				 JSONObject errObject = new JSONObject(jsonTokener);
+				 errorMsg = (String) errObject.get("message");
+			}
+			
+			ErrorResponse errRes = new ErrorResponse(
+					status, 
+					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
+					"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
+					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
+					null,null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+
+	/**
+	 * 
+	 * @param msg
+	 * @param topic
+	 * @param request
+	 * @param messageCreationTime
+	 * @param messageSequence
+	 * @param batchId
+	 * @param transactionEnabled
+	 */
+	private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+			final String messageCreationTime, final int messageSequence, final Long batchId,
+			final boolean transactionEnabled) {
+		LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+				transactionEnabled);
+		logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+		msg.setTransactionEnabled(transactionEnabled);
+		msg.setLogDetails(logDetails);
+	}
+
+
+
+	/**
+	 * 
+	 * @author author
+	 *
+	 */
+	private static class LogWrap {
+		private final String fId;
+
+		/**
+		 * constructor initialization
+		 * 
+		 * @param topic
+		 * @param cgroup
+		 * @param cid
+		 */
+		public LogWrap(String topic, String cgroup, String cid) {
+			fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
+		}
+
+		/**
+		 * 
+		 * @param msg
+		 */
+		public void info(String msg) {
+			LOG.info(fId + msg);
+		}
+
+		/**
+		 * 
+		 * @param msg
+		 * @param t
+		 */
+		public void warn(String msg, Exception t) {
+			LOG.warn(fId + msg, t);
+		}
+
+	}
+	
+	private boolean isTransEnabled() {
+		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
+		boolean istransidreqd=false;
+		if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) ){
+			istransidreqd = true; 
+		}
+		
+		return istransidreqd;
+
+	}
+
+	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+		LogDetails logDetails = new LogDetails();
+		logDetails.setTopicId(topicName);
+		logDetails.setMessageTimestamp(messageTimestamp);
+		logDetails.setPublisherId(Utils.getUserApiKey(request));
+		logDetails.setPublisherIp(request.getRemoteHost());
+		logDetails.setMessageBatchId(batchId);
+		logDetails.setMessageSequence(String.valueOf(messageSequence));
+		logDetails.setTransactionEnabled(transactionEnabled);
+		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+		logDetails.setServerIp(request.getLocalAddr());
+		return logDetails;
+	}
+
+	/*public String getMetricsTopic() {
+		return metricsTopic;
+	}
+
+	public void setMetricsTopic(String metricsTopic) {
+		this.metricsTopic = metricsTopic;
+	}*/
+
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java
new file mode 100644
index 0000000..4f6a9a1
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java
@@ -0,0 +1,605 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.LinkedList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Context;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.Consumer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
+import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.backends.Publisher;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.LogDetails;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.resources.CambriaEventSet;
+import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import com.att.nsa.cambria.service.MMService;
+import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.cambria.utils.Utils;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+import kafka.producer.KeyedMessage;
+
+@Service
+public class MMServiceImpl implements MMService {
+	private static final String BATCH_LENGTH = "event.batch.length";
+	private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+	//private static final Logger LOG = Logger.getLogger(MMServiceImpl.class);
+	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class);
+	@Autowired
+	private DMaaPErrorMessages errorMessages;
+
+	@Autowired
+	@Qualifier("configurationReader")
+	private ConfigurationReader configReader;
+
+	// HttpServletRequest object
+	@Context
+	private HttpServletRequest request;
+
+	// HttpServletResponse object
+	@Context
+	private HttpServletResponse response;
+
+	/** No-op: whitelist addition is not supported by this service implementation. */
+	@Override
+	public void addWhiteList() {
+
+	}
+
+	/** No-op: whitelist removal is not supported by this service implementation. */
+	@Override
+	public void removeWhiteList() {
+
+	}
+
+	/** No-op: whitelist listing is not supported by this service implementation. */
+	@Override
+	public void listWhiteList() {
+
+	}
+
+	/**
+	 * Reads messages from the given topic on behalf of (consumerGroup, clientId)
+	 * and returns the rendered event stream as a String.
+	 * <p>
+	 * Rejects blacklisted source addresses and unknown topics with a
+	 * CambriaApiException. Consumer offsets are committed after a successful
+	 * read; on unexpected failure the cached consumer is destroyed. When
+	 * consumer caching is disabled, the consumer is closed in all cases.
+	 *
+	 * @param ctx the request context
+	 * @param topic topic to consume from
+	 * @param consumerGroup consumer group id
+	 * @param clientId consumer/client id within the group
+	 * @return the consumed events, serialized by CambriaOutboundEventStream
+	 */
+	@Override
+	public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+			CambriaApiException, IOException {
+
+		// final long startTime = System.currentTimeMillis();
+		final HttpServletRequest req = ctx.getRequest();
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+		// was this host blacklisted?
+		final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+					ctx.getRequest().getRemoteHost(), null, null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+
+		int limit = CambriaConstants.kNoLimit;
+
+		if (req.getParameter("limit") != null) {
+			limit = Integer.parseInt(req.getParameter("limit"));
+		}
+		// NOTE(review): the parsed "limit" parameter above is unconditionally
+		// overridden here, so this endpoint always returns at most one message.
+		// Confirm this is intentional before removing the dead parsing.
+		limit = 1;
+		// int timeoutMs = 60000;
+		int timeoutMs = CambriaConstants.kNoTimeout;
+		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
+		if (strtimeoutMS != null)
+			timeoutMs = Integer.parseInt(strtimeoutMS);
+		// int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
+		// CambriaConstants.kNoTimeout);
+		if (req.getParameter("timeout") != null) {
+			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+		}
+
+		// By default no filter is applied if filter is not passed as a
+		// parameter in the request URI
+		String topicFilter = CambriaConstants.kNoFilter;
+		if (null != req.getParameter("filter")) {
+			topicFilter = req.getParameter("filter");
+		}
+		// pretty to print the messaages in new line
+		String prettyval = "0";
+		String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
+		if (null != strPretty)
+			prettyval = strPretty;
+
+		String metaval = "0";
+		String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
+		if (null != strmeta)
+			metaval = strmeta;
+
+		final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
+		// withMeta to print offset along with message
+		final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
+
+		// is this user allowed to read this topic?
+		//final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+		if (metatopic == null) {
+			// no such topic.
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+					errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
+					topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+		//String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,	"metrics.send.cambria.topic");
+		/*
+		 * if (null==metricTopicname)
+		 * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null)
+		 * if(null==ctx.getRequest().getHeader("Authorization")&&
+		 * !topic.equalsIgnoreCase(metricTopicname)) { if (null !=
+		 * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check
+		 * permissions metatopic.checkUserRead(user); } }
+		 */
+
+		Consumer c = null;
+		try {
+			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+
+			final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
+					.limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+			coes.setDmaapContext(ctx);
+			coes.setTopic(metatopic);
+
+			DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+			try {
+				coes.write(baos);
+			} catch (Exception ex) {
+				// Was silently swallowed; log it so stream-write failures are
+				// visible. Behavior is unchanged: offsets are still committed below.
+				LOG.warn("Error writing events to output stream for topic " + topic, ex);
+			}
+
+			c.commitOffsets();
+			final int sent = coes.getSentCount();
+
+			metricsSet.consumeTick(sent);
+
+		} catch (UnavailableException excp) {
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+					errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+					null, null, clientId, ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		} catch (CambriaApiException excp) {
+
+			throw excp;
+		} catch (Exception excp) {
+
+			// Unknown failure: drop the cached consumer so the next request
+			// starts from a clean connection.
+			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+					"Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
+					Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		} finally {
+
+			// When consumer caching is disabled, close the consumer regardless
+			// of success or failure.
+			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
+			String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+					ConsumerFactory.kSetting_EnableCache);
+			if (null != strkSetting_EnableCache)
+				kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+			if (!kSetting_EnableCache && (c != null)) {
+				c.close();
+
+			}
+		}
+		return baos.toString();
+	}
+
+	/**
+	 * Entry point for publishing events to a topic. Rejects blacklisted source
+	 * addresses, normalizes the request media type, then dispatches: all topics
+	 * except the internal metrics topic go through the transactional publish
+	 * path; the metrics topic uses the plain (non-transactional) path.
+	 *
+	 * @param ctx              request/response context
+	 * @param topic            target topic name
+	 * @param msg              raw event payload stream
+	 * @param defaultPartition partition key to use when events carry none
+	 * @param requestTime      timestamp of the incoming request, used for transaction logging
+	 */
+	@Override
+	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+					CambriaApiException, IOException, missingReqdSetting {
+
+		//final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		//final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+		// was this host blacklisted?
+		final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+					ctx.getRequest().getRemoteHost(), null, null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+
+		// Removed dead code: "enforced.topic.name.AAF" was read into
+		// topicNameStd/topicNameEnforced but never used by this method.
+		String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				"metrics.send.cambria.topic");
+		if (null == metricTopicname)
+			metricTopicname = "msgrtr.apinode.metrics.dmaap";
+
+		final HttpServletRequest req = ctx.getRequest();
+
+		// Chunked transfer encoding means the payload length is unknown up front.
+		boolean chunked = false;
+		if (null != req.getHeader(TRANSFER_ENCODING)) {
+			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+		}
+
+		String mediaType = req.getContentType();
+		if (mediaType == null || mediaType.length() == 0) {
+			mediaType = MimeTypes.kAppGenericBinary;
+		}
+
+		// Strip the UTF-8 charset suffix so downstream media-type matching works.
+		if (mediaType.contains("charset=UTF-8")) {
+			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+		}
+
+		if (!topic.equalsIgnoreCase(metricTopicname)) {
+			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+		} else {
+			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+		}
+	}
+
+	private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+			final String messageCreationTime, final int messageSequence, final Long batchId,
+			final boolean transactionEnabled) {
+		LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+				transactionEnabled);
+		logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+		msg.setTransactionEnabled(transactionEnabled);
+		msg.setLogDetails(logDetails);
+	}
+
+	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+		LogDetails logDetails = new LogDetails();
+		logDetails.setTopicId(topicName);
+		logDetails.setMessageTimestamp(messageTimestamp);
+		logDetails.setPublisherId(Utils.getUserApiKey(request));
+		logDetails.setPublisherIp(request.getRemoteHost());
+		logDetails.setMessageBatchId(batchId);
+		logDetails.setMessageSequence(String.valueOf(messageSequence));
+		logDetails.setTransactionEnabled(transactionEnabled);
+		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+		logDetails.setServerIp(request.getLocalAddr());
+		return logDetails;
+	}
+
+	/**
+	 * Non-transactional publish path (used for the internal metrics topic).
+	 * Reads events from the input stream, accumulates them into batches of at
+	 * most the configured "event.batch.length", and sends each batch to Kafka.
+	 * Publish failures are rethrown as CambriaApiException with a
+	 * PARTIAL_PUBLISH_MSGS error describing how many messages made it through.
+	 */
+	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+			String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+					CambriaApiException, IOException {
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+
+		long maxEventBatch = 1024 * 16;
+		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+		if (null != batchlen)
+			maxEventBatch = Long.parseLong(batchlen);
+
+		// long maxEventBatch =
+		// ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		try {
+			// for each message...
+			Publisher.message m = null;
+			while ((m = events.next()) != null) {
+				// add the message to the batch
+				batch.add(m);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						m.getMessage());
+				kms.add(data);
+				// check if the batch is full
+				// (">=" keeps the flush threshold consistent with
+				// pushEventsWithTransaction; previously ">" let the batch exceed
+				// maxEventBatch by one message)
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					count += sizeNow;
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+				kms.clear();
+				batch.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response (currently unused; DMaaPResponseBuilder call is
+			// intentionally disabled)
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+			// DMaaPResponseBuilder.respondOk(ctx, response);
+
+		} catch (Exception excp) {
+
+			// Extract the status/message from a CambriaApiException body when
+			// available; otherwise report a generic 404 partial-publish error.
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg = null;
+			if (excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+
+			}
+			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+							+ "." + errorMsg,
+					null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+					null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+
+		}
+	}
+
+	/**
+	 * Transactional publish path. Each event is stamped with transaction
+	 * metadata (batch id, sequence number, timestamps) and wrapped in a JSON
+	 * envelope {"message": ..., "transactionId": ...} before being batched to
+	 * Kafka. Batch start/end markers and per-message publisher details are
+	 * logged for traceability. Any failure is rethrown as a
+	 * CambriaApiException carrying a PARTIAL_PUBLISH_MSGS error with the count
+	 * of messages published so far.
+	 */
+	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+					throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+					CambriaApiException {
+
+		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+		// setup the event set
+		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+		// start processing, building a batch to push to the backend
+		final long startMs = System.currentTimeMillis();
+		long count = 0;
+		long maxEventBatch = 1024 * 16;
+		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+		if (null != evenlen)
+			maxEventBatch = Long.parseLong(evenlen);
+
+		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+
+		Publisher.message m = null;
+		int messageSequence = 1;
+		Long batchId = 1L;
+		final boolean transactionEnabled = true;
+		int publishBatchCount = 0;
+		// NOTE(review): SimpleDateFormat is not thread-safe; this instance is
+		// method-local, so usage here is safe.
+		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+		// LOG.warn("Batch Start Id: " +
+		// Utils.getFromattedBatchSequenceId(batchId));
+		try {
+			// for each message...
+			batchId = DMaaPContext.getBatchID();
+
+			String responseTransactionId = null;
+
+			while ((m = events.next()) != null) {
+
+				// LOG.warn("Batch Start Id: " +
+				// Utils.getFromattedBatchSequenceId(batchId));
+
+				// stamp the message with batch/sequence transaction metadata
+				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+						transactionEnabled);
+				messageSequence++;
+
+				// add the message to the batch
+				batch.add(m);
+
+				responseTransactionId = m.getLogDetails().getTransactionId();
+
+				// wrap the payload with its transaction id for the consumer side
+				JSONObject jsonObject = new JSONObject();
+				jsonObject.put("message", m.getMessage());
+				jsonObject.put("transactionId", responseTransactionId);
+				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+						jsonObject.toString());
+				kms.add(data);
+
+				// check if the batch is full
+				final int sizeNow = batch.size();
+				if (sizeNow >= maxEventBatch) {
+					String startTime = sdf.format(new Date());
+					LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+							+ batchId + "]");
+					try {
+						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+						// transactionLogs(batch);
+						for (message msg : batch) {
+							LogDetails logDetails = msg.getLogDetails();
+							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+						}
+					} catch (Exception excp) {
+
+						// translate a CambriaApiException body into a partial-publish error
+						int status = HttpStatus.SC_NOT_FOUND;
+						String errorMsg = null;
+						if (excp instanceof CambriaApiException) {
+							status = ((CambriaApiException) excp).getStatus();
+							JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+							JSONObject errObject = new JSONObject(jsonTokener);
+							errorMsg = (String) errObject.get("message");
+						}
+						ErrorResponse errRes = new ErrorResponse(status,
+								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+								"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+										+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+								null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+								ctx.getRequest().getRemoteHost(), null, null);
+						LOG.info(errRes.toString());
+						throw new CambriaApiException(errRes);
+					}
+					kms.clear();
+					batch.clear();
+					metricsSet.publishTick(sizeNow);
+					publishBatchCount = sizeNow;
+					count += sizeNow;
+					// batchId++;
+					String endTime = sdf.format(new Date());
+					LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+							+ batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+							+ ",Batch End Time=" + endTime + "]");
+					// fetch a fresh batch id for the next batch
+					batchId = DMaaPContext.getBatchID();
+				}
+			}
+
+			// send the pending batch
+			final int sizeNow = batch.size();
+			if (sizeNow > 0) {
+				String startTime = sdf.format(new Date());
+				LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+						+ batchId + "]");
+				try {
+					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+					// transactionLogs(batch);
+					for (message msg : batch) {
+						LogDetails logDetails = msg.getLogDetails();
+						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+					}
+				} catch (Exception excp) {
+					int status = HttpStatus.SC_NOT_FOUND;
+					String errorMsg = null;
+					if (excp instanceof CambriaApiException) {
+						status = ((CambriaApiException) excp).getStatus();
+						JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+						JSONObject errObject = new JSONObject(jsonTokener);
+						errorMsg = (String) errObject.get("message");
+					}
+
+					ErrorResponse errRes = new ErrorResponse(status,
+							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+							"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+									+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+							null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+							ctx.getRequest().getRemoteHost(), null, null);
+					LOG.info(errRes.toString());
+					throw new CambriaApiException(errRes);
+				}
+				kms.clear();
+				metricsSet.publishTick(sizeNow);
+				count += sizeNow;
+				// batchId++;
+				String endTime = sdf.format(new Date());
+				publishBatchCount = sizeNow;
+				LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+						+ ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+						+ endTime + "]");
+			}
+
+			final long endMs = System.currentTimeMillis();
+			final long totalMs = endMs - startMs;
+
+			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+			// build a response
+			final JSONObject response = new JSONObject();
+			response.put("count", count);
+			response.put("serverTimeMs", totalMs);
+
+		} catch (Exception excp) {
+			int status = HttpStatus.SC_NOT_FOUND;
+			String errorMsg = null;
+			if (excp instanceof CambriaApiException) {
+				status = ((CambriaApiException) excp).getStatus();
+				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+				JSONObject errObject = new JSONObject(jsonTokener);
+				errorMsg = (String) errObject.get("message");
+			}
+
+			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+					"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+							+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+					ctx.getRequest().getRemoteHost(), null, null);
+			LOG.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+		}
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java
new file mode 100644
index 0000000..1a1baf5
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java
@@ -0,0 +1,115 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.springframework.stereotype.Component;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.service.MetricsService;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.metrics.CdmMeasuredItem;
+
+/**
+ * Provides read-only access to the server's runtime metrics: the complete
+ * metrics snapshot as JSON, and individual metrics looked up by name.
+ */
+@Component
+public class MetricsServiceImpl implements MetricsService {
+
+	//private static final Logger LOG = Logger.getLogger(MetricsService.class.toString());
+	// NOTE(review): the logger is keyed to MetricsService.class rather than
+	// MetricsServiceImpl.class — confirm whether that naming is intentional.
+	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class);
+	/**
+	 * Writes the complete metrics snapshot to the response as JSON, with
+	 * caching disabled on the response headers.
+	 * 
+	 * @param ctx request/response context
+	 * @throws IOException if writing the response fails
+	 */
+	@Override
+	
+	public void get(DMaaPContext ctx) throws IOException {
+		LOG.info("Inside  : MetricsServiceImpl : get()");
+		final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
+		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+		final JSONObject result = metrics.toJson();
+		DMaaPResponseBuilder.respondOk(ctx, result);
+		LOG.info("============ Metrics generated : " + result.toString() + "=================");
+
+	}
+
+
+	@Override
+	/**
+	 * Writes a single metric, looked up by name, to the response as JSON
+	 * (both its summary and raw value), with caching disabled.
+	 * 
+	 * @param ctx request/response context
+	 * @param name name of the metric to fetch
+	 * @throws IOException if writing the response fails
+	 * @throws CambriaApiException with a 404 when no metric has the given name
+	 */
+	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException {
+		LOG.info("Inside  : MetricsServiceImpl : getMetricByName()");
+		final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
+
+		final CdmMeasuredItem item = metrics.getItem(name);
+		/**
+		 * check if item is null
+		 */
+		if (item == null) {
+			throw new CambriaApiException(404, "No metric named [" + name + "].");
+		}
+
+		final JSONObject entry = new JSONObject();
+		entry.put("summary", item.summarize());
+		entry.put("raw", item.getRawValueString());
+
+		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+		final JSONObject result = new JSONObject();
+		result.put(name, entry);
+
+		DMaaPResponseBuilder.respondOk(ctx, result);
+		LOG.info("============ Metrics generated : " + entry.toString() + "=================");
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java
new file mode 100644
index 0000000..658523d
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java
@@ -0,0 +1,649 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+/**
+ * 
+ */
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
+import com.att.nsa.cambria.beans.TopicBean;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.nsa.cambria.metabroker.Broker;
+import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
+import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
+import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
+import com.att.nsa.cambria.service.TopicService;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaAcl;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * @author author
+ *
+ */
+@Service
+public class TopicServiceImpl implements TopicService {
+
+	//private static final Logger LOGGER = Logger.getLogger(TopicServiceImpl.class);
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
+	@Autowired
+	private DMaaPErrorMessages errorMessages;
+	
+	//@Value("${msgRtr.topicfactory.aaf}")
+	//private String mrFactory;
+	
+	
+	/**
+	 * @param dmaapContext
+	 * @throws JSONException
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * 
+	 */
+	@Override
+	public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+
+		LOGGER.info("Fetching list of all the topics.");
+		JSONObject json = new JSONObject();
+
+		JSONArray topicsList = new JSONArray();
+
+		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+			topicsList.put(topic.getName());
+		}
+
+		json.put("topics", topicsList);
+
+		LOGGER.info("Returning list of all the topics.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, json);
+
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @throws JSONException
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * 
+	 */
+	public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
+
+		LOGGER.info("Fetching list of all the topics.");
+		JSONObject json = new JSONObject();
+
+		JSONArray topicsList = new JSONArray();
+
+		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
+			JSONObject obj = new JSONObject();
+			obj.put("topicName", topic.getName());
+			//obj.put("description", topic.getDescription());
+			obj.put("owner", topic.getOwner());
+			obj.put("txenabled", topic.isTransactionEnabled());
+			topicsList.put(obj);
+		}
+
+		json.put("topics", topicsList);
+
+		LOGGER.info("Returning list of all the topics.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, json);
+
+	}
+
+	
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 */
+	@Override
+	public void getTopic(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException {
+
+		LOGGER.info("Fetching details of topic " + topicName);
+		Topic t = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == t) {
+			LOGGER.error("Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+		}
+
+		JSONObject o = new JSONObject();
+		o.put ( "name", t.getName () );
+		o.put ( "description", t.getDescription () );
+		
+		if (null!=t.getOwners ())
+		o.put ( "owner", t.getOwners ().iterator ().next () );
+		if(null!=t.getReaderAcl ())
+		o.put ( "readerAcl", aclToJson ( t.getReaderAcl () ) );
+		if(null!=t.getWriterAcl ())
+		o.put ( "writerAcl", aclToJson ( t.getWriterAcl () ) );
+	
+		LOGGER.info("Returning details of topic " + topicName);
+		DMaaPResponseBuilder.respondOk(dmaapContext, o);
+
+	}
+
+	
+	/**
+	 * @param dmaapContext
+	 * @param topicBean
+	 * @throws CambriaApiException
+	 * @throws AccessDeniedException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * @throws JSONException
+	 * 
+	 * 
+	 * 
+	 */
+	@Override
+	public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
+			throws CambriaApiException, DMaaPAccessDeniedException,IOException, TopicExistsException {
+
+		LOGGER.info("Creating topic " + topicBean.getTopicName());
+		
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+		String key = null;
+		String appName=dmaapContext.getRequest().getHeader("AppName");
+		String enfTopicName= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
+	
+		if(user != null)
+		{
+			key = user.getKey();
+			
+			if(  enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >=0 ) {
+				
+				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+				
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+				LOGGER.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+				
+			}
+		}
+				
+		//else if (user==null && (null==dmaapContext.getRequest().getHeader("Authorization") && null == dmaapContext.getRequest().getHeader("cookie")) ) {
+			else if (user == null &&  null==dmaapContext.getRequest().getHeader("Authorization")     && 
+					 (null == appName  &&  null == dmaapContext.getRequest().getHeader("cookie"))) {
+			LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+			
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
+					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+					errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+			LOGGER.info(errRes.toString());
+			throw new DMaaPAccessDeniedException(errRes);
+		}
+		
+		if (user == null &&  (null!=dmaapContext.getRequest().getHeader("Authorization") ||
+					 null != dmaapContext.getRequest().getHeader("cookie"))) {
+			//if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization") || null != dmaapContext.getRequest().getHeader("cookie"))) {
+			 // ACL authentication is not provided so we will use the aaf authentication
+			LOGGER.info("Authorization the topic");
+		
+			String permission = "";
+			String nameSpace="";
+			if(topicBean.getTopicName().indexOf(".")>1)
+			 nameSpace = topicBean.getTopicName().substring(0,topicBean.getTopicName().lastIndexOf("."));
+		
+			 String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
+		
+			//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
+			
+			permission = mrFactoryVal+nameSpace+"|create";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			
+			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+			{
+				
+				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
+				
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+				LOGGER.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+				
+			}else{
+				// if user is null and aaf authentication is ok then key should be ""
+				//key = "";
+				/**
+				 * Added as part of AAF user it should return username
+				 */
+				
+				key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
+				LOGGER.info("key ==================== "+key);
+				
+			}
+		}
+
+		try {
+			final String topicName = topicBean.getTopicName();
+			final String desc = topicBean.getTopicDescription();
+
+			final  int partitions = topicBean.getPartitionCount();
+		
+			final int replicas = topicBean.getReplicationCount();
+			boolean transactionEnabled = topicBean.isTransactionEnabled();
+			
+
+			final Broker metabroker = getMetaBroker(dmaapContext);
+			final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas,
+					transactionEnabled);
+
+			LOGGER.info("Topic created successfully. Sending response");
+			DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
+		} catch (JSONException excp) {
+			
+			LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, 
+					DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), 
+					errorMessages.getIncorrectJson());
+			LOGGER.info(errRes.toString());
+			throw new CambriaApiException(errRes);
+			
+		}
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * @throws CambriaApiException
+	 * @throws AccessDeniedException
+	 */
+	@Override
+	public void deleteTopic(DMaaPContext dmaapContext, String topicName)
+			throws IOException, ConfigDbException, CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+		LOGGER.info("Deleting topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+		if (user == null && null!=dmaapContext.getRequest().getHeader("Authorization")) {
+			LOGGER.info("Authenticating the user, as ACL authentication is not provided");
+//			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+			String permission = "";
+			String nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
+			 String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
+//			String tokens[] = topicName.split(".mr.topic.");
+			permission = mrFactoryVal+nameSpace+"|destroy";
+			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+			{
+				LOGGER.error("Failed to delete topi"+topicName+". Authentication failed.");
+				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" delete "+errorMessages.getNotPermitted2());
+				LOGGER.info(errRes.toString());
+				throw new DMaaPAccessDeniedException(errRes);
+			}
+			
+			
+		}
+
+		final Broker metabroker = getMetaBroker(dmaapContext);
+		final Topic topic = metabroker.getTopic(topicName);
+
+		if (topic == null) {
+			LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		metabroker.deleteTopic(topicName);
+
+		LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
+
+	}
+
+	/**
+	 * 
+	 * @param dmaapContext
+	 * @return
+	 */
+	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * 
+	 */
+	@Override
+	public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException {
+		LOGGER.info("Retrieving list of all the publishers for topic " + topicName);
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (topic == null) {
+			LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException(
+					"Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
+		}
+		
+		
+
+		final NsaAcl acl = topic.getWriterAcl();
+
+		LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
+
+	}
+
+	/**
+	 * 
+	 * @param acl
+	 * @return
+	 */
+	private static JSONObject aclToJson(NsaAcl acl) {
+		final JSONObject o = new JSONObject();
+		if (acl == null) {
+			o.put("enabled", false);
+			o.put("users", new JSONArray());
+		} else {
+			o.put("enabled", acl.isActive());
+
+			final JSONArray a = new JSONArray();
+			for (String user : acl.getUsers()) {
+				a.put(user);
+			}
+			o.put("users", a);
+		}
+		return o;
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 */
+	@Override
+	public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
+			throws IOException, ConfigDbException, TopicExistsException {
+		LOGGER.info("Retrieving list of all the consumers for topic " + topicName);
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (topic == null) {
+			LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException(
+					"Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		final NsaAcl acl = topic.getReaderAcl();
+
+		LOGGER.info("Returning list of all the consumers for topic " + topicName + ". Sending response.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
+
+	}
+
+	/**
+	 * 
+	 * @param t
+	 * @return
+	 */
+	private static JSONObject topicToJson(Topic t) {
+		final JSONObject o = new JSONObject();
+
+		o.put("name", t.getName());
+		o.put("description", t.getDescription());
+		o.put("owner", t.getOwner());
+		o.put("readerAcl", aclToJson(t.getReaderAcl()));
+		o.put("writerAcl", aclToJson(t.getWriterAcl()));
+
+		return o;
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @param producerId
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * @throws AccessDeniedException
+	 * @throws  
+	 * 
+	 */
+	@Override
+	public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException {
+
+		LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+		
+//		if (user == null) {
+//			
+//			LOGGER.info("Authenticating the user, as ACL authentication is not provided");
+////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+//			
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "manage");
+//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+//			{
+//				LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic " + topicName
+//									+ ". Authentication failed.");
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" <Grant publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//			}
+//		}
+
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to permit write access to producer [" + producerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.permitWritesFromUser(producerId, user);
+
+		LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher.");
+
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @param producerId
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * @throws AccessDeniedException
+	 * @throws DMaaPAccessDeniedException 
+	 * 
+	 */
+	@Override
+	public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+//		if (user == null) {
+//			
+////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "manage");
+//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+//			{
+//				LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic " + topicName
+//						+ ". Authentication failed.");
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" <Revoke publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//				
+//			}
+//		}
+
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to revoke write access to producer [" + producerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.denyWritesFromUser(producerId, user);
+
+		LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher.");
+
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @param consumerId
+	 * @throws DMaaPAccessDeniedException 
+	 */
+	@Override
+	public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+//		if (user == null) {
+//			
+////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "manage");
+//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+//			{
+//				LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic " + topicName
+//						+ ". Authentication failed.");
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//			}
+//		}
+
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.permitReadsByUser(consumerId, user);
+
+		LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+				"Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "].");
+	}
+
+	/**
+	 * @param dmaapContext
+	 * @param topicName
+	 * @param consumerId
+	 * @throws DMaaPAccessDeniedException 
+	 */
+	@Override
+	public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
+			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+
+		LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
+		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+//		if (user == null) {
+////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+//			String permission = aaf.aafPermissionString(topicName, "manage");
+//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+//			{
+//				LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic " + topicName
+//						+ ". Authentication failed.");
+//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
+//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
+//						errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
+//				LOGGER.info(errRes);
+//				throw new DMaaPAccessDeniedException(errRes);
+//			}
+//			
+//			
+//		}
+
+		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
+					+ "] does not exist.");
+			throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
+					+ "] for topic. Topic [" + topicName + "] does not exist.");
+		}
+
+		topic.denyReadsByUser(consumerId, user);
+
+		LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName
+				+ "]. Sending response.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
+				"Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "].");
+
+	}
+
+
+	
+	
+	
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java
new file mode 100644
index 0000000..9da2852
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+
+import org.springframework.stereotype.Service;
+
+import com.att.aft.dme2.internal.jettison.json.JSONException;
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.service.TransactionService;
+import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.nsa.configs.ConfigDbException;
+
+/**
+ * Once the transaction rest gateway will be using that time it will provide all
+ * the transaction details like fetching all the transactional objects or get
+ * any particular transaction object details
+ * 
+ * @author author
+ *
+ */
+@Service
+public class TransactionServiceImpl implements TransactionService {
+
+	@Override
+	public void checkTransaction(TransactionObj trnObj) {
+		/* Need to implement the method */
+	}
+
+	@Override
+	public void getAllTransactionObjs(DMaaPContext dmaapContext)
+			throws ConfigDbException, IOException {
+
+		/*
+		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
+		 * 
+		 * LOG.info("configReader : "+configReader.toString());
+		 * 
+		 * final JSONObject result = new JSONObject (); final JSONArray
+		 * transactionIds = new JSONArray (); result.put ( "transactionIds",
+		 * transactionIds );
+		 * 
+		 * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb =
+		 * configReader.getfTranDb();
+		 * 
+		 * for (String transactionId : transDb.loadAllTransactionObjs()) {
+		 * transactionIds.put (transactionId); } LOG.info(
+		 * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : "
+		 * + transactionIds.toString()+"===========");
+		 * DMaaPResponseBuilder.respondOk(dmaapContext, result);
+		 */
+	}
+
+	@Override
+	public void getTransactionObj(DMaaPContext dmaapContext,
+			String transactionId) throws ConfigDbException, JSONException,
+			IOException {
+
+		/*
+		 * if (null != transactionId) {
+		 * 
+		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
+		 * 
+		 * DMaaPTransactionObj trnObj;
+		 * 
+		 * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId);
+		 * 
+		 * 
+		 * if (null != trnObj) { trnObj.serialize(); JSONObject result =
+		 * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext,
+		 * result);
+		 * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+
+		 * result.toString()+"==========="); return; }
+		 * 
+		 * } LOG.info(
+		 * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. "
+		 * +"===========");
+		 */
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java
new file mode 100644
index 0000000..0fbf657
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java
@@ -0,0 +1,206 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.service.impl;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.springframework.stereotype.Service;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
+import com.att.nsa.cambria.metabroker.Topic;
+import com.att.nsa.cambria.service.UIService;
+import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+import kafka.common.TopicExistsException;
+
+/**
+ * UI-facing service returning API-key and topic data as JSON documents for
+ * template display.
+ *
+ * @author author
+ */
+@Service
+public class UIServiceImpl implements UIService {
+
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class);
+
+	/**
+	 * Responds with the hello page template.
+	 *
+	 * @param dmaapContext the request/response context
+	 * @throws IOException if the response cannot be written
+	 */
+	@Override
+	public void hello(DMaaPContext dmaapContext) throws IOException {
+		LOGGER.info("Returning template of hello page.");
+		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html");
+	}
+
+	/**
+	 * Loads every API key record and responds with a JSON object containing an
+	 * "apiKeys" array of {key, email, description} entries.
+	 *
+	 * @param dmaapContext the request/response context
+	 * @throws ConfigDbException if the key store cannot be read
+	 * @throws IOException if the response cannot be written
+	 */
+	@Override
+	public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+		// TODO - We need to work on the templates and how data will be set in
+		// the template
+		LOGGER.info("Fetching list of all api keys and returning in a templated form for display.");
+		final Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords();
+
+		final LinkedList<JSONObject> keyList = new LinkedList<JSONObject>();
+		for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) {
+			final NsaSimpleApiKey key = e.getValue();
+			final JSONObject jsonObject = new JSONObject();
+			jsonObject.put("key", key.getKey());
+			jsonObject.put("email", key.getContactEmail());
+			jsonObject.put("description", key.getDescription());
+			keyList.add(jsonObject);
+		}
+
+		// JSONObject.put(String, Collection) wraps the collection as a JSONArray.
+		final JSONObject jsonList = new JSONObject();
+		jsonList.put("apiKeys", keyList);
+
+		LOGGER.info("Returning list of all the api keys in JSON format for the template.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, jsonList);
+	}
+
+	/**
+	 * Responds with the details of a single API key as JSON.
+	 *
+	 * @param dmaapContext the request/response context
+	 * @param apiKey the key to look up
+	 * @throws Exception if the key does not exist (broad type mandated by the interface)
+	 */
+	@Override
+	public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws Exception {
+		// TODO - We need to work on the templates and how data will be set in
+		// the template
+		LOGGER.info("Fetching details of apikey: " + apiKey);
+		final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey);
+
+		if (null == key) {
+			LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
+			throw new Exception("Key [" + apiKey + "] not found.");
+		}
+
+		LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response");
+		DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
+	}
+
+	/**
+	 * Responds with all topics as a JSON object containing a "topics" array of
+	 * {topicName, description, owner} entries.
+	 *
+	 * Fixes: the original fetched the topic list twice and serialized the raw
+	 * Topic list instead of the JSON array it had just built.
+	 *
+	 * @param dmaapContext the request/response context
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 */
+	@Override
+	public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
+		// TODO - We need to work on the templates and how data will be set in
+		// the template
+		LOGGER.info("Fetching list of all the topics and returning in a templated form for display");
+
+		final JSONObject jsonObject = new JSONObject();
+		final JSONArray topicsArray = new JSONArray();
+
+		// Fetch once; the original called getAllTopics() twice.
+		final List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics();
+		for (Topic topic : topicList) {
+			JSONObject obj = new JSONObject();
+			obj.put("topicName", topic.getName());
+			obj.put("description", topic.getDescription());
+			obj.put("owner", topic.getOwner());
+			topicsArray.put(obj);
+		}
+
+		// Serialize the JSON array built above, not the raw Topic list.
+		jsonObject.put("topics", topicsArray);
+
+		LOGGER.info("Returning the list of topics in templated format for display.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject);
+	}
+
+	/**
+	 * Responds with the details of a single topic as JSON.
+	 *
+	 * @param dmaapContext the request/response context
+	 * @param topicName the topic to look up
+	 * @throws ConfigDbException if the topic store cannot be read
+	 * @throws IOException if the response cannot be written
+	 * @throws TopicExistsException if the topic does not exist (exception type
+	 *             mandated by the interface, despite its misleading name)
+	 */
+	@Override
+	public void getTopic(DMaaPContext dmaapContext, String topicName)
+			throws ConfigDbException, IOException, TopicExistsException {
+		// TODO - We need to work on the templates and how data will be set in
+		// the template
+		LOGGER.info("Fetching details of topic: " + topicName);
+		final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
+
+		if (null == topic) {
+			LOGGER.error("Topic [" + topicName + "] does not exist.");
+			throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
+		}
+
+		final JSONObject json = new JSONObject();
+		json.put("topicName", topic.getName());
+		json.put("description", topic.getDescription());
+		json.put("owner", topic.getOwner());
+
+		LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
+		DMaaPResponseBuilder.respondOk(dmaapContext, json);
+	}
+
+	/**
+	 * @param dmaapContext the request/response context
+	 * @return the API key database from the configuration reader
+	 */
+	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
+		return dmaapContext.getConfigReader().getfApiKeyDb();
+	}
+
+	/**
+	 * @param dmaapContext the request/response context
+	 * @return the Kafka meta broker from the configuration reader
+	 */
+	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
+		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java
new file mode 100644
index 0000000..ea276f9
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java
@@ -0,0 +1,44 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction;
+/**
+ * Factory for DMaaP transaction objects, created either from serialized
+ * transaction data or from a bare transaction id.
+ *
+ * @author author
+ * @param <K> the concrete transaction object type produced
+ */
+public interface DMaaPTransactionFactory<K extends DMaaPTransactionObj> {
+
+	/**
+	 * Builds a transaction object from its serialized representation.
+	 *
+	 * @param data serialized transaction data
+	 * @return a populated transaction object
+	 */
+	K makeNewTransactionObj(String data);
+
+	/**
+	 * Builds a transaction object carrying only its id.
+	 *
+	 * @param id the transaction id
+	 * @return a transaction object initialized with the id
+	 */
+	K makeNewTransactionId(String id);
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java
new file mode 100644
index 0000000..1a9ae0e
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java
@@ -0,0 +1,83 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction;
+
+import org.json.JSONObject;
+/**
+ * Interface for the DMaaP transaction-enabled logging object. Implementations
+ * carry a transaction id together with total/success/failure message counts
+ * and can render themselves as JSON.
+ * @author author
+ *
+ */
+public interface DMaaPTransactionObj {
+	/**
+	 * Gets the transaction id.
+	 * @return the transaction id
+	 */
+	String getId();
+	/**
+	 * Sets the transaction id.
+	 * @param id the transaction id
+	 */
+	void setId(String id);
+	/**
+	 * Serializes this object's state.
+	 * NOTE(review): contract inferred from TransactionObj, which writes the id
+	 * and counters into its JSON data and returns the string form — confirm.
+	 * @return the serialized form, or null
+	 */
+	String serialize();
+	/**
+	 * Gets the total message count recorded for this transaction.
+	 * @return the total message count
+	 */
+	long getTotalMessageCount();
+	/**
+	 * Sets the total message count for this transaction.
+	 * @param totalMessageCount the total message count
+	 */
+	void setTotalMessageCount(long totalMessageCount);
+	/**
+	 * Gets the count of successfully published messages.
+	 * @return the success message count
+	 */
+	long getSuccessMessageCount();
+	/**
+	 * Sets the count of successfully published messages.
+	 * @param successMessageCount the success message count
+	 */
+	void setSuccessMessageCount(long successMessageCount);
+	/**
+	 * Gets the count of messages that failed to publish.
+	 * @return the failure message count
+	 */
+	long getFailureMessageCount();
+	/**
+	 * Sets the count of messages that failed to publish.
+	 * @param failureMessageCount the failure message count
+	 */
+	void setFailureMessageCount(long failureMessageCount);
+
+	/**
+	 * Wraps this object's data in a JSON object.
+	 * @return the JSON representation
+	 */
+	JSONObject asJsonObject();
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java
new file mode 100644
index 0000000..ab5393a
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java
@@ -0,0 +1,86 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction;
+
+import java.util.Set;
+
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.NsaSecurityManagerException;
+
+
+/**
+ * Persistent storage for Transaction Objects and secrets built over an abstract config db. Instances
+ * of this DB must support concurrent access.
+ * @author author
+ *
+ * @param <K> the concrete DMaaPTransactionObj type stored
+ */
+public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
+
+
+	/**
+	 * Create a new Transaction Object. If one already exists for the given id,
+	 * a KeyExistsException is thrown.
+	 * @param id the transaction id
+	 * @return the new Transaction record
+	 * @throws KeyExistsException if a Transaction Object with this id already exists
+	 * @throws ConfigDbException if the underlying config db fails
+	 */
+	K createTransactionObj (String id) throws KeyExistsException, ConfigDbException;
+
+
+	/**
+	 * An exception signaling that a Transaction Object already exists.
+	 * @author author
+	 *
+	 */
+	public static class KeyExistsException extends NsaSecurityManagerException
+	{
+		/**
+		 * Constructs the exception for the duplicate key.
+		 * @param key the key that already exists
+		 */
+		public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); }
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Save a Transaction Object record. This must be used after changing auxiliary data on the record.
+	 * Note that the transaction must exist (via createTransactionObj).
+	 * @param transactionObj the record to persist
+	 * @throws ConfigDbException if the underlying config db fails
+	 */
+	void saveTransactionObj ( K transactionObj ) throws ConfigDbException;
+	
+	/**
+	 * Load a Transaction Object record based on the Transaction ID value.
+	 * @param transactionId the id to look up
+	 * @return a transaction record, or null if none exists
+	 * @throws ConfigDbException if the underlying config db fails
+	 */
+	K loadTransactionObj ( String transactionId ) throws ConfigDbException;
+	
+	/**
+	 * Load the ids of all Transaction objects.
+	 * @return the set of known transaction ids
+	 * @throws ConfigDbException if the underlying config db fails
+	 */
+	Set<String> loadAllTransactionObjs () throws ConfigDbException;
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java b/src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java
new file mode 100644
index 0000000..d6e75ef
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java
@@ -0,0 +1,202 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction;
+
+import org.json.JSONObject;
+
+/**
+ * Transaction-enabled logging object: carries a transaction id plus total,
+ * success and failure message counts, and can serialize itself into its
+ * backing JSON data.
+ *
+ * @author author
+ */
+public class TransactionObj implements DMaaPTransactionObj {
+
+	private static final String AUX_DATA_KEY = "transaction";
+
+	private String id;
+	private String createTime;
+	private long totalMessageCount;
+	private long successMessageCount;
+	private long failureMessageCount;
+	private JSONObject fData = new JSONObject();
+	private TrnRequest trnRequest;
+
+	/**
+	 * Builds a transaction object around existing JSON data and guarantees an
+	 * auxiliary "transaction" sub-object is present within it.
+	 *
+	 * @param data the transaction JSON data
+	 */
+	public TransactionObj(JSONObject data) {
+		fData = data;
+
+		// Touch the accessors the original relied on; note these read plain
+		// fields and do not actually validate the JSON payload.
+		getId();
+		getTotalMessageCount();
+		getSuccessMessageCount();
+		getFailureMessageCount();
+
+		// Ensure the auxiliary data object exists.
+		if (fData.optJSONObject(AUX_DATA_KEY) == null) {
+			fData.put(AUX_DATA_KEY, new JSONObject());
+		}
+	}
+
+	/**
+	 * Builds a transaction object from its id and message counters.
+	 *
+	 * @param id the transaction id
+	 * @param totalMessageCount total messages published
+	 * @param successMessageCount messages published successfully
+	 * @param failureMessageCount messages that failed to publish
+	 */
+	public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) {
+		this.id = id;
+		this.totalMessageCount = totalMessageCount;
+		this.successMessageCount = successMessageCount;
+		this.failureMessageCount = failureMessageCount;
+	}
+
+	/**
+	 * Builds a transaction object carrying only its id.
+	 *
+	 * @param id the transaction id
+	 */
+	public TransactionObj(String id) {
+		this.id = id;
+	}
+
+	/**
+	 * Wraps the backing data in a fresh JSONObject.
+	 *
+	 * @return a shallow copy of the transaction data
+	 */
+	public JSONObject asJsonObject() {
+		return new JSONObject(fData, JSONObject.getNames(fData));
+	}
+
+	/** @return the transaction id */
+	public String getId() { return id; }
+
+	/** @param id the transaction id */
+	public void setId(String id) { this.id = id; }
+
+	/**
+	 * @return the creation time
+	 */
+	public String getCreateTime() { return createTime; }
+
+	/**
+	 * NOTE(review): createTime is never written by serialize() — confirm intended.
+	 * @param createTime the creation time
+	 */
+	public void setCreateTime(String createTime) { this.createTime = createTime; }
+
+	/**
+	 * Writes the id and counters into the backing JSON data and returns its
+	 * string form.
+	 */
+	@Override
+	public String serialize() {
+		fData.put("transactionId", id);
+		fData.put("totalMessageCount", totalMessageCount);
+		fData.put("successMessageCount", successMessageCount);
+		fData.put("failureMessageCount", failureMessageCount);
+		return fData.toString();
+	}
+
+	/** @return total messages published */
+	public long getTotalMessageCount() { return totalMessageCount; }
+
+	/** @param totalMessageCount total messages published */
+	public void setTotalMessageCount(long totalMessageCount) { this.totalMessageCount = totalMessageCount; }
+
+	/** @return messages published successfully */
+	public long getSuccessMessageCount() { return successMessageCount; }
+
+	/** @param successMessageCount messages published successfully */
+	public void setSuccessMessageCount(long successMessageCount) { this.successMessageCount = successMessageCount; }
+
+	/** @return messages that failed to publish */
+	public long getFailureMessageCount() { return failureMessageCount; }
+
+	/** @param failureMessageCount messages that failed to publish */
+	public void setFailureMessageCount(long failureMessageCount) { this.failureMessageCount = failureMessageCount; }
+
+	/** @return the backing JSON data */
+	public JSONObject getfData() { return fData; }
+
+	/** @param fData the backing JSON data */
+	public void setfData(JSONObject fData) { this.fData = fData; }
+
+	/** @return the associated request details bean */
+	public TrnRequest getTrnRequest() { return trnRequest; }
+
+	/** @param trnRequest the associated request details bean */
+	public void setTrnRequest(TrnRequest trnRequest) { this.trnRequest = trnRequest; }
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java b/src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java
new file mode 100644
index 0000000..551570f
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java
@@ -0,0 +1,183 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction;
+
+/**
+ * Bean holding the details of a transaction-enabled logging request.
+ *
+ * @author author
+ */
+public class TrnRequest {
+
+	private String id;
+	private String requestCreate;
+	private String requestHost;
+	private String serverHost;
+	private String messageProceed;
+	private String totalMessage;
+	private String clientType;
+	private String url;
+
+	/** @return the request id */
+	public String getId() { return id; }
+
+	/** @param id the request id */
+	public void setId(String id) { this.id = id; }
+
+	/** @return the request creation time */
+	public String getRequestCreate() { return requestCreate; }
+
+	/** @param requestCreate the request creation time */
+	public void setRequestCreate(String requestCreate) { this.requestCreate = requestCreate; }
+
+	/** @return the requesting host */
+	public String getRequestHost() { return requestHost; }
+
+	/** @param requestHost the requesting host */
+	public void setRequestHost(String requestHost) { this.requestHost = requestHost; }
+
+	/** @return the serving host */
+	public String getServerHost() { return serverHost; }
+
+	/** @param serverHost the serving host */
+	public void setServerHost(String serverHost) { this.serverHost = serverHost; }
+
+	/** @return the processed-message indicator */
+	public String getMessageProceed() { return messageProceed; }
+
+	/** @param messageProceed the processed-message indicator */
+	public void setMessageProceed(String messageProceed) { this.messageProceed = messageProceed; }
+
+	/** @return the total message count */
+	public String getTotalMessage() { return totalMessage; }
+
+	/** @param totalMessage the total message count */
+	public void setTotalMessage(String totalMessage) { this.totalMessage = totalMessage; }
+
+	/** @return the client type */
+	public String getClientType() { return clientType; }
+
+	/** @param clientType the client type */
+	public void setClientType(String clientType) { this.clientType = clientType; }
+
+	/** @return the request url */
+	public String getUrl() { return url; }
+
+	/** @param url the request url */
+	public void setUrl(String url) { this.url = url; }
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
new file mode 100644
index 0000000..10e5da8
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
@@ -0,0 +1,62 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.transaction.impl;
+
+import org.json.JSONObject;
+
+import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
+import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
+import com.att.nsa.cambria.transaction.TransactionObj;
+
+/**
+ * Simple factory implementation producing TransactionObj instances.
+ *
+ * @author author
+ */
+public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
+
+	/**
+	 * Reconstructs a transaction object from its serialized JSON form.
+	 *
+	 * @param data JSON string containing transactionId and the three counters
+	 * @return the reconstructed transaction object
+	 */
+	@Override
+	public DMaaPTransactionObj makeNewTransactionObj(String data) {
+		final JSONObject parsed = new JSONObject(data);
+		final String transactionId = parsed.getString("transactionId");
+		return new TransactionObj(transactionId,
+				parsed.getLong("totalMessageCount"),
+				parsed.getLong("successMessageCount"),
+				parsed.getLong("failureMessageCount"));
+	}
+
+	/**
+	 * Creates a transaction object that carries only the given id.
+	 *
+	 * @param id the transaction id
+	 * @return a new TransactionObj for the id
+	 */
+	@Override
+	public DMaaPTransactionObj makeNewTransactionId(String id) {
+		return new TransactionObj(id);
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java b/src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java
new file mode 100644
index 0000000..4f75653
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java
@@ -0,0 +1,499 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import javax.servlet.ServletException;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.curator.framework.CuratorFramework;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+//import com.att.nsa.apiServer.util.Emailer;
+import com.att.nsa.cambria.utils.Emailer;
+import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.nsa.cambria.backends.Publisher;
+import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.nsa.cambria.backends.memory.MemoryConsumerFactory;
+import com.att.nsa.cambria.backends.memory.MemoryMetaBroker;
+import com.att.nsa.cambria.backends.memory.MemoryQueue;
+import com.att.nsa.cambria.backends.memory.MemoryQueuePublisher;
+//import com.att.nsa.cambria.beans.DMaaPBlacklist;
+import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
+import com.att.nsa.cambria.beans.DMaaPZkConfigDb;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.cambria.metabroker.Broker;
+import com.att.nsa.cambria.security.DMaaPAuthenticator;
+import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.configs.confimpl.MemConfigDb;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.limits.Blacklist;
+import com.att.nsa.security.NsaAuthenticatorService;
+//import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.NsaApiDb;
+import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+
+/**
+ * Class is created for all the configuration for rest and service layer
+ * integration.
+ *
+ * Wires the broker backend (Kafka by default, in-memory as fallback),
+ * metrics, rate limiting, security and config-db plumbing together and
+ * exposes them to the REST layer through getters.
+ */
+@Component
+public class ConfigurationReader {
+
+//	private rrNvReadable settings;
+	private Broker fMetaBroker;
+	private ConsumerFactory fConsumerFactory;
+	private Publisher fPublisher;
+	private MetricsSet fMetrics;
+	@Autowired
+	private DMaaPCambriaLimiter fRateLimiter;
+	private NsaApiDb<NsaSimpleApiKey> fApiKeyDb;
+	/* private DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb; */
+	private DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager;
+	private NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager;
+	// NOTE(review): static — shared by every instance; the most recently
+	// constructed ConfigurationReader wins for getCurator().
+	private static CuratorFramework curator;
+	private ZkClient zk;
+	private DMaaPZkConfigDb fConfigDb;
+	private MemoryQueue q;
+	private MemoryMetaBroker mmb;
+	private Blacklist fIpBlackList;
+	private Emailer fEmailer;
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+	//private static final Logger log = Logger.getLogger(ConfigurationReader.class.toString());
+
+	/**
+	 * constructor to initialize all the values
+	 * 
+	 * @param settings
+	 * @param fMetrics
+	 * @param zk
+	 * @param fConfigDb
+	 * @param fPublisher
+	 * @param curator
+	 * @param fConsumerFactory
+	 * @param fMetaBroker
+	 * @param q
+	 * @param mmb
+	 * @param fApiKeyDb
+	 * @param fSecurityManager
+	 * @throws missingReqdSetting
+	 * @throws invalidSettingValue
+	 * @throws ServletException
+	 * @throws KafkaConsumerCacheException
+	 * @throws ConfigDbException 
+	 */
+	@Autowired
+	public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
+			@Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk,
+			@Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
+			@Qualifier("curator") CuratorFramework curator,
+			@Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
+			@Qualifier("dMaaPKafkaMetaBroker") Broker fMetaBroker, @Qualifier("q") MemoryQueue q,
+			@Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
+			/*
+			 * @Qualifier("dMaaPTranDb")
+			 * DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb,
+			 */
+			@Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
+			)
+					throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException {
+		//this.settings = settings;
+		this.fMetrics = fMetrics;
+		this.zk = zk;
+		this.fConfigDb = fConfigDb;
+		this.fPublisher = fPublisher;
+		// deliberately writes the shared static field (see note on the field)
+		ConfigurationReader.curator = curator;
+		this.fConsumerFactory = fConsumerFactory;
+		this.fMetaBroker = fMetaBroker;
+		this.q = q;
+		this.mmb = mmb;
+		this.fApiKeyDb = fApiKeyDb;
+		/* this.fTranDb = fTranDb; */
+		this.fSecurityManager = fSecurityManager;
+		
+		// allowed authentication time skew: property override, else 10 minutes.
+		// NOTE(review): currently only consumed by the commented-out
+		// NsaAuthenticatorService wiring below — confirm before removing.
+		long allowedtimeSkewMs=600000L;
+		String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs");
+		if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM);
+				
+	//	boolean requireSecureChannel = true;
+		//String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel");
+		//if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel);
+		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true));
+		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel);
+		
+		servletSetup();
+	}
+
+	/**
+	 * Finishes setup that depends on the injected beans: initializes metrics
+	 * reporting, registers an "admin" authenticator when an admin secret is
+	 * configured, selects the broker backend (kafka or memory) and creates
+	 * the IP blacklist and emailer.
+	 *
+	 * @throws ServletException wraps any SecurityException raised during setup
+	 * @throws ConfigDbException if the blacklist config path cannot be read
+	 */
+	protected void servletSetup()
+			throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException {
+		try {
+
+			fMetrics.toJson();
+			fMetrics.setupCambriaSender();
+
+			// add the admin authenticator
+				//		final String adminSecret = settings.getString ( CambriaConstants.kSetting_AdminSecret, null );
+						final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
+						//adminSecret = "fe3cCompound";
+						if ( adminSecret != null && adminSecret.length () > 0 )
+						{
+							try
+							{
+								// admin key lives in a throw-away in-memory DB, never the shared config DB
+								final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
+								adminDb.createApiKey ( "admin", adminSecret );
+								//nsaSecurityManager.addAuthenticator ( new OriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
+						        fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
+							}
+							catch ( KeyExistsException e )
+							{
+								throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
+							}
+						}
+						
+			// setup a backend
+			//final String type = settings.getString(CambriaConstants.kBrokerType, CambriaConstants.kBrokerType_Kafka);
+			 String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
+			if (type==null) type = CambriaConstants.kBrokerType_Kafka;
+			if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
+				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
+
+			} else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
+				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
+
+				// memory backend replaces the injected Kafka-backed beans in place
+				fPublisher = new MemoryQueuePublisher(q, mmb);
+				fMetaBroker = mmb;
+				fConsumerFactory = new MemoryConsumerFactory(q);
+			} else {
+				throw new IllegalArgumentException(
+						"Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
+			}
+			
+			fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
+			this.fEmailer = new Emailer();
+			
+			log.info("Broker Type is:" + type);
+
+		} catch (SecurityException e) {
+			throw new ServletException(e);
+		}
+	}
+
+	/**
+	 * method returns metaBroker
+	 * 
+	 * @return
+	 */
+	public Broker getfMetaBroker() {
+		return fMetaBroker;
+	}
+
+	/**
+	 * method to set the metaBroker
+	 * 
+	 * @param fMetaBroker
+	 */
+	public void setfMetaBroker(Broker fMetaBroker) {
+		this.fMetaBroker = fMetaBroker;
+	}
+
+	/**
+	 * method to get ConsumerFactory Object
+	 * 
+	 * @return
+	 */
+	public ConsumerFactory getfConsumerFactory() {
+		return fConsumerFactory;
+	}
+
+	/**
+	 * method to set the consumerfactory object
+	 * 
+	 * @param fConsumerFactory
+	 */
+	public void setfConsumerFactory(ConsumerFactory fConsumerFactory) {
+		this.fConsumerFactory = fConsumerFactory;
+	}
+
+	/**
+	 * method to get Publisher object
+	 * 
+	 * @return
+	 */
+	public Publisher getfPublisher() {
+		return fPublisher;
+	}
+
+	/**
+	 * method to set Publisher object
+	 * 
+	 * @param fPublisher
+	 */
+	public void setfPublisher(Publisher fPublisher) {
+		this.fPublisher = fPublisher;
+	}
+
+	/**
+	 * method to get MetricsSet Object
+	 * 
+	 * @return
+	 */
+	public MetricsSet getfMetrics() {
+		return fMetrics;
+	}
+
+	/**
+	 * method to set MetricsSet Object
+	 * 
+	 * @param fMetrics
+	 */
+	public void setfMetrics(MetricsSet fMetrics) {
+		this.fMetrics = fMetrics;
+	}
+
+	/**
+	 * method to get DMaaPCambriaLimiter object
+	 * 
+	 * @return
+	 */
+	public DMaaPCambriaLimiter getfRateLimiter() {
+		return fRateLimiter;
+	}
+
+	/**
+	 * method to set DMaaPCambriaLimiter object
+	 * 
+	 * @param fRateLimiter
+	 */
+	public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) {
+		this.fRateLimiter = fRateLimiter;
+	}
+
+	/**
+	 * Method to get DMaaPAuthenticator object
+	 * 
+	 * @return
+	 */
+	public DMaaPAuthenticator<NsaSimpleApiKey> getfSecurityManager() {
+		return fSecurityManager;
+	}
+
+	/**
+	 * method to set DMaaPAuthenticator object
+	 * 
+	 * @param fSecurityManager
+	 */
+	public void setfSecurityManager(DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager) {
+		this.fSecurityManager = fSecurityManager;
+	}
+
+	/**
+	 * method to get rrNvReadable object
+	 * 
+	 * @return
+	 */
+	/*public rrNvReadable getSettings() {
+		return settings;
+	}*/
+
+	/**
+	 * method to set rrNvReadable object
+	 * 
+	 * @param settings
+	 */
+	/*public void setSettings(rrNvReadable settings) {
+		this.settings = settings;
+	}*/
+
+	/**
+	 * method to get CuratorFramework object
+	 * 
+	 * @return
+	 */
+	public static CuratorFramework getCurator() {
+		return curator;
+	}
+
+	/**
+	 * method to set CuratorFramework object
+	 * 
+	 * @param curator
+	 */
+	public static void setCurator(CuratorFramework curator) {
+		ConfigurationReader.curator = curator;
+	}
+
+	/**
+	 * method to get ZkClient object
+	 * 
+	 * @return
+	 */
+	public ZkClient getZk() {
+		return zk;
+	}
+
+	/**
+	 * method to set ZkClient object
+	 * 
+	 * @param zk
+	 */
+	public void setZk(ZkClient zk) {
+		this.zk = zk;
+	}
+
+	/**
+	 * method to get DMaaPZkConfigDb object
+	 * 
+	 * @return
+	 */
+	public DMaaPZkConfigDb getfConfigDb() {
+		return fConfigDb;
+	}
+
+	/**
+	 * method to set DMaaPZkConfigDb object
+	 * 
+	 * @param fConfigDb
+	 */
+	public void setfConfigDb(DMaaPZkConfigDb fConfigDb) {
+		this.fConfigDb = fConfigDb;
+	}
+
+	/**
+	 * method to get MemoryQueue object
+	 * 
+	 * @return
+	 */
+	public MemoryQueue getQ() {
+		return q;
+	}
+
+	/**
+	 * method to set MemoryQueue object
+	 * 
+	 * @param q
+	 */
+	public void setQ(MemoryQueue q) {
+		this.q = q;
+	}
+
+	/**
+	 * method to get MemoryMetaBroker object
+	 * 
+	 * @return
+	 */
+	public MemoryMetaBroker getMmb() {
+		return mmb;
+	}
+
+	/**
+	 * method to set MemoryMetaBroker object
+	 * 
+	 * @param mmb
+	 */
+	public void setMmb(MemoryMetaBroker mmb) {
+		this.mmb = mmb;
+	}
+
+	/**
+	 * method to get NsaApiDb object
+	 * 
+	 * @return
+	 */
+	public NsaApiDb<NsaSimpleApiKey> getfApiKeyDb() {
+		return fApiKeyDb;
+	}
+
+	/**
+	 * method to set NsaApiDb object
+	 * 
+	 * @param fApiKeyDb
+	 */
+	public void setfApiKeyDb(NsaApiDb<NsaSimpleApiKey> fApiKeyDb) {
+		this.fApiKeyDb = fApiKeyDb;
+	}
+
+	/*
+	 * public DMaaPTransactionObjDB<DMaaPTransactionObj> getfTranDb() { return
+	 * fTranDb; }
+	 * 
+	 * public void setfTranDb(DMaaPTransactionObjDB<DMaaPTransactionObj>
+	 * fTranDb) { this.fTranDb = fTranDb; }
+	 */
+	/**
+	 * method to get the zookeeper connection String
+	 * (property override, else CambriaConstants default)
+	 * 
+	 * @param settings
+	 * @return
+	 */
+	public static String getMainZookeeperConnectionString() {
+		//return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers,			CambriaConstants.kDefault_ZkConfigDbServers);
+		
+		 String typeVal = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+		 if (typeVal==null) typeVal=CambriaConstants.kDefault_ZkConfigDbServers;
+		 
+		 return typeVal;
+	}
+
+	/**
+	 * method to get the zookeeper config-db root path
+	 * (property override, else CambriaConstants default)
+	 */
+	public static String getMainZookeeperConnectionSRoot(){
+		String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot);
+	
+		if (null==strVal)
+			strVal=CambriaConstants.kDefault_ZkConfigDbRoot;
+	
+		return strVal;
+	}
+	
+	public Blacklist getfIpBlackList() {
+		return fIpBlackList;
+	}
+
+	public void setfIpBlackList(Blacklist fIpBlackList) {
+		this.fIpBlackList = fIpBlackList;
+	}
+
+	// NOTE(review): nsaSecurityManager is never assigned in this class (its
+	// wiring is commented out in the constructor) — this returns null unless
+	// a caller sets it explicitly.
+	public NsaAuthenticatorService<NsaSimpleApiKey> getNsaSecurityManager() {
+		return nsaSecurityManager;
+	}
+
+	public void setNsaSecurityManager(NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager) {
+		this.nsaSecurityManager = nsaSecurityManager;
+	}
+	
+	// returns the Emailer created in servletSetup()
+	public Emailer getSystemEmailer()
+	  {
+	    return this.fEmailer;
+	  }
+
+
+}
diff --git a/src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java b/src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java
new file mode 100644
index 0000000..4c44c23
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java
@@ -0,0 +1,68 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Factory for the Zookeeper {@link CuratorFramework} client used by the
+ * config db and consumer cache.
+ *
+ * @author author
+ *
+ */
+public class DMaaPCuratorFactory {
+	/**
+	 * Creates a new (not yet started) CuratorFramework client configured
+	 * from the MsgRtrApi properties, falling back to CambriaConstants
+	 * defaults for any missing setting.
+	 *
+	 * @param settings legacy settings source; unused here but retained for API compatibility
+	 * @return a new CuratorFramework client
+	 */
+	public static CuratorFramework getCurator(rrNvReadable settings) {
+		String zkServers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				CambriaConstants.kSetting_ZkConfigDbServers);
+		if (null == zkServers)
+			zkServers = CambriaConstants.kDefault_ZkConfigDbServers;
+
+		String strSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				CambriaConstants.kSetting_ZkSessionTimeoutMs);
+		if (strSessionTimeoutMs == null)
+			strSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs + "";
+		final int sessionTimeoutMs = Integer.parseInt(strSessionTimeoutMs);
+
+		// BUG FIX: the connection timeout previously re-read the *session*
+		// timeout setting (copy/paste error), so the connection-timeout
+		// property was silently ignored. It now reads the connection
+		// timeout setting, with its own default.
+		String strConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				CambriaConstants.kSetting_ZkConnectionTimeoutMs);
+		if (strConnectionTimeoutMs == null)
+			strConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs + "";
+		final int connectionTimeoutMs = Integer.parseInt(strConnectionTimeoutMs);
+
+		return CuratorFrameworkFactory.newClient(zkServers, sessionTimeoutMs, connectionTimeoutMs,
+				new ExponentialBackoffRetry(1000, 5));
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java b/src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java
new file mode 100644
index 0000000..c99e08c
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java
@@ -0,0 +1,359 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.beans.DMaaPContext;
+
+/**
+ * Utility class that builds HTTP responses (success, no-content, error and
+ * streamed bodies) and writes them through the {@link DMaaPContext}.
+ *
+ * All methods are static. Failures while writing a response are logged
+ * rather than propagated, except where a method declares IOException.
+ *
+ * @author author
+ *
+ */
+
+public class DMaaPResponseBuilder {
+
+	//private static Logger log = Logger.getLogger(DMaaPResponseBuilder.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class);
+
+	/** Default buffer size, in bytes, for stream copies (see copyStream). */
+	protected static final int kBufferLength = 4096;
+
+	/**
+	 * Adds the standard anti-caching headers to the response.
+	 *
+	 * @param ctx request/response context
+	 */
+	public static void setNoCacheHeadings(DMaaPContext ctx) {
+		HttpServletResponse response = ctx.getResponse();
+		response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate");
+		response.addHeader("Pragma", "no-cache");
+		response.addHeader("Expires", "0");
+	}
+
+	/**
+	 * Sends a 200 response whose body is the given JSON object,
+	 * pretty-printed and served as application/json.
+	 *
+	 * @param ctx request/response context
+	 * @param result JSON payload
+	 * @throws JSONException if the object cannot be serialized
+	 * @throws IOException if writing the response fails
+	 */
+	public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException {
+		// UTF-8 explicitly: getBytes() without a charset uses the platform
+		// default and can corrupt non-ASCII payloads.
+		respondOkWithStream(ctx, "application/json",
+				new ByteArrayInputStream(result.toString(4).getBytes(StandardCharsets.UTF_8)));
+	}
+
+	/**
+	 * Sets the response status to 204 (No Content); any failure is logged.
+	 *
+	 * @param ctx request/response context
+	 */
+	public static void respondOkNoContent(DMaaPContext ctx) {
+		try {
+			ctx.getResponse().setStatus(204);
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends a 200 response with an HTML body; any failure is logged.
+	 *
+	 * @param ctx request/response context
+	 * @param html the HTML body
+	 */
+	public static void respondOkWithHtml(DMaaPContext ctx, String html) {
+		try {
+			// (removed redundant html.toString(); UTF-8 made explicit)
+			respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.getBytes(StandardCharsets.UTF_8)));
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends a 200 response whose body is copied from the given stream.
+	 *
+	 * @param ctx request/response context
+	 * @param mediaType value for the Content-Type header
+	 * @param is source stream; fully copied, and the response stream is
+	 *           closed afterwards (the input stream is left to the caller)
+	 * @throws IOException if the copy fails
+	 */
+	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is)
+			throws IOException {
+		// delegate to the StreamWriter overload with a writer that copies
+		// the whole input stream into the response
+		respondOkWithStream(ctx, mediaType, new StreamWriter() {
+
+			public void write(OutputStream os) throws IOException {
+				copyStream(is, os);
+			}
+		});
+	}
+
+	/**
+	 * Sends a 200 response whose body is produced by the given writer.
+	 *
+	 * @param ctx request/response context
+	 * @param mediaType value for the Content-Type header
+	 * @param writer callback that writes the response body
+	 * @throws IOException if writing fails
+	 */
+	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
+
+		ctx.getResponse().setStatus(200);
+		OutputStream os = getStreamForBinaryResponse(ctx, mediaType);
+		writer.write(os);
+
+	}
+
+	/**
+	 * Sends an HTTP error with a plain-text message; IO failures are logged.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param msg error message
+	 */
+	public static void respondWithError(DMaaPContext ctx, int errCode, String msg) {
+		try {
+			ctx.getResponse().sendError(errCode, msg);
+		} catch (IOException excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends an HTTP error whose body is the given JSON object.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param body JSON error body
+	 */
+	public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) {
+		try {
+			sendErrorAndBody(ctx, errCode, body.toString(4), "application/json");
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Sends an HTTP error as a JSON body of the form
+	 * {"status": errCode, "message": msg}.
+	 *
+	 * @param ctx request/response context
+	 * @param errCode HTTP status code
+	 * @param msg error message
+	 */
+	public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) {
+		try {
+			JSONObject o = new JSONObject();
+			o.put("status", errCode);
+			o.put("message", msg);
+			respondWithError(ctx, errCode, o);
+
+		} catch (Exception excp) {
+			log.error(excp.getMessage(), excp);
+		}
+	}
+
+	/**
+	 * Copies in to out using the default buffer size and closes out.
+	 *
+	 * @param in source stream
+	 * @param out destination stream (closed on completion)
+	 * @throws IOException if the copy fails
+	 */
+	public static void copyStream(InputStream in, OutputStream out) throws IOException {
+		// use the shared constant rather than a duplicated literal 4096
+		copyStream(in, out, kBufferLength);
+	}
+
+	/**
+	 * Copies in to out with the given buffer size.
+	 * NOTE: closes out when done (historical behavior callers rely on);
+	 * the input stream is left open for the caller to close.
+	 *
+	 * @param in source stream
+	 * @param out destination stream (closed on completion)
+	 * @param bufferSize copy buffer size in bytes
+	 * @throws IOException if the copy fails
+	 */
+	public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException {
+		byte[] buffer = new byte[bufferSize];
+		int len;
+		while ((len = in.read(buffer)) != -1) {
+			out.write(buffer, 0, len);
+		}
+		out.close();
+	}
+
+	/**
+	 * Callback that writes a response body to an output stream.
+	 * (Nested interfaces are implicitly static; redundant modifiers removed.)
+	 */
+	public interface StreamWriter {
+		/**
+		 * Writes the response body.
+		 *
+		 * @param paramOutputStream destination stream
+		 * @throws IOException if writing fails
+		 */
+		void write(OutputStream paramOutputStream) throws IOException;
+	}
+
+	/**
+	 * Returns an output stream for an application/octet-stream response.
+	 *
+	 * @param ctx request/response context
+	 * @return the response stream (or a discarding stream for HEAD requests)
+	 * @throws IOException if the stream cannot be obtained
+	 */
+	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException {
+		return getStreamForBinaryResponse(ctx, "application/octet-stream");
+	}
+
+	/**
+	 * Returns an output stream for a binary response with the given content
+	 * type. For HEAD requests a discarding stream is returned so that no
+	 * entity body is written.
+	 *
+	 * @param ctx request/response context
+	 * @param contentType value for the Content-Type header
+	 * @return the response stream (or a discarding stream for HEAD requests)
+	 * @throws IOException if the stream cannot be obtained
+	 */
+	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
+		ctx.getResponse().setContentType(contentType);
+
+		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+
+		OutputStream os;
+		if (fResponseEntityAllowed) {
+			os = ctx.getResponse().getOutputStream();
+		} else {
+			os = new NullStream();
+		}
+		return os;
+	}
+
+	/**
+	 * Output stream that discards everything written to it
+	 * (used for HEAD responses).
+	 */
+	private static class NullStream extends OutputStream {
+		/**
+		 * @param b byte to discard
+		 */
+		public void write(int b) {
+		}
+	}
+
+	/**
+	 * Writer that discards everything written to it
+	 * (used for HEAD responses).
+	 */
+	private static class NullWriter extends Writer {
+		/**
+		 * Discards the given characters.
+		 * @param cbuf
+		 * @param off
+		 * @param len
+		 */
+		public void write(char[] cbuf, int off, int len) {
+		}
+
+		/** No-op flush. */
+		public void flush() {
+		}
+
+		/** No-op close. */
+		public void close() {
+		}
+	}
+
+	/**
+	 * Sets the error status and writes the given content as the response
+	 * body with the given MIME type; IO failures are logged.
+	 *
+	 * @param ctx request/response context
+	 * @param err HTTP status code
+	 * @param content response body
+	 * @param mimeType value for the Content-Type header
+	 */
+	public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) {
+		try {
+			setStatus(ctx, err);
+			getStreamForTextResponse(ctx, mimeType).println(content);
+		} catch (IOException e) {
+			log.error("Error sending error response: " + e.getMessage(), e);
+		}
+	}
+
+	/**
+	 * Sets the HTTP status code on the response.
+	 *
+	 * @param ctx request/response context
+	 * @param code HTTP status code
+	 */
+	public static void setStatus(DMaaPContext ctx, int code) {
+		ctx.getResponse().setStatus(code);
+	}
+
+	/**
+	 * Returns a text/html PrintWriter for the response.
+	 *
+	 * @param ctx request/response context
+	 * @return the response writer (or a discarding writer for HEAD requests)
+	 * @throws IOException if the writer cannot be obtained
+	 */
+	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException {
+		return getStreamForTextResponse(ctx, "text/html");
+	}
+
+	/**
+	 * Returns a PrintWriter for a text response with the given content type.
+	 * For HEAD requests a discarding writer is returned so that no entity
+	 * body is written.
+	 *
+	 * @param ctx request/response context
+	 * @param contentType value for the Content-Type header
+	 * @return the response writer (or a discarding writer for HEAD requests)
+	 * @throws IOException if the writer cannot be obtained
+	 */
+	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException {
+		ctx.getResponse().setContentType(contentType);
+
+		PrintWriter pw;
+		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
+
+		if (fResponseEntityAllowed) {
+			pw = ctx.getResponse().getWriter();
+		} else {
+			pw = new PrintWriter(new NullWriter());
+		}
+		return pw;
+	}
+}
\ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/utils/Emailer.java b/src/main/java/com/att/nsa/cambria/utils/Emailer.java
new file mode 100644
index 0000000..a71370a
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/Emailer.java
@@ -0,0 +1,214 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import javax.mail.BodyPart;
+import javax.mail.Message;
+import javax.mail.Multipart;
+import javax.mail.PasswordAuthentication;
+import javax.mail.Session;
+import javax.mail.Transport;
+import javax.mail.internet.InternetAddress;
+import javax.mail.internet.MimeBodyPart;
+import javax.mail.internet.MimeMessage;
+import javax.mail.internet.MimeMultipart;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+
+/**
+ * Send an email from a message.
+ * 
+ * @author author
+ */
+public class Emailer
+{
+	public static final String kField_To = "to";
+	public static final String kField_Subject = "subject";
+	public static final String kField_Message = "message";
+
+	public Emailer()
+	{
+		fExec = Executors.newCachedThreadPool ();
+	//	fSettings = settings;
+	}
+
+	public void send ( String to, String subj, String body ) throws IOException
+	{
+		final String[] addrs = to.split ( "," );
+
+		if ( to.length () > 0 )
+		{
+			final MailTask mt = new MailTask ( addrs, subj, body );
+			fExec.submit ( mt );
+		}
+		else
+		{
+			log.warn ( "At least one address is required." );
+		}
+	}
+
+	public void close ()
+	{
+		fExec.shutdown ();
+	}
+
+	private final ExecutorService fExec;
+	//private final rrNvReadable fSettings;
+
+	//private static final Logger log = LoggerFactory.getLogger ( Emailer.class );
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
+	
+	public static final String kSetting_MailAuthUser = "mailLogin";
+	public static final String kSetting_MailAuthPwd = "mailPassword";
+	public static final String kSetting_MailFromEmail = "mailFromEmail";
+	public static final String kSetting_MailFromName = "mailFromName";
+	public static final String kSetting_SmtpServer = "mailSmtpServer";
+	public static final String kSetting_SmtpServerPort = "mailSmtpServerPort";
+	public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl";
+	public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth";
+
+	private class MailTask implements Runnable
+	{
+		public MailTask ( String[] to, String subject, String msgBody )
+		{
+			fToAddrs = to;
+			fSubject = subject;
+			fBody = msgBody;
+		}
+
+		private String getSetting ( String settingKey, String defval )
+		{
+			//return fSettings.getString ( settingKey, defval );
+			String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey);
+			if(strSet==null)strSet=defval;
+			return strSet;
+		}
+
+		// we need to get setting values from the evaluator but also the channel config
+		private void makeSetting ( Properties props, String propKey, String settingKey, String defval )
+		{
+			props.put ( propKey, getSetting ( settingKey, defval ) );
+		}
+
+		private void makeSetting ( Properties props, String propKey, String settingKey, int defval )
+		{
+			makeSetting ( props, propKey, settingKey, "" + defval );
+		}
+
+		private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval )
+		{
+			makeSetting ( props, propKey, settingKey, "" + defval );
+		}
+
+		@Override
+		public void run ()
+		{
+			final StringBuffer tag = new StringBuffer ();
+			final StringBuffer addrList = new StringBuffer ();
+			tag.append ( "(" );
+			for ( String to : fToAddrs )
+			{
+				if ( addrList.length () > 0 )
+				{
+					addrList.append ( ", " );
+				}
+				addrList.append ( to );
+			}
+			tag.append ( addrList.toString () );
+			tag.append ( ") \"" );
+			tag.append ( fSubject );
+			tag.append ( "\"" );
+			
+			log.info ( "sending mail to " + tag );
+
+			try
+			{
+				final Properties prop = new Properties ();
+				makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 );
+				prop.put ( "mail.smtp.socketFactory.fallback", "false" );
+				prop.put ( "mail.smtp.quitwait", "false" );
+				makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" );
+				makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true );
+				makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
+
+				final String un = getSetting ( kSetting_MailAuthUser, "" );
+				final String pw = getSetting ( kSetting_MailAuthPwd, "" );
+				final Session session = Session.getInstance ( prop,
+					new javax.mail.Authenticator()
+					{
+						@Override
+						protected PasswordAuthentication getPasswordAuthentication()
+						{
+							return new PasswordAuthentication ( un, pw );
+						}
+					}
+				);
+				
+				final Message msg = new MimeMessage ( session );
+
+				final InternetAddress from = new InternetAddress (
+					getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ),
+					getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) );
+				msg.setFrom ( from );
+				msg.setReplyTo ( new InternetAddress[] { from } );
+				msg.setSubject ( fSubject );
+
+				for ( String toAddr : fToAddrs )
+				{
+					final InternetAddress to = new InternetAddress ( toAddr );
+					msg.addRecipient ( Message.RecipientType.TO, to );
+				}
+
+				final Multipart multipart = new MimeMultipart ( "related" );
+				final BodyPart htmlPart = new MimeBodyPart ();
+				htmlPart.setContent ( fBody, "text/plain" );
+				multipart.addBodyPart ( htmlPart );
+				msg.setContent ( multipart );
+
+				Transport.send ( msg );
+
+				log.info ( "mailing " + tag + " off without error" );
+			}
+			catch ( Exception e )
+			{
+				log.warn ( "Exception caught for " + tag, e );
+			}
+		}
+
+		private final String[] fToAddrs;
+		private final String fSubject;
+		private final String fBody;
+	}
+}
diff --git a/src/main/java/com/att/nsa/cambria/utils/PropertyReader.java b/src/main/java/com/att/nsa/cambria/utils/PropertyReader.java
new file mode 100644
index 0000000..bda1c14
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/PropertyReader.java
@@ -0,0 +1,133 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
+
+/**
+ * 
+ * @author 
+ *
+ *
+ */
+public class PropertyReader extends nvReadableStack {
+	/**
+	 * 
+	 * initializing logger
+	 * 
+	 */
+	//private static final Logger LOGGER = Logger.getLogger(PropertyReader.class);
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class);
+//	private static final String MSGRTR_PROPERTIES_FILE = "msgRtrApi.properties";
+
+	/**
+	 * constructor initialization
+	 * 
+	 * @throws loadException
+	 * 
+	 */
+	public PropertyReader() throws loadException {
+	/*	Map<String, String> argMap = new HashMap<String, String>();
+		final String config = getSetting(argMap, CambriaConstants.kConfig, MSGRTR_PROPERTIES_FILE);
+		final URL settingStream = findStream(config, ConfigurationReader.class);
+		push(new nvPropertiesFile(settingStream));
+		push(new nvReadableTable(argMap));*/
+	}
+
+	/**
+	 * 
+	 * 
+	 * @param argMap
+	 * @param key
+	 * @param defaultValue
+	 * @return
+	 * 
+	 */
+	@SuppressWarnings("unused")
+	private static String getSetting(Map<String, String> argMap, final String key, final String defaultValue) {
+		String val = (String) argMap.get(key);
+		if (null == val) {
+			return defaultValue;
+		}
+		return val;
+	}
+
+	/**
+	 * 
+	 * @param resourceName
+	 * @param clazz
+	 * @return
+	 * @exception MalformedURLException
+	 * 
+	 */
+	/*public static URL findStream(final String resourceName, Class<?> clazz) {
+		try {
+			File file = new File(resourceName);
+
+			if (file.isAbsolute()) {
+				return file.toURI().toURL();
+			}
+
+			String filesRoot = System.getProperty("RRWT_FILES", null);
+
+			if (null != filesRoot) {
+
+				String fullPath = filesRoot + "/" + resourceName;
+
+				LOGGER.debug("Looking for [" + fullPath + "].");
+
+				file = new File(fullPath);
+				if (file.exists()) {
+					return file.toURI().toURL();
+				}
+			}
+
+			URL res = clazz.getClassLoader().getResource(resourceName);
+
+			if (null != res) {
+				return res;
+			}
+
+			res = ClassLoader.getSystemResource(resourceName);
+
+			if (null != res) {
+				return res;
+			}
+		} catch (MalformedURLException e) {
+			LOGGER.error("Unexpected failure to convert a local filename into a URL: " + e.getMessage(), e);
+		}
+		return null;
+	}
+*/
+}
diff --git a/src/main/java/com/att/nsa/cambria/utils/Utils.java b/src/main/java/com/att/nsa/cambria/utils/Utils.java
new file mode 100644
index 0000000..6538576
--- /dev/null
+++ b/src/main/java/com/att/nsa/cambria/utils/Utils.java
@@ -0,0 +1,145 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.cambria.utils;
+
+import java.text.DecimalFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Enumeration;
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import com.att.nsa.cambria.beans.DMaaPContext;
+/**
+ * This is an utility class for various operations for formatting
+ * @author author
+ *
+ */
+public class Utils {
+
+	private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+	public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
+	private static final String BATCH_ID_FORMAT = "000000";
+
+	private Utils() {
+		super();
+	}
+
+	/**
+	 * Formatting the date 
+	 * @param date
+	 * @return date or null
+	 */
+	public static String getFormattedDate(Date date) {
+		SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
+		if (null != date){
+			return sdf.format(date);
+		}
+		return null;
+	}
+	/**
+	 * to get the details of User Api Key
+	 * @param request
+	 * @return authkey or null
+	 */
+	public static String getUserApiKey(HttpServletRequest request) {
+		final String auth = request.getHeader(CAMBRIA_AUTH_HEADER);
+		if (null != auth) {
+			final String[] splittedAuthKey = auth.split(":");
+			return splittedAuthKey[0];
+		}else if (null!=request.getHeader("Authorization")){
+			/**
+			 * AAF implementation enhancement
+			 */
+			 String user= request.getUserPrincipal().getName().toString();
+			return user.substring(0, user.lastIndexOf("@"));
+		}
+		return null;
+	}
+	/**
+	 * to format the batch sequence id
+	 * @param batchId
+	 * @return batchId
+	 */
+	public static String getFromattedBatchSequenceId(Long batchId) {
+		DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT);
+		return format.format(batchId);
+	}
+
+	/**
+	 * to get the message length in bytes
+	 * @param message
+	 * @return bytes or 0
+	 */
+	public static long messageLengthInBytes(String message) {
+		if (null != message) {
+			return message.getBytes().length;
+		}
+		return 0;
+	}
+	/**
+	 * To get transaction id details
+	 * @param transactionId
+	 * @return transactionId or null
+	 */
+	public static String getResponseTransactionId(String transactionId) {
+		if (null != transactionId && !transactionId.isEmpty()) {
+			return transactionId.substring(0, transactionId.lastIndexOf("::"));
+		}
+		return null;
+	}
+
+	/**
+	 * get the thread sleep time
+	 * @param ratePerMinute
+	 * @return ratePerMinute or 0
+	 */
+	public static long getSleepMsForRate ( double ratePerMinute )
+	{
+		if ( ratePerMinute <= 0.0 ) return 0;
+		return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) );
+	}
+
+	  public static String getRemoteAddress(DMaaPContext ctx)
+	  {
+	    String reqAddr = ctx.getRequest().getRemoteAddr();
+	    String fwdHeader = getFirstHeader("X-Forwarded-For",ctx);
+	    return ((fwdHeader != null) ? fwdHeader : reqAddr);
+	  }
+	  public static String getFirstHeader(String h,DMaaPContext ctx)
+	  {
+	    List l = getHeader(h,ctx);
+	    return ((l.size() > 0) ? (String)l.iterator().next() : null);
+	  }
+	  public static List<String> getHeader(String h,DMaaPContext ctx)
+	  {
+	    LinkedList list = new LinkedList();
+	    Enumeration e = ctx.getRequest().getHeaders(h);
+	    while (e.hasMoreElements())
+	    {
+	      list.add(e.nextElement().toString());
+	    }
+	    return list;
+	  }
+}
diff --git a/src/main/java/com/att/nsa/filter/ContentLengthFilter.java b/src/main/java/com/att/nsa/filter/ContentLengthFilter.java
new file mode 100644
index 0000000..d70da66
--- /dev/null
+++ b/src/main/java/com/att/nsa/filter/ContentLengthFilter.java
@@ -0,0 +1,134 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.filter;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.http.HttpStatus;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.json.JSONObject;
+import org.springframework.context.ApplicationContext;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+
+import com.att.nsa.cambria.CambriaApiException;
+import com.att.nsa.cambria.exception.DMaaPErrorMessages;
+import com.att.nsa.cambria.exception.DMaaPResponseCode;
+import com.att.nsa.cambria.exception.ErrorResponse;
+
+/**
+ * Servlet Filter implementation class ContentLengthFilter
+ */
+public class ContentLengthFilter implements Filter {
+
+	private DefaultLength defaultLength;
+
+	private FilterConfig filterConfig = null;
+	DMaaPErrorMessages errorMessages = null;
+	//private Logger log = Logger.getLogger(ContentLengthFilter.class.toString());
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class);
+	/**
+	 * Default constructor.
+	 */
+
+	public ContentLengthFilter() {
+		// TODO Auto-generated constructor stub
+	}
+
+	/**
+	 * @see Filter#destroy()
+	 */
+	public void destroy() {
+		// TODO Auto-generated method stub
+	}
+
+	/**
+	 * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
+	 */
+	public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
+			ServletException {
+		// TODO Auto-generated method stub
+		// place your code here
+		log.info("inside servlet do filter content length checking before pub/sub");
+		HttpServletRequest request = (HttpServletRequest) req;
+		JSONObject jsonObj = null;
+		int requestLength = 0;
+		try {
+			// retrieving content length from message header
+
+			if (null != request.getHeader("Content-Length")) {
+				requestLength = Integer.parseInt(request.getHeader("Content-Length"));
+			}
+			// retrieving encoding from message header
+			String transferEncoding = request.getHeader("Transfer-Encoding");
+			// checking for no encoding, chunked and requestLength greater then
+			// default length
+			if (null != transferEncoding && !(transferEncoding.contains("chunked"))
+					&& (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+				jsonObj = new JSONObject().append("defaultlength", defaultLength)
+						.append("requestlength", requestLength);
+				log.error("message length is greater than default");
+				throw new CambriaApiException(jsonObj);
+			} else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
+				jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append(
+						"requestlength", requestLength);
+				log.error("Request message is not chunked or request length is greater than default length");
+				throw new CambriaApiException(jsonObj);
+			} else {
+				chain.doFilter(req, res);
+			}
+		} catch (CambriaApiException | NumberFormatException e) {
+			log.error("message size is greater then default");
+			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
+					DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
+							+ jsonObj.toString());
+			log.info(errRes.toString());
+			// throw new CambriaApiException(errRes);
+		}
+
+	}
+
+	/**
+	 * @see Filter#init(FilterConfig)
+	 */
+	public void init(FilterConfig fConfig) throws ServletException {
+		// TODO Auto-generated method stub
+		this.filterConfig = fConfig;
+		log.info("Filter Content Length Initialize");
+		ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig
+				.getServletContext());
+		DefaultLength defLength = (DefaultLength) ctx.getBean("defLength");
+		DMaaPErrorMessages errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages");
+		this.errorMessages = errorMessages;
+		this.defaultLength = defLength;
+
+	}
+
+}
diff --git a/src/main/java/com/att/nsa/filter/DefaultLength.java b/src/main/java/com/att/nsa/filter/DefaultLength.java
new file mode 100644
index 0000000..c8a6d0a
--- /dev/null
+++ b/src/main/java/com/att/nsa/filter/DefaultLength.java
@@ -0,0 +1,37 @@
+/*******************************************************************************
+ *  ============LICENSE_START=======================================================
+ *  org.onap.dmaap
+ *  ================================================================================
+ *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *  ============LICENSE_END=========================================================
+ *
+ *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *  
+ *******************************************************************************/
+package com.att.nsa.filter;
+
+
/**
 * Spring-configured holder for the maximum allowed request content length
 * (bean "defLength"), consumed by ContentLengthFilter.
 */
public class DefaultLength {

	// maximum content length; kept as a String because it is injected from configuration
	private String defaultLength;

	/**
	 * @return the configured maximum content length, or null when not set
	 */
	public String getDefaultLength() {
		return defaultLength;
	}

	/**
	 * @param defaultLength maximum content length as a numeric string
	 */
	public void setDefaultLength(String defaultLength) {
		this.defaultLength = defaultLength;
	}

}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
deleted file mode 100644
index b8e5212..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.apiServer.metrics.cambria;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.metrics.CdmMetricsRegistry;
-import com.att.nsa.metrics.impl.CdmConstant;
-
-/**
- * MetricsSender will send the given metrics registry content as an event on the
- * Cambria event broker to the given topic.
- * 
- * @author author
- *
- */
-public class DMaaPMetricsSender implements Runnable {
-	public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled";
-	public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl";
-	public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic";
-	public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds";
-
-	/**
-	 * Schedule a periodic send of the given metrics registry using the given
-	 * settings container for the Cambria location, topic, and send frequency.
-	 * <br/>
-	 * <br/>
-	 * If the enabled flag is false, this method returns null.
-	 * 
-	 * @param scheduler
-	 * @param metrics
-	 * @param settings
-	 * @param defaultTopic
-	 * @return a handle to the scheduled task
-	 */
-	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
-			 String defaultTopic) {
-		log.info("Inside : DMaaPMetricsSender : sendPeriodically");
-	String	cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
-	boolean setEnable=true;
-	if (cambriaSetting!=null && cambriaSetting.equals("false") )
-	setEnable= false;
-	//System.out.println(setEnable+"XXXXXXXXXXXXXXXX");
-		if (setEnable) {
-			String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
-			
-			Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
-			
-			String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
-			if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";     
-			
-	//		Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?defaultTopic:Setting_CambriaBaseUrl;
-			
-			String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
-			
-			int _CambriaSendFreqSecs =30;
-			if(Setting_CambriaSendFreqSecs!=null){
-				 _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs);
-			}
-			
-
-			return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
-					Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs
-				);
-			/*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
-					settings.getString(kSetting_CambriaBaseUrl, "localhost"),
-					settings.getString(kSetting_CambriaTopic, defaultTopic),
-					settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/
-		} else {
-			return null;
-		}
-	}
-
-	/**
-	 * Schedule a periodic send of the metrics registry to the given Cambria
-	 * broker and topic.
-	 * 
-	 * @param scheduler
-	 * @param metrics
-	 *            the registry to send
-	 * @param cambriaBaseUrl
-	 *            the base URL for Cambria
-	 * @param topic
-	 *            the topic to publish on
-	 * @param everySeconds
-	 *            how frequently to publish
-	 * @return a handle to the scheduled task
-	 */
-	public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
-			String cambriaBaseUrl, String topic, int everySeconds) {
-		return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds,
-				everySeconds, TimeUnit.SECONDS);
-	}
-
-	/**
-	 * Create a metrics sender.
-	 * 
-	 * @param metrics
-	 * @param cambriaBaseUrl
-	 * @param topic
-	 */
-	public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) {
-		try {
-			fMetrics = metrics;
-			fHostname = InetAddress.getLocalHost().getHostName();
-
-			// setup a "simple" publisher that will send metrics immediately
-			fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
-		} catch (UnknownHostException e) {
-			log.warn("Unable to get localhost address in MetricsSender constructor.", e);
-			throw new RuntimeException(e);
-		}
-	}
-
-	/**
-	 * Send on demand.
-	 */
-	public void send() {
-		try {
-			final JSONObject o = fMetrics.toJson();
-			o.put("hostname", fHostname);
-			o.put("now", System.currentTimeMillis());
-			o.put("metricsSendTime", addTimeStamp());
-			o.put("transactionEnabled", false);
-			fCambria.send(fHostname, o.toString());
-		} catch (JSONException e) {
-			log.warn("Error posting metrics to Cambria: " + e.getMessage());
-		} catch (IOException e) {
-			log.warn("Error posting metrics to Cambria: " + e.getMessage());
-		}
-	}
-
-	/**
-	 * Run() calls send(). It's meant for use in a background-scheduled task.
-	 */
-	@Override
-	public void run() {
-		send();
-	}
-
-	private final CdmMetricsRegistry fMetrics;
-	private final CambriaPublisher fCambria;
-	private final String fHostname;
-
-	//private static final Logger log = LoggerFactory.getLogger(MetricsSender.class);
-
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
-	/**
-	 * method creates and returnd CdmConstant object using current timestamp
-	 * 
-	 * @return
-	 */
-	public CdmConstant addTimeStamp() {
-		// Add the timestamp with every metrics send
-		final long metricsSendTime = System.currentTimeMillis();
-		final Date d = new Date(metricsSendTime);
-		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
-		return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text);
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java
deleted file mode 100644
index e627f23..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiException.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
-
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-
-import com.att.nsa.apiServer.NsaAppException;
-
-public class CambriaApiException extends NsaAppException
-{
-	
-	private ErrorResponse errRes;
-	/**
-	 * Implements constructor CambriaApiException
-	 * @param jsonObject
-	 * 
-	 */
-	public CambriaApiException ( JSONObject jsonObject )
-	{
-		super ( jsonObject );
-	}
-
-	/**
-	 * Implements constructor CambriaApiException
-	 * @param status
-	 * @param msg
-	 */
-	public CambriaApiException ( int status, String msg )
-	{
-		super ( status, msg );
-	}
-
-	/**
-	 * Implements constructor CambriaApiException
-	 * @param status
-	 * @param jsonObject
-	 */
-	public CambriaApiException ( int status, JSONObject jsonObject )
-	{
-		super ( status, jsonObject );
-	}
-	
-	public CambriaApiException (ErrorResponse errRes)
-	{
-		super(errRes.getHttpStatusCode(),errRes.getErrorMessage());
-		this.errRes = errRes;
-	}
-	
-	/*
-	 * defined long type constant serialVersionUID
-	 */
-	private static final long serialVersionUID = 1L;
-	public ErrorResponse getErrRes() {
-		return errRes;
-	}
-
-	public void setErrRes(ErrorResponse errRes) {
-		this.errRes = errRes;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java
deleted file mode 100644
index ec9e43f..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/CambriaApiVersionInfo.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-/**
- * CambriaApiVersionInfo will provide the version of cambria code
- * 
- * @author author
- *
- */
-public class CambriaApiVersionInfo {
-    
-	/**
-	 * 3 constants are defined:-
-	 * PROPS,VERSION and LOG
-	 */
-	
-	private static final Properties PROPS = new Properties();
-    private static final String VERSION;
-
-
-    private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class);
-    
-    /**
-     * private constructor created with no argument
-     * to avoid default constructor
-     */
-    private CambriaApiVersionInfo()
-    {
-    	
-    }
-    
-    /**
-     * returns version of String type
-     */
-    public static String getVersion() {
-        return VERSION;
-    }
-
-    /** 
-     * 
-     * defines static initialization method
-     * It initializes VERSION Constant
-     * it handles exception in try catch block 
-     * and throws IOException
-     * 
-     */
-    
-    static {
-        String use = null;
-        try {
-            final InputStream is = CambriaApiVersionInfo.class
-                    .getResourceAsStream("/cambriaApiVersion.properties");
-            if (is != null) {
-            	PROPS.load(is);
-                use = PROPS.getProperty("cambriaApiVersion", null);
-            }
-        } catch (IOException e) {
-            LOG.error("Failed due to IO EXception:"+e);
-        }
-        VERSION = use;
-    }
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java
deleted file mode 100644
index 8e5aa76..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Consumer.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
-
-/**
- * A consumer interface. Consumers pull the next message from a given topic.
- * @author author
- */
-public interface Consumer
-{	
-	/**
-	 * A message interface provide the offset and message
-	 * @author author
-	 *
-	 */
-	public interface Message
-	{	
-		/**
-		 * returning the offset of that particular message 
-		 * @return long
-		 */
-		long getOffset ();
-		/**
-		 * returning the message 
-		 * @return message
-		 */
-		String getMessage ();
-	}
-
-	/**
-	 * Get this consumer's name
-	 * @return name
-	 */
-	String getName ();
-
-	/**
-	 * Get creation time in ms
-	 * @return
-	 */
-	long getCreateTimeMs ();
-
-	/**
-	 * Get last access time in ms
-	 * @return
-	 */
-	long getLastAccessMs ();
-	
-	/**
-	 * Get the next message from this source. This method must not block.
-	 * @return the next message, or null if none are waiting
-	 */
-	Message nextMessage ();
-
-	/**
-	 * Get the next message from this source. This method must not block.
-	 * @param atOffset start with the next message at or after atOffset. -1 means next from last request
-	 * @return the next message, or null if none are waiting
-	 */
-//	Message nextMessage ( long atOffset );
-
-	/**
-	 * Close/clean up this consumer
-	 */
-	void close();
-	
-	/**
-	 * Commit the offset of the last consumed message
-	 * 
-	 */
-	void commitOffsets();
-	
-	/**
-	 * Get the offset this consumer is currently at
-	 * @return offset
-	 */
-	long getOffset();
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java
deleted file mode 100644
index dddca63..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/ConsumerFactory.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
-
-import java.util.Collection;
-
-/**
- * This is the factory class to instantiate the consumer
- * 
- * @author author
- *
- */
-
-public interface ConsumerFactory {
-	public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled";
-	public static boolean kDefault_IsCacheEnabled = true;
-
-	/**
-	 * User defined exception for Unavailable Exception
-	 * 
-	 * @author author
-	 *
-	 */
-	public class UnavailableException extends Exception {
-		/**
-		 * Unavailable Exception with message
-		 * 
-		 * @param msg
-		 */
-		public UnavailableException(String msg) {
-			super(msg);
-		}
-
-		/**
-		 * Unavailable Exception with the throwable object
-		 * 
-		 * @param t
-		 */
-		public UnavailableException(Throwable t) {
-			super(t);
-		}
-
-		/**
-		 * Unavailable Exception with the message and cause
-		 * 
-		 * @param msg
-		 * @param cause
-		 */
-		public UnavailableException(String msg, Throwable cause) {
-			super(msg, cause);
-		}
-
-		private static final long serialVersionUID = 1L;
-	}
-
-	/**
-	 * For admin use, drop all cached consumers.
-	 */
-	public void dropCache();
-
-	/**
-	 * Get or create a consumer for the given set of info (topic, group, id)
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param clientId
-	 * @param timeoutMs
-	 * @return
-	 * @throws UnavailableException
-	 */
-	public Consumer getConsumerFor(String topic, String consumerGroupId,
-			String clientId, int timeoutMs) throws UnavailableException;
-
-	/**
-	 * For factories that employ a caching mechanism, this allows callers to
-	 * explicitly destory a consumer that resides in the factory's cache.
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param clientId
-	 */
-	public void destroyConsumer(String topic, String consumerGroupId,
-			String clientId);
-
-	/**
-	 * For admin/debug, we provide access to the consumers
-	 * 
-	 * @return a collection of consumers
-	 */
-	public Collection<? extends Consumer> getConsumers();
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java
deleted file mode 100644
index f0900ff..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/MetricsSet.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
-
-import com.att.nsa.metrics.CdmMetricsRegistry;
-/**
- * This interface will help to generate metrics
- * @author author
- *
- */
-public interface MetricsSet extends CdmMetricsRegistry{
-
-	/**
-	 * This method will setup cambria sender code
-	 */
-	public void setupCambriaSender ();
-	/**
-	 * This method will define on route complete
-	 * @param name
-	 * @param durationMs
-	 */
-	public void onRouteComplete ( String name, long durationMs );
-	/**
-	 * This method will help the kafka publisher while publishing the messages
-	 * @param amount
-	 */
-	public void publishTick ( int amount );
-	/**
-	 * This method will help the kafka consumer while consuming the messages
-	 * @param amount
-	 */
-	public void consumeTick ( int amount );
-	/**
-	 * This method will call if the kafka consumer cache missed 
-	 */
-	public void onKafkaConsumerCacheMiss ();
-	/**
-	 * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages
-	 */
-	public void onKafkaConsumerCacheHit ();
-	/**
-	 * This method will call if the kafka consumer cache claimed
-	 */
-	public void onKafkaConsumerClaimed ();
-	/**
-	 * This method will call if Kafka consumer is timed out
-	 */
-	public void onKafkaConsumerTimeout ();
-
-
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java
deleted file mode 100644
index 2557980..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/Publisher.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * A publisher interface. Publishers receive messages and post them to a topic.
- * @author author
- */
-public interface Publisher
-{
-	/**
-	 * A message interface. The message has a key and a body.
-	 * @author author
-	 */
-	public interface message
-	{
-		/**
-		 * Get the key for this message. The key is used to partition messages
-		 * into "sub-streams" that have guaranteed order. The key can be null,
-		 * which means the message can be processed without any concern for order.
-		 * 
-		 * @return a key, possibly null
-		 */
-		String getKey();
-
-		/**
-		 * Get the message body.
-		 * @return a message body
-		 */
-		String getMessage();
-		/**
-		 * set the logging params for transaction enabled logging 
-		 * @param logDetails
-		 */
-		void setLogDetails (LogDetails logDetails);
-		/**
-		 * Get the log details for transaction enabled logging
-		 * @return LogDetails
-		 */
-		LogDetails getLogDetails ();
-		
-		/**
-		 * boolean transactionEnabled
-		 * @return true/false
-		 */
-		boolean isTransactionEnabled();
-		/**
-		 * Set the transaction enabled flag from prop file or topic based implementation
-		 * @param transactionEnabled
-		 */
-		void setTransactionEnabled(boolean transactionEnabled);
-	}
-
-	/**
-	 * Send a single message to a topic. Equivalent to sendMessages with a list of size 1.
-	 * @param topic
-	 * @param msg
-	 * @throws IOException
-	 */
-	public void sendMessage ( String topic, message msg ) throws IOException;
-
-	/**
-	 * Send messages to a topic.
-	 * @param topic
-	 * @param msgs
-	 * @throws IOException
-	 */
-	public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
-	
-	public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java
deleted file mode 100644
index 1ea7c0d..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumer.java
+++ /dev/null
@@ -1,245 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * A consumer instance that's created per-request. These are stateless so that
- * clients can connect to this service as a proxy.
- * 
- * @author author
- *
- */
-public class KafkaConsumer implements Consumer {
-	private enum State {
-		OPENED, CLOSED
-	}
-
-	/**
-	 * KafkaConsumer() is constructor. It has following 4 parameters:-
-	 * @param topic
-	 * @param group
-	 * @param id
-	 * @param cc
-	 * 
-	 */
-	
-	public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
-		fTopic = topic;
-		fGroup = group;
-		fId = id;
-		fConnector = cc;
-
-		fCreateTimeMs = System.currentTimeMillis();
-		fLastTouch = fCreateTimeMs;
-
-		fLogTag = fGroup + "(" + fId + ")/" + fTopic;
-		offset = 0;
-
-		state = KafkaConsumer.State.OPENED;
-
-		final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
-		topicCountMap.put(fTopic, 1);
-		final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = fConnector
-				.createMessageStreams(topicCountMap);
-		final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(fTopic);
-		fStream = streams.iterator().next();
-	}
-
-	
-	/** getName() method returns string type value.
-	 * returns 3 parameters in string:- 
-	 * fTopic,fGroup,fId
-	 * @Override
-	 */
-	public String getName() {
-		return fTopic + " : " + fGroup + " : " + fId;
-	}
-
-	/** getCreateTimeMs() method returns long type value.
-	 * returns fCreateTimeMs variable value 
-	 * @Override
-	 * 
-	 */
-	public long getCreateTimeMs() {
-		return fCreateTimeMs;
-	}
-
-	/** getLastAccessMs() method returns long type value.
-	 * returns fLastTouch variable value 
-	 * @Override
-	 * 
-	 */
-	public long getLastAccessMs() {
-		return fLastTouch;
-	}
-
-	
-	/** 
-	 * nextMessage() is synchronized method that means at a time only one object can access it.
-	 * getName() method returns String which is of type Consumer.Message
-	 * @Override
-	 * */
-	public synchronized Consumer.Message nextMessage() {
-		if (getState() == KafkaConsumer.State.CLOSED) {
-			log.warn("nextMessage() called on closed KafkaConsumer " + getName());
-			return null;
-		}
-
-		try {
-			ConsumerIterator<byte[], byte[]> it = fStream.iterator();
-			if (it.hasNext()) {
-				final MessageAndMetadata<byte[], byte[]> msg = it.next();
-				offset = msg.offset();
-
-				return new Consumer.Message() {
-					@Override
-					public long getOffset() {
-						return msg.offset();
-					}
-
-					@Override
-					public String getMessage() {
-						return new String(msg.message());
-					}
-				};
-			}
-		} catch (kafka.consumer.ConsumerTimeoutException x) {
-			log.debug(fLogTag + ": ConsumerTimeoutException in Kafka consumer; returning null. ");
-		} catch (java.lang.IllegalStateException x) {
-			log.error(fLogTag + ": Illegal state exception in Kafka consumer; dropping stream. " + x.getMessage());
-		}
-
-		return null;
-	}
-	
-	/** getOffset() method returns long type value.
-	 * returns offset variable value 
-	 * @Override
-	 * 
-	 */
-	public long getOffset() {
-		return offset;
-	}
-
-	/** commit offsets 
-	 * commitOffsets() method will be called on closed of KafkaConsumer.
-	 * @Override
-	 * 
-	 */
-	public void commitOffsets() {
-		if (getState() == KafkaConsumer.State.CLOSED) {
-			log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
-			return;
-		}
-		fConnector.commitOffsets();
-	}
-
-	/**
-	 * updating fLastTouch with current time in ms
-	 */
-	public void touch() {
-		fLastTouch = System.currentTimeMillis();
-	}
-	
-	/** getLastTouch() method returns long type value.
-	 * returns fLastTouch variable value
-	 * 
-	 */
-	public long getLastTouch() {
-		return fLastTouch;
-	}
-
-	/**
-	 *   setting the kafkaConsumer state to closed
-	 */
-	public synchronized void close() {
-		if (getState() == KafkaConsumer.State.CLOSED) {
-			log.warn("close() called on closed KafkaConsumer " + getName());
-			return;
-		}
-
-		setState(KafkaConsumer.State.CLOSED);
-		fConnector.shutdown();
-	}
-	
-	/**
-	 * getConsumerGroup() returns Consumer group
-	 * @return
-	 */
-	public String getConsumerGroup() {
-		return fGroup;
-	}
-	
-	/**
-	 * getConsumerId returns Consumer Id
-	 * @return
-	 */
-	public String getConsumerId() {
-		return fId;
-	}
-
-	/**
-	 * getState returns kafkaconsumer state
-	 * @return
-	 */	
-	private KafkaConsumer.State getState() {
-		return this.state;
-	}
-	
-	/**
-	 * setState() sets the kafkaConsumer state
-	 * @param state
-	 */
-	private void setState(KafkaConsumer.State state) {
-		this.state = state;
-	}
-
-	private ConsumerConnector fConnector;
-	private final String fTopic;
-	private final String fGroup;
-	private final String fId;
-	private final String fLogTag;
-	private final KafkaStream<byte[], byte[]> fStream;
-	private long fCreateTimeMs;
-	private long fLastTouch;
-	private long offset;
-	private KafkaConsumer.State state;
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
-	//private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java
deleted file mode 100644
index 4cf7f3a..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaConsumerCache.java
+++ /dev/null
@@ -1,614 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.I0Itec.zkclient.exception.ZkException;
-import org.I0Itec.zkclient.exception.ZkInterruptedException;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
-import org.apache.curator.framework.recipes.cache.ChildData;
-import org.apache.curator.framework.recipes.cache.PathChildrenCache;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
-import org.apache.curator.framework.state.ConnectionState;
-import org.apache.curator.framework.state.ConnectionStateListener;
-import org.apache.curator.utils.EnsurePath;
-import org.apache.curator.utils.ZKPaths;
-import org.apache.http.annotation.NotThreadSafe;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NoNodeException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
- *                must be
- * @author author
- *
- */
-@NotThreadSafe
-public class KafkaConsumerCache {
-
-	private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
-	private static final int kDefault_ConsumerHandoverWaitMs = 500;
-
-	private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds";
-	private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs";
-
-	private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath";
-	private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache";
-
-	// kafka defaults to timing out a client after 6 seconds of inactivity, but
-	// it heartbeats even when the client isn't fetching. Here, we don't
-	// want to prematurely rebalance the consumer group. Assuming clients are
-	// hitting
-	// the server at least every 30 seconds, timing out after 2 minutes should
-	// be okay.
-	// FIXME: consider allowing the client to specify its expected call rate?
-	private static final long kDefault_MustTouchEveryMs = 1000 * 60 * 2;
-
-	// check for expirations pretty regularly
-	private static final long kDefault_SweepEverySeconds = 15;
-
-	private enum Status {
-		NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
-	}
-
-	/**
-	 * User defined exception class for kafka consumer cache
-	 * 
-	 * @author author
-	 *
-	 */
-	public class KafkaConsumerCacheException extends Exception {
-		/**
-		 * To throw the exception
-		 * 
-		 * @param t
-		 */
-		KafkaConsumerCacheException(Throwable t) {
-			super(t);
-		}
-
-		/**
-		 * 
-		 * @param s
-		 */
-		public KafkaConsumerCacheException(String s) {
-			super(s);
-		}
-
-		private static final long serialVersionUID = 1L;
-	}
-
-	/**
-	 * Creates a KafkaConsumerCache object. Before it is used, you must call
-	 * startCache()
-	 * 
-	 * @param apiId
-	 * @param s
-	 * @param metrics
-	 */
-	public KafkaConsumerCache(String apiId,  MetricsSet metrics) {
-
-		if (apiId == null) {
-			throw new IllegalArgumentException("API Node ID must be specified.");
-		}
-
-		fApiId = apiId;
-	//	fSettings = s;
-		fMetrics = metrics;
-		String strkSetting_ZkBasePath= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ZkBasePath);
-		if(null==strkSetting_ZkBasePath)strkSetting_ZkBasePath = kDefault_ZkBasePath;
-		fBaseZkPath = strkSetting_ZkBasePath;
-
-		fConsumers = new ConcurrentHashMap<String, KafkaConsumer>();
-		fSweepScheduler = Executors.newScheduledThreadPool(1);
-
-		curatorConsumerCache = null;
-
-		status = Status.NOT_STARTED;
-
-		listener = new ConnectionStateListener() {
-			public void stateChanged(CuratorFramework client, ConnectionState newState) {
-				if (newState == ConnectionState.LOST) {
-					log.info("ZooKeeper connection expired");
-					handleConnectionLoss();
-				} else if (newState == ConnectionState.READ_ONLY) {
-					log.warn("ZooKeeper connection set to read only mode.");
-				} else if (newState == ConnectionState.RECONNECTED) {
-					log.info("ZooKeeper connection re-established");
-					handleReconnection();
-				} else if (newState == ConnectionState.SUSPENDED) {
-					log.warn("ZooKeeper connection has been suspended.");
-					handleConnectionSuspended();
-				}
-			}
-		};
-	}
-
-	/**
-	 * Start the cache service. This must be called before any get/put
-	 * operations.
-	 * 
-	 * @param mode
-	 *            DMAAP or cambria
-	 * @param curator
-	 * @throws IOException
-	 * @throws KafkaConsumerCacheException
-	 */
-	public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
-		try {
-
-			// CuratorFramework curator = null;
-
-			// Changed the class from where we are initializing the curator
-			// framework
-			if (mode != null && mode.equals(CambriaConstants.CAMBRIA)) {
-				curator = ConfigurationReader.getCurator();
-			} else if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
-				curator = getCuratorFramework(curator);
-			}
-
-			curator.getConnectionStateListenable().addListener(listener);
-
-			setStatus(Status.CONNECTED);
-
-			curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
-			curatorConsumerCache.start();
-
-			curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
-				public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
-					switch (event.getType()) {
-					case CHILD_ADDED: {
-						final String apiId = new String(event.getData().getData());
-						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
-						log.info(apiId + " started consumer " + consumer);
-						break;
-					}
-					case CHILD_UPDATED: {
-						final String apiId = new String(event.getData().getData());
-						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
-						if (fConsumers.containsKey(consumer)) {
-							log.info(apiId + " claimed consumer " + consumer + " from " + fApiId);
-
-							dropClaimedConsumer(consumer);
-						}
-
-						break;
-					}
-					case CHILD_REMOVED: {
-						final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
-						if (fConsumers.containsKey(consumer)) {
-							log.info("Someone wanted consumer " + consumer + " gone;  removing it from the cache");
-							dropConsumer(consumer, false);
-						}
-
-						break;
-					}
-					default:
-						break;
-					}
-				}
-			});
-
-			// initialize the ZK path
-			EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
-			ensurePath.ensure(curator.getZookeeperClient());
-
-			//final long freq = fSettings.getLong(kSetting_SweepEverySeconds, kDefault_SweepEverySeconds);
-			long freq = kDefault_SweepEverySeconds;
-			String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_SweepEverySeconds);
-			if(null==strkSetting_SweepEverySeconds) strkSetting_SweepEverySeconds = kDefault_SweepEverySeconds+"";
-			
-			  freq = Long.parseLong(strkSetting_SweepEverySeconds);
-					
-			fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
-			log.info("KafkaConsumerCache started");
-			log.info("sweeping cached clients every " + freq + " seconds");
-		} catch (ZkException e) {
-			throw new KafkaConsumerCacheException(e);
-		} catch (Exception e) {
-			throw new KafkaConsumerCacheException(e);
-		}
-	}
-
-	/**
-	 * Getting the curator oject to start the zookeeper connection estabished
-	 * 
-	 * @param curator
-	 * @return curator object
-	 */
-	public static CuratorFramework getCuratorFramework(CuratorFramework curator) {
-		if (curator.getState() == CuratorFrameworkState.LATENT) {
-			curator.start();
-
-			try {
-				curator.blockUntilConnected();
-			} catch (InterruptedException e) {
-				// Ignore
-				log.error("error while setting curator framework :" + e.getMessage());
-			}
-		}
-
-		return curator;
-	}
-
-	/**
-	 * Stop the cache service.
-	 */
-	public void stopCache() {
-		setStatus(Status.DISCONNECTED);
-
-		final CuratorFramework curator = ConfigurationReader.getCurator();
-
-		if (curator != null) {
-			try {
-				curator.getConnectionStateListenable().removeListener(listener);
-				curatorConsumerCache.close();
-				log.info("Curator client closed");
-			} catch (ZkInterruptedException e) {
-				log.warn("Curator client close interrupted: " + e.getMessage());
-			} catch (IOException e) {
-				log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
-			}
-
-			curatorConsumerCache = null;
-		}
-
-		if (fSweepScheduler != null) {
-			fSweepScheduler.shutdownNow();
-			log.info("cache sweeper stopped");
-		}
-
-		if (fConsumers != null) {
-			fConsumers.clear();
-			fConsumers = null;
-		}
-
-		setStatus(Status.NOT_STARTED);
-
-		log.info("Consumer cache service stopped");
-	}
-
-	/**
-	 * Get a cached consumer by topic, group, and id, if it exists (and remains
-	 * valid) In addition, this method waits for all other consumer caches in
-	 * the cluster to release their ownership and delete their version of this
-	 * consumer.
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param clientId
-	 * @return a consumer, or null
-	 */
-	public KafkaConsumer getConsumerFor(String topic, String consumerGroupId, String clientId)
-			throws KafkaConsumerCacheException {
-		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
-			throw new KafkaConsumerCacheException("The cache service is unavailable.");
-
-		final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
-		final KafkaConsumer kc = fConsumers.get(consumerKey);
-
-		if (kc != null) {
-			log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
-			kc.touch();
-			fMetrics.onKafkaConsumerCacheHit();
-		} else {
-			log.debug("Consumer cache miss for [" + consumerKey + "]");
-			fMetrics.onKafkaConsumerCacheMiss();
-		}
-
-		return kc;
-	}
-
-	/**
-	 * Put a consumer into the cache by topic, group and ID
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param consumerId
-	 * @param consumer
-	 * @throws KafkaConsumerCacheException
-	 */
-	public void putConsumerFor(String topic, String consumerGroupId, String consumerId, KafkaConsumer consumer)
-			throws KafkaConsumerCacheException {
-		if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
-			throw new KafkaConsumerCacheException("The cache service is unavailable.");
-
-		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
-		fConsumers.put(consumerKey, consumer);
-	}
-
-	public Collection<? extends Consumer> getConsumers() {
-		return new LinkedList<KafkaConsumer>(fConsumers.values());
-	}
-
-	/**
-	 * This method is to drop all the consumer
-	 */
-	public void dropAllConsumers() {
-		for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
-			dropConsumer(entry.getKey(), true);
-		}
-
-		// consumers should be empty here
-		if (fConsumers.size() > 0) {
-			log.warn("During dropAllConsumers, the consumer map is not empty.");
-			fConsumers.clear();
-		}
-	}
-
-	/**
-	 * Drop a consumer from our cache due to a timeout
-	 * 
-	 * @param key
-	 */
-	private void dropTimedOutConsumer(String key) {
-		fMetrics.onKafkaConsumerTimeout();
-
-		if (!fConsumers.containsKey(key)) {
-			log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key);
-			return;
-		}
-
-		// First, drop this consumer from our cache
-		dropConsumer(key, true);
-
-		final CuratorFramework curator = ConfigurationReader.getCurator();
-
-		try {
-			curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
-		} catch (NoNodeException e) {
-			log.warn("A consumer was deleted from " + fApiId
-					+ "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
-		} catch (Exception e) {
-			log.debug("Unexpected exception while deleting consumer: " + e.getMessage());
-		}
-
-		log.info("Dropped " + key + " consumer due to timeout");
-	}
-
-	/**
-	 * Drop a consumer from our cache due to another API node claiming it as
-	 * their own.
-	 * 
-	 * @param key
-	 */
-	private void dropClaimedConsumer(String key) {
-		// if the consumer is still in our cache, it implies a claim.
-		if (fConsumers.containsKey(key)) {
-			fMetrics.onKafkaConsumerClaimed();
-			log.info("Consumer [" + key + "] claimed by another node.");
-		}
-
-		dropConsumer(key, false);
-	}
-
-	/**
-	 * Removes the consumer from the cache and closes its connection to the
-	 * kafka broker(s).
-	 * 
-	 * @param key
-	 * @param dueToTimeout
-	 */
-	private void dropConsumer(String key, boolean dueToTimeout) {
-		final KafkaConsumer kc = fConsumers.remove(key);
-
-		if (kc != null) {
-			log.info("closing Kafka consumer " + key);
-			kc.close();
-		}
-	}
-
-//	private final rrNvReadable fSettings;
-	private final MetricsSet fMetrics;
-	private final String fBaseZkPath;
-	private final ScheduledExecutorService fSweepScheduler;
-	private final String fApiId;
-	private final ConnectionStateListener listener;
-
-	private ConcurrentHashMap<String, KafkaConsumer> fConsumers;
-	private PathChildrenCache curatorConsumerCache;
-
-	private volatile Status status;
-
-	private void handleReconnection() {
-
-		log.info("Reading current cache data from ZK and synchronizing local cache");
-
-		final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
-
-		// Remove all the consumers in this API nodes cache that now belong to
-		// other API nodes.
-		for (ChildData cachedConsumer : cacheData) {
-			final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
-			final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
-					: "undefined";
-
-			if (!fApiId.equals(owningApiId)) {
-				fConsumers.remove(consumerId);
-			}
-		}
-
-		setStatus(Status.CONNECTED);
-	}
-
-	private void handleConnectionSuspended() {
-		log.info("Suspending cache until ZK connection is re-established");
-
-		setStatus(Status.SUSPENDED);
-	}
-
-	private void handleConnectionLoss() {
-		log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)");
-
-		setStatus(Status.DISCONNECTED);
-
-		closeAllCachedConsumers();
-		fConsumers.clear();
-	}
-
-	private void closeAllCachedConsumers() {
-		for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
-			entry.getValue().close();
-		}
-	}
-
-	private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) {
-		return topic + "::" + consumerGroupId + "::" + clientId;
-	}
-
-	/**
-	 * This method is to get a lock
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param consumerId
-	 * @throws KafkaConsumerCacheException
-	 */
-	public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId)
-			throws KafkaConsumerCacheException {
-		// get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
-		final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
-
-		try {
-			final String consumerPath = fBaseZkPath + "/" + consumerKey;
-
-			log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
-
-			final CuratorFramework curator = ConfigurationReader.getCurator();
-
-			try {
-				curator.setData().forPath(consumerPath, fApiId.getBytes());
-			} catch (KeeperException.NoNodeException e) {
-				curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
-			}
-
-			log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
-		} catch (Exception e) {
-			log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
-			throw new KafkaConsumerCacheException(e);
-		}
-
-		log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
-
-		try {
-			int kSetting_ConsumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
-			String strkSetting_ConsumerHandoverWaitMs= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ConsumerHandoverWaitMs+"");
-			if(strkSetting_ConsumerHandoverWaitMs!=null) kSetting_ConsumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
-			
-					Thread.sleep(kSetting_ConsumerHandoverWaitMs);
-			//Thread.sleep(fSettings.getInt(kSetting_ConsumerHandoverWaitMs, kDefault_ConsumerHandoverWaitMs));
-		} catch (InterruptedException e) {
-			// Ignore
-		}
-	}
-
-	private void sweep() {
-		final LinkedList<String> removals = new LinkedList<String>();
-		long mustTouchEveryMs = kDefault_MustTouchEveryMs;
-		String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_TouchEveryMs);
-		//if(null!=strkSetting_TouchEveryMs) strkSetting_TouchEveryMs = kDefault_MustTouchEveryMs+"";
-		if(null!=strkSetting_TouchEveryMs)
-		{
-		  mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);	
-		}
-
-		//final long mustTouchEveryMs = fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
-		final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
-
-		for (Entry<String, KafkaConsumer> e : fConsumers.entrySet()) {
-			final long lastTouchMs = e.getValue().getLastTouch();
-
-			log.debug("consumer " + e.getKey() + " last touched at " + lastTouchMs);
-
-			if (lastTouchMs < oldestAllowedTouchMs) {
-				log.info("consumer " + e.getKey() + " has expired");
-				removals.add(e.getKey());
-			}
-		}
-
-		for (String key : removals) {
-			dropTimedOutConsumer(key);
-		}
-	}
-
-	/**
-	 * Creating a thread to run the sweep method
-	 * 
-	 * @author author
-	 *
-	 */
-	private class sweeper implements Runnable {
-		/**
-		 * run method
-		 */
-		public void run() {
-			sweep();
-		}
-	}
-
-	/**
-	 * This method is to drop consumer
-	 * 
-	 * @param topic
-	 * @param consumerGroup
-	 * @param clientId
-	 */
-	public void dropConsumer(String topic, String consumerGroup, String clientId) {
-		dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false);
-	}
-
-	private Status getStatus() {
-		return this.status;
-	}
-
-	private void setStatus(Status status) {
-		this.status = status;
-	}
-
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
-	//private static final Logger log = LoggerFactory.getLogger(KafkaConsumerCache.class);
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java
deleted file mode 100644
index 90e5ce0..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/kafka/KafkaPublisher.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-
-import kafka.common.FailedToSendMessageException;
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
-import org.json.JSONException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * Sends raw JSON objects into Kafka.
- * 
- * Could improve space: BSON rather than JSON?
- * 
- * @author author
- *
- */
-
-public class KafkaPublisher implements Publisher {
-	/**
-	 * constructor initializing
-	 * 
-	 * @param settings
-	 * @throws rrNvReadable.missingReqdSetting
-	 */
-	public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
-		//fSettings = settings;
-
-		final Properties props = new Properties();
-		/*transferSetting(fSettings, props, "metadata.broker.list", "localhost:9092");
-		transferSetting(fSettings, props, "request.required.acks", "1");
-		transferSetting(fSettings, props, "message.send.max.retries", "5");
-		transferSetting(fSettings, props, "retry.backoff.ms", "150"); */
-		String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list"); 
-		System.out.println("kafkaConnUrl:- "+kafkaConnUrl);
-		if(null==kafkaConnUrl){ 
- 
-			kafkaConnUrl="localhost:9092"; 
-		}		
-		transferSetting( props, "metadata.broker.list", kafkaConnUrl);
-		transferSetting( props, "request.required.acks", "1");
-		transferSetting( props, "message.send.max.retries", "5");
-		transferSetting(props, "retry.backoff.ms", "150"); 
-
-		props.put("serializer.class", "kafka.serializer.StringEncoder");
-
-		fConfig = new ProducerConfig(props);
-		fProducer = new Producer<String, String>(fConfig);
-	}
-
-	/**
-	 * Send a message with a given topic and key.
-	 * 
-	 * @param msg
-	 * @throws FailedToSendMessageException
-	 * @throws JSONException
-	 */
-	@Override
-	public void sendMessage(String topic, message msg) throws IOException, FailedToSendMessageException {
-		final List<message> msgs = new LinkedList<message>();
-		msgs.add(msg);
-		sendMessages(topic, msgs);
-	}
-
-	/**
-	 * method publishing batch messages
-	 * 
-	 * @param topic
-	 * @param kms
-	 * throws IOException
-	 */
-	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
-		try {
-			fProducer.send(kms);
-
-		} catch (FailedToSendMessageException excp) { 
-			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
-			throw new FailedToSendMessageException(excp.getMessage(), excp);
-		}
-
-	}
-
-	/**
-	 * Send a set of messages. Each must have a "key" string value.
-	 * 
-	 * @param topic
-	 * @param msg
-	 * @throws FailedToSendMessageException
-	 * @throws JSONException
-	 */
-	@Override
-	public void sendMessages(String topic, List<? extends message> msgs)
-			throws IOException, FailedToSendMessageException {
-		log.info("sending " + msgs.size() + " events to [" + topic + "]");
-
-		final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
-		for (message o : msgs) {
-			final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
-			kms.add(data);
-		}
-		try {
-			fProducer.send(kms);
-
-		} catch (FailedToSendMessageException excp) {
-			log.error("Failed to send message(s) to topic [" + topic + "].", excp);
-			throw new FailedToSendMessageException(excp.getMessage(), excp);
-		}
-	}
-
-	//private final rrNvReadable fSettings;
-
-	private ProducerConfig fConfig;
-	private Producer<String, String> fProducer;
-
-  /**
-   * It sets the key value pair
-   * @param topic
-   * @param msg 
-   * @param key
-   * @param defVal
-   */
-	private void transferSetting(Properties props, String key, String defVal) {
-		String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
-		if (null==kafka_prop) kafka_prop=defVal;
-		//props.put(key, settings.getString("kafka." + key, defVal));
-		props.put(key, kafka_prop);
-	}
-
-	//private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
-
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java
deleted file mode 100644
index b42a22b..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryConsumerFactory.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-/**
- * 
- * @author author
- *
- */
-public class MemoryConsumerFactory implements ConsumerFactory
-{
-	/**
-	 * 
-	 * Initializing constructor
-	 * @param q
-	 */
-	public MemoryConsumerFactory ( MemoryQueue q )
-	{
-		fQueue = q;
-	}
-
-	/**
-	 * 
-	 * @param topic
-	 * @param consumerGroupId
-	 * @param clientId
-	 * @param timeoutMs
-	 * @return Consumer
-	 */
-	@Override
-	public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs )
-	{
-		return new MemoryConsumer ( topic, consumerGroupId );
-	}
-
-	private final MemoryQueue fQueue;
-
-	/**
-	 * 
-	 * Define nested inner class
-	 *
-	 */
-	private class MemoryConsumer implements Consumer
-	{
-		/**
-		 * 
-		 * Initializing MemoryConsumer constructor 
-		 * @param topic
-		 * @param consumer
-		 * 
-		 */
-		public MemoryConsumer ( String topic, String consumer )
-		{
-			fTopic = topic;
-			fConsumer = consumer;
-			fCreateMs = System.currentTimeMillis ();
-			fLastAccessMs = fCreateMs;
-		}
-
-		@Override
-		/**
-		 * 
-		 * return consumer details  
-		 */
-		public Message nextMessage ()
-		{
-			return fQueue.get ( fTopic, fConsumer );
-		}
-
-		private final String fTopic;
-		private final String fConsumer;
-		private final long fCreateMs;
-		private long fLastAccessMs;
-
-		@Override
-		public void close() {
-			//Nothing to close/clean up.
-		}
-		/**
-		 * 
-		 */
-		public void commitOffsets()
-		{
-			// ignoring this aspect
-		}
-		/**
-		 * get offset
-		 */
-		public long getOffset()
-		{
-			return 0;
-		}
-
-		@Override
-		/**
-		 * get consumer topic name
-		 */
-		public String getName ()
-		{
-			return fTopic + "/" + fConsumer;
-		}
-
-		@Override
-		public long getCreateTimeMs ()
-		{
-			return fCreateMs;
-		}
-
-		@Override
-		public long getLastAccessMs ()
-		{
-			return fLastAccessMs;
-		}
-	}
-
-	@Override
-	public void destroyConsumer(String topic, String consumerGroupId,
-			String clientId) {
-		//No cache for memory consumers, so NOOP
-	}
-
-	@Override
-	public void dropCache ()
-	{
-		// nothing to do - there's no cache here
-	}
-
-	@Override
-	/**
-	 * @return ArrayList<MemoryConsumer>
-	 */
-	public Collection<? extends Consumer> getConsumers ()
-	{
-		return new ArrayList<MemoryConsumer> ();
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java
deleted file mode 100644
index 221e58a..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryMetaBroker.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-
-/**
- * 
- * @author author
- *
- */
-public class MemoryMetaBroker implements Broker {
-	/**
-	 * 
-	 * @param mq
-	 * @param configDb
-	 * @param settings
-	 */
-	public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) {
-	//public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb, rrNvReadable settings) {
-		fQueue = mq;
-		fTopics = new HashMap<String, MemTopic>();
-	}
-
-	@Override
-	public List<Topic> getAllTopics() {
-		return new LinkedList<Topic>(fTopics.values());
-	}
-
-	@Override
-	public Topic getTopic(String topic) {
-		return fTopics.get(topic);
-	}
-
-	@Override
-	public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas,
-			boolean transactionEnabled) throws TopicExistsException {
-		if (getTopic(topic) != null) {
-			throw new TopicExistsException(topic);
-		}
-		fQueue.createTopic(topic);
-		fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled));
-		return getTopic(topic);
-	}
-
-	@Override
-	public void deleteTopic(String topic) {
-		fTopics.remove(topic);
-		fQueue.removeTopic(topic);
-	}
-
-	private final MemoryQueue fQueue;
-	private final HashMap<String, MemTopic> fTopics;
-
-	private static class MemTopic implements Topic {
-		/**
-		 * constructor initialization
-		 * 
-		 * @param name
-		 * @param desc
-		 * @param owner
-		 * @param transactionEnabled
-		 */
-		public MemTopic(String name, String desc, String owner, boolean transactionEnabled) {
-			fName = name;
-			fDesc = desc;
-			fOwner = owner;
-			ftransactionEnabled = transactionEnabled;
-			fReaders = null;
-			fWriters = null;
-		}
-
-		@Override
-		public String getOwner() {
-			return fOwner;
-		}
-
-		@Override
-		public NsaAcl getReaderAcl() {
-			return fReaders;
-		}
-
-		@Override
-		public NsaAcl getWriterAcl() {
-			return fWriters;
-		}
-
-		@Override
-		public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
-			if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) {
-				throw new AccessDeniedException(user == null ? "" : user.getKey());
-			}
-		}
-
-		@Override
-		public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
-			if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) {
-				throw new AccessDeniedException(user == null ? "" : user.getKey());
-			}
-		}
-
-		@Override
-		public String getName() {
-			return fName;
-		}
-
-		@Override
-		public String getDescription() {
-			return fDesc;
-		}
-
-		@Override
-		public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
-			if (!fOwner.equals(asUser.getKey())) {
-				throw new AccessDeniedException("User does not own this topic " + fName);
-			}
-			if (fWriters == null) {
-				fWriters = new NsaAcl();
-			}
-			fWriters.add(publisherId);
-		}
-
-		@Override
-		public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
-			if (!fOwner.equals(asUser.getKey())) {
-				throw new AccessDeniedException("User does not own this topic " + fName);
-			}
-			fWriters.remove(publisherId);
-		}
-
-		@Override
-		public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
-			if (!fOwner.equals(asUser.getKey())) {
-				throw new AccessDeniedException("User does not own this topic " + fName);
-			}
-			if (fReaders == null) {
-				fReaders = new NsaAcl();
-			}
-			fReaders.add(consumerId);
-		}
-
-		@Override
-		public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
-			if (!fOwner.equals(asUser.getKey())) {
-				throw new AccessDeniedException("User does not own this topic " + fName);
-			}
-			fReaders.remove(consumerId);
-		}
-
-		private final String fName;
-		private final String fDesc;
-		private final String fOwner;
-		private NsaAcl fReaders;
-		private NsaAcl fWriters;
-		private boolean ftransactionEnabled;
-
-		@Override
-		public boolean isTransactionEnabled() {
-			return ftransactionEnabled;
-		}
-
-		@Override
-		public Set<String> getOwners() {
-			final TreeSet<String> set = new TreeSet<String> ();
-			set.add ( fOwner );
-			return set;
-		}
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java
deleted file mode 100644
index e6b98b3..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueue.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-
-/**
- * When broker type is memory, then this class is doing all the topic related
- * operations
- * 
- * @author author
- *
- */
-public class MemoryQueue {
-	// map from topic to list of msgs
-	private HashMap<String, LogBuffer> fQueue;
-	private HashMap<String, HashMap<String, Integer>> fOffsets;
-
-	/**
-	 * constructor storing hashMap objects in Queue and Offsets object
-	 */
-	public MemoryQueue() {
-		fQueue = new HashMap<String, LogBuffer>();
-		fOffsets = new HashMap<String, HashMap<String, Integer>>();
-	}
-
-	/**
-	 * method used to create topic
-	 * 
-	 * @param topic
-	 */
-	public synchronized void createTopic(String topic) {
-		LogBuffer q = fQueue.get(topic);
-		if (q == null) {
-			q = new LogBuffer(1024 * 1024);
-			fQueue.put(topic, q);
-		}
-	}
-
-	/**
-	 * method used to remove topic
-	 * 
-	 * @param topic
-	 */
-	public synchronized void removeTopic(String topic) {
-		LogBuffer q = fQueue.get(topic);
-		if (q != null) {
-			fQueue.remove(topic);
-		}
-	}
-
-	/**
-	 * method to write message on topic
-	 * 
-	 * @param topic
-	 * @param m
-	 */
-	public synchronized void put(String topic, message m) {
-		LogBuffer q = fQueue.get(topic);
-		if (q == null) {
-			createTopic(topic);
-			q = fQueue.get(topic);
-		}
-		q.push(m.getMessage());
-	}
-
-	/**
-	 * method to read consumer messages
-	 * 
-	 * @param topic
-	 * @param consumerName
-	 * @return
-	 */
-	public synchronized Consumer.Message get(String topic, String consumerName) {
-		final LogBuffer q = fQueue.get(topic);
-		if (q == null) {
-			return null;
-		}
-
-		HashMap<String, Integer> offsetMap = fOffsets.get(consumerName);
-		if (offsetMap == null) {
-			offsetMap = new HashMap<String, Integer>();
-			fOffsets.put(consumerName, offsetMap);
-		}
-		Integer offset = offsetMap.get(topic);
-		if (offset == null) {
-			offset = 0;
-		}
-
-		final msgInfo result = q.read(offset);
-		if (result != null && result.msg != null) {
-			offsetMap.put(topic, result.offset + 1);
-		}
-		return result;
-	}
-
-	/**
-	 * static inner class used to details about consumed messages
-	 * 
-	 * @author author
-	 *
-	 */
-	private static class msgInfo implements Consumer.Message {
-		/**
-		 * published message which is consumed
-		 */
-		public String msg;
-		/**
-		 * offset associated with message
-		 */
-		public int offset;
-
-		/**
-		 * get offset of messages
-		 */
-		@Override
-		public long getOffset() {
-			return offset;
-		}
-
-		/**
-		 * get consumed message
-		 */
-		@Override
-		public String getMessage() {
-			return msg;
-		}
-	}
-
- /**
- * 
- * @author author
- *
- * private LogBuffer class has synchronized push and read method
- */
-	private class LogBuffer {
-		private int fBaseOffset;
-		private final int fMaxSize;
-		private final ArrayList<String> fList;
-
-		/**
-		 * constructor initializing the offset, maxsize and list
-		 * 
-		 * @param maxSize
-		 */
-		public LogBuffer(int maxSize) {
-			fBaseOffset = 0;
-			fMaxSize = maxSize;
-			fList = new ArrayList<String>();
-		}
-
-		/**
-		 * pushing message
-		 * 
-		 * @param msg
-		 */
-		public synchronized void push(String msg) {
-			fList.add(msg);
-			while (fList.size() > fMaxSize) {
-				fList.remove(0);
-				fBaseOffset++;
-			}
-		}
-
-		/**
-		 * reading messages
-		 * 
-		 * @param offset
-		 * @return
-		 */
-		public synchronized msgInfo read(int offset) {
-			final int actual = Math.max(0, offset - fBaseOffset);
-
-			final msgInfo mi = new msgInfo();
-			mi.msg = (actual >= fList.size()) ? null : fList.get(actual);
-			if (mi.msg == null)
-				return null;
-
-			mi.offset = actual + fBaseOffset;
-			return mi;
-		}
-
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java
deleted file mode 100644
index bf94b9c..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MemoryQueuePublisher.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * 
- * @author author
- *
- */
-public class MemoryQueuePublisher implements Publisher {
-	/**
-	 * 
-	 * @param q
-	 * @param b
-	 */
-	public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) {
-		fBroker = b;
-		fQueue = q;
-	}
-
-	/**
-	 * sendBatchMessages
-	 * 
-	 * @param topic
-	 * @param kms
-	 */
-	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
-	}
-	
-	/**
-	 * 
-	 * @param topic
-	 * @param msg
-	 * @throws IOException
-	 */
-	@Override
-	public void sendMessage(String topic, message msg) throws IOException {
-		if (null == fBroker.getTopic(topic)) {
-			try {
-				fBroker.createTopic(topic, topic, null, 8, 3, false);
-			} catch (TopicExistsException e) {
-				throw new RuntimeException(e);
-			}
-		}
-		fQueue.put(topic, msg);
-	}
-
-	@Override
-	/**
-	 * @param topic
-	 * @param msgs
-	 * @throws IOException
-	 */
-	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
-		for (message m : msgs) {
-			sendMessage(topic, m);
-		}
-	}
-
-	private final MemoryMetaBroker fBroker;
-	private final MemoryQueue fQueue;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java
deleted file mode 100644
index 0414e41..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageDropper.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * class is used to message publishing
- * 
- * @author author
- *
- */
-public class MessageDropper implements Publisher {
-	/**
-	 * publish single messages
-	 * param topic
-	 * param msg
-	 */
-	@Override
-	public void sendMessage(String topic, message msg) throws IOException {
-	}
-
-	/**
-	 * publish multiple messages
-	 */
-	@Override
-	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
-	}
-
-	/**
-	 * publish batch messages
-	 */
-	@Override
-	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java
deleted file mode 100644
index 63ea3b6..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/backends/memory/MessageLogger.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * class used for logging perspective
- * 
- * @author author
- *
- */
-public class MessageLogger implements Publisher {
-	public MessageLogger() {
-	}
-
-	public void setFile(File f) throws FileNotFoundException {
-		fStream = new FileOutputStream(f, true);
-	}
-
-	/** 
-	 * 
-	 * @param topic
-	 * @param msg
-	 * @throws IOException
-	 */
-	@Override
-	public void sendMessage(String topic, message msg) throws IOException {
-		logMsg(msg);
-	}
-
-	/**
-	 * @param topic
-	 * @param msgs
-	 * @throws IOException
-	 */
-	@Override
-	public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
-		for (message m : msgs) {
-			logMsg(m);
-		}
-	}
-
-	/**
-	 * @param topic
-	 * @param kms
-	 * @throws IOException
-	 */
-	@Override
-	public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
-
-	IOException {
-	}
-
-	private FileOutputStream fStream;
-
-	/**
-	 * 
-	 * @param msg
-	 * @throws IOException
-	 */
-	private void logMsg(message msg) throws IOException {
-		String key = msg.getKey();
-		if (key == null)
-			key = "<none>";
-
-		fStream.write('[');
-		fStream.write(key.getBytes());
-		fStream.write("] ".getBytes());
-		fStream.write(msg.getMessage().getBytes());
-		fStream.write('\n');
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java
deleted file mode 100644
index 43bc584..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/ApiKeyBean.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
-/**
- * 
- * @author author
- *
- */
-@XmlRootElement
-public class ApiKeyBean implements Serializable {
-
-	private static final long serialVersionUID = -8219849086890567740L;
-	
-	private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-	
-	private String email;
-	private String description;
-	/**
-	 * constructor
-	 */
-	public ApiKeyBean() {
-		super();
-	}
-/**
- * 
- * @param email
- * @param description
- */
-	public ApiKeyBean(String email, String description) {
-		super();
-		this.email = email;
-		this.description = description;
-	}
-
-	public String getEmail() {
-		return email;
-	}
-
-	public void setEmail(String email) {
-		this.email = email;
-	}
-
-	public String getDescription() {
-		return description;
-	}
-
-	public void setDescription(String description) {
-		this.description = description;
-	}
-
-	public String getKey() {
-		return generateKey(16);
-	}
-
-	public String getSharedSecret() {
-		return generateKey(24);
-	}
-	
-	private static String generateKey ( int length  ) {
-		return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length );
-	}
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java
deleted file mode 100644
index f4855b9..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPCambriaLimiter.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.util.HashMap;
-import java.util.concurrent.TimeUnit;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.metrics.impl.CdmRateTicker;
-
-/**
- * class provide rate information
- * 
- * @author author
- *
- */
-@Component
-public class DMaaPCambriaLimiter {
-	/**
-	 * constructor initializes
-	 * 
-	 * @param settings
-	 * @throws missingReqdSetting
-	 * @throws invalidSettingValue
-	 */
-	@Autowired
-	public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings)
-			throws missingReqdSetting, invalidSettingValue {
-		fRateInfo = new HashMap<String, RateInfo>();
-		fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
-				CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
-		fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
-				CambriaConstants.kDefault_RateLimitWindowLength);
-		fSleepMs = settings.getLong(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
-				CambriaConstants.kDefault_SleepMsOnRateLimit);
-	}
-
-	/**
-	 * static method provide the sleep time
-	 * 
-	 * @param ratePerMinute
-	 * @return
-	 */
-	public static long getSleepMsForRate(double ratePerMinute) {
-		if (ratePerMinute <= 0.0)
-			return 0;
-		return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
-	}
-
-	/**
-	 * Construct a rate limiter.
-	 * 
-	 * @param maxEmptyPollsPerMinute
-	 *            Pass <= 0 to deactivate rate limiting.
-	 *            @param windowLengthMins
-	 */
-	public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins) {
-		this(maxEmptyPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute));
-	}
-
-	/**
-	 * Construct a rate limiter
-	 * 
-	 * @param maxEmptyPollsPerMinute
-	 *            Pass <= 0 to deactivate rate limiting.
-	 * @param sleepMs
-	 * @param windowLengthMins
-	 */
-	public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins, long sleepMs) {
-		fRateInfo = new HashMap<String, RateInfo>();
-		fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
-		fWindowLengthMins = windowLengthMins;
-		fSleepMs = Math.max(0, sleepMs);
-	}
-
-	/**
-	 * Tell the rate limiter about a call to a topic/group/id. If the rate is
-	 * too high, this call delays its return and throws an exception.
-	 * 
-	 * @param topic
-	 * @param consumerGroup
-	 * @param clientId
-	 * @throws CambriaApiException
-	 */
-	public void onCall(String topic, String consumerGroup, String clientId) throws CambriaApiException {
-		// do nothing if rate is configured 0 or less
-		if (fMaxEmptyPollsPerMinute <= 0) {
-			return;
-		}
-
-		// setup rate info for this tuple
-		final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
-
-		final double rate = ri.onCall();
-		log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
-
-		if (rate > fMaxEmptyPollsPerMinute) {
-			try {
-				log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
-						+ ".");
-				if (fSleepMs > 0) {
-					log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
-							+ " ms sleep, then responding in error.");
-					Thread.sleep(fSleepMs);
-				} else {
-					log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
-				}
-			} catch (InterruptedException e) {
-				// ignore
-			}
-			ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, 
-					DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), 
-					"This client is making too many requests. Please use a long poll "
-							+ "setting to decrease the number of requests that result in empty responses. ");
-			log.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-	}
-
-	/**
-	 * 
-	 * @param topic
-	 * @param consumerGroup
-	 * @param clientId
-	 * @param sentCount
-	 */
-	public void onSend(String topic, String consumerGroup, String clientId, long sentCount) {
-		// check for good replies
-		if (sentCount > 0) {
-			// that was a good send, reset the metric
-			getRateInfo(topic, consumerGroup, clientId).reset();
-		}
-	}
-
-	private static class RateInfo {
-		/**
-		 * constructor initialzes
-		 * 
-		 * @param label
-		 * @param windowLengthMinutes
-		 */
-		public RateInfo(String label, int windowLengthMinutes) {
-			fLabel = label;
-			fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
-					windowLengthMinutes, TimeUnit.MINUTES);
-		}
-
-		public String getLabel() {
-			return fLabel;
-		}
-
-		/**
-		 * CdmRateTicker is reset
-		 */
-		public void reset() {
-			fCallRateSinceLastMsgSend.reset();
-		}
-
-		/**
-		 * 
-		 * @return
-		 */
-		public double onCall() {
-			fCallRateSinceLastMsgSend.tick();
-			return fCallRateSinceLastMsgSend.getRate();
-		}
-
-		private final String fLabel;
-		private final CdmRateTicker fCallRateSinceLastMsgSend;
-	}
-
-	private final HashMap<String, RateInfo> fRateInfo;
-	private final double fMaxEmptyPollsPerMinute;
-	private final int fWindowLengthMins;
-	private final long fSleepMs;
-	//private static final Logger log = LoggerFactory.getLogger(DMaaPCambriaLimiter.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
-	private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
-		final String key = makeKey(topic, consumerGroup, clientId);
-		RateInfo ri = fRateInfo.get(key);
-		if (ri == null) {
-			ri = new RateInfo(key, fWindowLengthMins);
-			fRateInfo.put(key, ri);
-		}
-		return ri;
-	}
-
-	private String makeKey(String topic, String group, String id) {
-		return topic + "::" + group + "::" + id;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java
deleted file mode 100644
index 5f132b7..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPContext.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-
-/**
- * DMaaPContext provide and maintain all the configurations , Http request/response
- * Session and consumer Request Time
- * @author author
- *
- */
-public class DMaaPContext {
-
-    private ConfigurationReader configReader;
-    private HttpServletRequest request;
-    private HttpServletResponse response;
-    private HttpSession session;
-    private String consumerRequestTime;
-    static int i=0;
-    
-    public synchronized static long getBatchID() {
-    	try{
-    		final long metricsSendTime = System.currentTimeMillis();
-    		final Date d = new Date(metricsSendTime);
-    		final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d);
-    		long dt= Long.valueOf(text)+i;
-    		i++;
-    		return dt;
-    	}
-    	catch(NumberFormatException ex){
-    		return 0;
-    	}
-    }
-    
-    public HttpServletRequest getRequest() {
-        return request;
-    }
-
-    public void setRequest(HttpServletRequest request) {
-        this.request = request;
-    }
-
-    public HttpServletResponse getResponse() {
-        return response;
-    }
-
-    public void setResponse(HttpServletResponse response) {
-        this.response = response;
-    }
-
-    public HttpSession getSession() {
-        this.session = request.getSession();
-        return session;
-    }
-
-    public void setSession(HttpSession session) {
-        this.session = session;
-    }
-
-    public ConfigurationReader getConfigReader() {
-        return configReader;
-    }
-
-    public void setConfigReader(ConfigurationReader configReader) {
-        this.configReader = configReader;
-    }
-
-    public String getConsumerRequestTime() {
-        return consumerRequestTime;
-    }
-
-    public void setConsumerRequestTime(String consumerRequestTime) {
-        this.consumerRequestTime = consumerRequestTime;
-    }
-    
-    
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
deleted file mode 100644
index 1b62ec8..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import kafka.consumer.ConsumerConfig;
-import kafka.javaapi.consumer.ConsumerConnector;
-
-/**
- * @author author
- *
- */
-public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
-
-	//private static final Logger log = LoggerFactory			.getLogger(DMaaPKafkaConsumerFactory.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
-	/**
-	 * constructor initialization
-	 * 
-	 * @param settings
-	 * @param metrics
-	 * @param curator
-	 * @throws missingReqdSetting
-	 * @throws KafkaConsumerCacheException
-	 * @throws UnknownHostException
-	 */
-	public DMaaPKafkaConsumerFactory(
-			@Qualifier("propertyReader") rrNvReadable settings,
-			@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
-			@Qualifier("curator") CuratorFramework curator)
-			throws missingReqdSetting, KafkaConsumerCacheException,
-			UnknownHostException {
-		/*final String apiNodeId = settings.getString(
-				CambriaConstants.kSetting_ApiNodeIdentifier,
-				InetAddress.getLocalHost().getCanonicalHostName()
-						+ ":"
-						+ settings.getInt(CambriaConstants.kSetting_Port,
-								CambriaConstants.kDefault_Port));*/
-		 String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
-		CambriaConstants.kSetting_ApiNodeIdentifier);
-		if (apiNodeId == null){
-			
-			apiNodeId=InetAddress.getLocalHost().getCanonicalHostName()
-			+ ":"
-			+ settings.getInt(CambriaConstants.kSetting_Port,
-					CambriaConstants.kDefault_Port);
-		}
-		
-		log.info("This Cambria API Node identifies itself as [" + apiNodeId
-				+ "].");
-		final String mode = CambriaConstants.DMAAP;
-		/*fSettings = settings;
-		fZooKeeper = fSettings.getString(kSettings_KafkaZookeeper, settings
-				.getString(CambriaConstants.kSetting_ZkConfigDbServers,
-						CambriaConstants.kDefault_ZkConfigDbServers));*/
-
-		String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-		if(null==strkSettings_KafkaZookeeper){
-			 strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
-			if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
-			
-		}
-		fZooKeeper=  strkSettings_KafkaZookeeper;
-		
-		//final boolean isCacheEnabled = fSettings.getBoolean(
-			//	kSetting_EnableCache, kDefault_IsCacheEnabled);
-		boolean kSetting_EnableCache= kDefault_IsCacheEnabled;
-		String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_EnableCache+"");
-		if(null!=strkSetting_EnableCache)kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
-			
-				final boolean isCacheEnabled = kSetting_EnableCache;
-				
-				
-		fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId, 
-				metrics) : null;
-		if (fCache != null) {
-			fCache.startCache(mode, curator);
-		}
-	}
-
-	@Override
-	public Consumer getConsumerFor(String topic, String consumerGroupName,
-			String consumerId, int timeoutMs) throws UnavailableException {
-		KafkaConsumer kc;
-
-		try {
-			kc = (fCache != null) ? fCache.getConsumerFor(topic,
-					consumerGroupName, consumerId) : null;
-		} catch (KafkaConsumerCacheException e) {
-			throw new UnavailableException(e);
-		}
-
-		if (kc == null) {
-			
-			final InterProcessMutex ipLock = new InterProcessMutex( ConfigurationReader.getCurator(), "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
-//			final InterProcessMutex fLock = new InterProcessMutex(
-//					ConfigurationReader.getCurator(), "/consumerFactory/"
-//							+ topic + "/" + consumerGroupName + "/"
-//							+ consumerId);
-			boolean locked = false;
-			try {
-			
-				locked = ipLock.acquire(30, TimeUnit.SECONDS);
-				if (!locked) {
-					// FIXME: this seems to cause trouble in some cases. This exception
-					// gets thrown routinely. Possibly a consumer trying multiple servers
-					// at once, producing a never-ending cycle of overlapping locks?
-					// The problem is that it throws and winds up sending a 503 to the
-					// client, which would be incorrect if the client is causing trouble
-					// by switching back and forth.
-					
-					throw new UnavailableException("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + ", " + consumerGroupName + ", " + consumerId + ")");
-				}
-				
-//				if (!fLock.acquire(30, TimeUnit.SECONDS)) {
-//					throw new UnavailableException(
-//							"Could not acquire lock in order to create (topic, group, consumer) = "
-//									+ "(" + topic + ", " + consumerGroupName
-//									+ ", " + consumerId + ")");
-//				}
-
-				fCache.signalOwnership(topic, consumerGroupName, consumerId);
-
-				log.info("Creating Kafka consumer for group ["
-						+ consumerGroupName + "], consumer [" + consumerId
-						+ "], on topic [" + topic + "].");
-
-				final String fakeGroupName = consumerGroupName + "--" + topic;
-
-				final ConsumerConfig ccc = createConsumerConfig(fakeGroupName,
-						consumerId);
-				final ConsumerConnector cc = kafka.consumer.Consumer
-						.createJavaConsumerConnector(ccc);
-				kc = new KafkaConsumer(topic, consumerGroupName, consumerId, cc);
-
-				if (fCache != null) {
-					fCache.putConsumerFor(topic, consumerGroupName, consumerId,
-							kc);
-				}
-			} catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
-				log.warn("Kafka consumer couldn't connect to ZK.");
-				throw new UnavailableException("Couldn't connect to ZK.");
-			} catch (KafkaConsumerCacheException e) {
-				log.warn("Failed to cache consumer (this may have performance implications): "
-						+ e.getMessage());
-			} catch (Exception e) {
-				throw new UnavailableException(
-						"Error while acquiring consumer factory lock", e);
-			} finally {
-				if ( locked )
-				{
-					try {
-						ipLock.release();
-					} catch (Exception e) {
-						throw new UnavailableException("Error while releasing consumer factory lock", e);
-					}
-				}	
-			}
-		}
-
-		return kc;
-	}
-
-	@Override
-	public synchronized void destroyConsumer(String topic,
-			String consumerGroup, String clientId) {
-		if (fCache != null) {
-			fCache.dropConsumer(topic, consumerGroup, clientId);
-		}
-	}
-
-	@Override
-	public synchronized Collection<? extends Consumer> getConsumers() {
-		return fCache.getConsumers();
-	}
-
-	@Override
-	public synchronized void dropCache() {
-		fCache.dropAllConsumers();
-	}
-
-	private ConsumerConfig createConsumerConfig(String groupId,
-			String consumerId) {
-		final Properties props = new Properties();
-		props.put("zookeeper.connect", fZooKeeper);
-		props.put("group.id", groupId);
-		props.put("consumer.id", consumerId);
-		//props.put("auto.commit.enable", "false");
-		// additional settings: start with our defaults, then pull in configured
-		// overrides
-		props.putAll(KafkaInternalDefaults);
-		for (String key : KafkaConsumerKeys) {
-			transferSettingIfProvided(props, key, "kafka");
-		}
-
-		return new ConsumerConfig(props);
-	}
-
-	//private final rrNvReadable fSettings;
-	private final KafkaConsumerCache fCache;
-
-	private String fZooKeeper;
-
-	private static final String kSettings_KafkaZookeeper = "kafka.client.zookeeper";
-
-	private static final HashMap<String, String> KafkaInternalDefaults = new HashMap<String, String>();
-
-	/**
-	 * putting values in hashmap like consumer timeout, zookeeper time out, etc
-	 * 
-	 * @param setting
-	 */
-	public static void populateKafkaInternalDefaultsMap() {
-			//@Qualifier("propertyReader") rrNvReadable setting) {
-		try {
-			
-			HashMap<String, String> map1= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
-        				
-			KafkaInternalDefaults.put("consumer.timeout.ms",
-							//	AJSCPropertiesMap.get(CambriaConstants.msgRtr_prop, "consumer.timeout.ms"));
-			map1.get( "consumer.timeout.ms"));
-			
-			KafkaInternalDefaults.put("zookeeper.connection.timeout.ms",
-					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.connection.timeout.ms"));
-					map1.get("zookeeper.connection.timeout.ms"));
-			KafkaInternalDefaults.put("zookeeper.session.timeout.ms",
-					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.session.timeout.ms"));
-			map1.get("zookeeper.session.timeout.ms"));
-			KafkaInternalDefaults.put("zookeeper.sync.time.ms",
-				//	AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "zookeeper.sync.time.ms"));
-			map1.get( "zookeeper.sync.time.ms"));
-			KafkaInternalDefaults.put("auto.commit.interval.ms",
-					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.interval.ms"));
-			map1.get( "auto.commit.interval.ms"));
-			KafkaInternalDefaults.put("fetch.message.max.bytes",
-					//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "fetch.message.max.bytes"));
-			map1.get("fetch.message.max.bytes"));
-			KafkaInternalDefaults.put("auto.commit.enable",
-			//		AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "auto.commit.enable"));
-			map1.get("auto.commit.enable"));
-		} catch (Exception e) {
-			log.error("Failed to load Kafka Internal Properties.", e);
-		}
-	}
-
-	private static final String KafkaConsumerKeys[] = { "socket.timeout.ms",
-			"socket.receive.buffer.bytes", "fetch.message.max.bytes",
-			"auto.commit.interval.ms", "queued.max.message.chunks",
-			"rebalance.max.retries", "fetch.min.bytes", "fetch.wait.max.bytes",
-			"rebalance.backoff.ms", "refresh.leader.backoff.ms",
-			"auto.offset.reset", "consumer.timeout.ms",
-			"zookeeper.session.timeout.ms", "zookeeper.connection.timeout.ms",
-			"zookeeper.sync.time.ms" };
-
-	private static String makeLongKey(String key, String prefix) {
-		return prefix + "." + key;
-	}
-
-	private void transferSettingIfProvided(Properties target, String key,
-			String prefix) {
-		String keyVal= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,makeLongKey(key, prefix));
-	
-	//	if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
-		if (null!=keyVal) {
-	//		final String val = fSettings
-		//			.getString(makeLongKey(key, prefix), "");
-			log.info("Setting [" + key + "] to " + keyVal + ".");
-			target.put(key, keyVal);
-		}
-	}
-
-	}
-
-
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
deleted file mode 100644
index aad992c..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
+++ /dev/null
@@ -1,462 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.I0Itec.zkclient.exception.ZkNoNodeException;
-//import org.apache.log4-j.Logger;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.ConfigPath;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaAclUtils;
-import com.att.nsa.security.NsaApiKey;
-
-import kafka.admin.AdminUtils;
-import kafka.utils.ZKStringSerializer$;
-
-/**
- * class performing all topic operations
- * 
- * @author author
- *
- */
-
-public class DMaaPKafkaMetaBroker implements Broker {
-
-	//private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
-	
-
-	/**
-	 * DMaaPKafkaMetaBroker constructor initializing
-	 * 
-	 * @param settings
-	 * @param zk
-	 * @param configDb
-	 */
-	public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings,
-			@Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) {
-		//fSettings = settings;
-		fZk = zk;
-		fCambriaConfig = configDb;
-		fBaseTopicData = configDb.parse("/topics");
-	}
-
-	@Override
-	public List<Topic> getAllTopics() throws ConfigDbException {
-		log.info("Retrieving list of all the topics.");
-		final LinkedList<Topic> result = new LinkedList<Topic>();
-		try {
-			log.info("Retrieving all topics from root: " + zkTopicsRoot);
-			final List<String> topics = fZk.getChildren(zkTopicsRoot);
-			for (String topic : topics) {
-				result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
-			}
-
-			JSONObject dataObj = new JSONObject();
-			dataObj.put("topics", new JSONObject());
-
-			for (String topic : topics) {
-				dataObj.getJSONObject("topics").put(topic, new JSONObject());
-			}
-		} catch (ZkNoNodeException excp) {
-			// very fresh kafka doesn't have any topics or a topics node
-			log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp);
-		}
-		return result;
-	}
-
-	@Override
-	public Topic getTopic(String topic) throws ConfigDbException {
-		if (fZk.exists(zkTopicsRoot + "/" + topic)) {
-			return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic);
-		}
-		// else: no such topic in kafka
-		return null;
-	}
-
-	/**
-	 * static method get KafkaTopic object
-	 * 
-	 * @param db
-	 * @param base
-	 * @param topic
-	 * @return
-	 * @throws ConfigDbException
-	 */
-	public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException {
-		return new KafkaTopic(topic, db, base);
-	}
-
-	/**
-	 * creating topic
-	 */
-	@Override
-	public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
-			boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
-		log.info("Creating topic: " + topic);
-		try {
-			log.info("Check if topic [" + topic + "] exist.");
-			// first check for existence "our way"
-			final Topic t = getTopic(topic);
-			if (t != null) {
-				log.info("Could not create topic [" + topic + "]. Topic Already exists.");
-				throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists.");
-			}
-		} catch (ConfigDbException e1) {
-			log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1);
-			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-					"Couldn't check topic data in config db.");
-		}
-
-		// we only allow 3 replicas. (If we don't test this, we get weird
-		// results from the cluster,
-		// so explicit test and fail.)
-		if (replicas < 1 || replicas > 3) {
-			log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3.");
-			throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
-					"The replica count must be between 1 and 3.");
-		}
-		if (partitions < 1) {
-			log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1.");
-			throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1.");
-		}
-
-		// create via kafka
-		try {
-			ZkClient zkClient = null;
-			try {
-				log.info("Loading zookeeper client for creating topic.");
-				// FIXME: use of this scala module$ thing is a goofy hack to
-				// make Kafka aware of the
-				// topic creation. (Otherwise, the topic is only partially
-				// created in ZK.)
-				zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
-						ZKStringSerializer$.MODULE$);
-
-				log.info("Zookeeper client loaded successfully. Creating topic.");
-				AdminUtils.createTopic(zkClient, topic, partitions, replicas, new Properties());
-			} catch (kafka.common.TopicExistsException e) {
-				log.error("Topic [" + topic + "] could not be created. " + e.getMessage(), e);
-				throw new TopicExistsException(topic);
-			} catch (ZkNoNodeException e) {
-				log.error("Topic [" + topic + "] could not be created. The Kafka cluster is not setup.", e);
-				// Kafka throws this when the server isn't running (and perhaps
-				// hasn't ever run)
-				throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-						"The Kafka cluster is not setup.");
-			} catch (kafka.admin.AdminOperationException e) {
-				// Kafka throws this when the server isn't running (and perhaps
-				// hasn't ever run)
-				log.error("The Kafka cluster can't handle your request. Talk to the administrators: " + e.getMessage(),
-						e);
-				throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-						"The Kafka cluster can't handle your request. Talk to the administrators.");
-			} finally {
-				log.info("Closing zookeeper connection.");
-				if (zkClient != null)
-					zkClient.close();
-			}
-
-			log.info("Creating topic entry for topic: " + topic);
-			// underlying Kafka topic created. now setup our API info
-			return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
-		} catch (ConfigDbException excp) {
-			log.error("Failed to create topic data. Talk to the administrators: " + excp.getMessage(), excp);
-			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-					"Failed to create topic data. Talk to the administrators.");
-		}
-	}
-
-	@Override
-	public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException {
-		log.info("Deleting topic: " + topic);
-		ZkClient zkClient = null;
-		try {
-			log.info("Loading zookeeper client for topic deletion.");
-			// FIXME: use of this scala module$ thing is a goofy hack to make
-			// Kafka aware of the
-			// topic creation. (Otherwise, the topic is only partially created
-			// in ZK.)
-			zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
-					ZKStringSerializer$.MODULE$);
-
-			log.info("Zookeeper client loaded successfully. Deleting topic.");
-			AdminUtils.deleteTopic(zkClient, topic);
-		} catch (kafka.common.TopicExistsException e) {
-			log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
-			throw new TopicExistsException(topic);
-		} catch (ZkNoNodeException e) {
-			log.error("Failed to delete topic [" + topic + "]. The Kafka cluster is not setup." + e.getMessage(), e);
-			// Kafka throws this when the server isn't running (and perhaps
-			// hasn't ever run)
-			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, "The Kafka cluster is not setup.");
-		} catch (kafka.admin.AdminOperationException e) {
-			// Kafka throws this when the server isn't running (and perhaps
-			// hasn't ever run)
-			log.error("The Kafka cluster can't handle your request. Talk to the administrators." + e.getMessage(), e);
-			throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
-					"The Kafka cluster can't handle your request. Talk to the administrators.");
-		} finally {
-			log.info("Closing zookeeper connection.");
-			if (zkClient != null)
-				zkClient.close();
-		}
-
-		// throw new UnsupportedOperationException ( "We can't programmatically
-		// delete Kafka topics yet." );
-	}
-
-	//private final rrNvReadable fSettings;
-	private final ZkClient fZk;
-	private final ConfigDb fCambriaConfig;
-	private final ConfigPath fBaseTopicData;
-
-	private static final String zkTopicsRoot = "/brokers/topics";
-	private static final JSONObject kEmptyAcl = new JSONObject();
-
-	/**
-	 * method Providing KafkaTopic Object associated with owner and
-	 * transactionenabled or not
-	 * 
-	 * @param name
-	 * @param desc
-	 * @param owner
-	 * @param transactionEnabled
-	 * @return
-	 * @throws ConfigDbException
-	 */
-	public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled)
-			throws ConfigDbException {
-		return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled);
-	}
-
-	/**
-	 * static method giving kafka topic object
-	 * 
-	 * @param db
-	 * @param basePath
-	 * @param name
-	 * @param desc
-	 * @param owner
-	 * @param transactionEnabled
-	 * @return
-	 * @throws ConfigDbException
-	 */
-	public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner,
-			boolean transactionEnabled) throws ConfigDbException {
-		final JSONObject o = new JSONObject();
-		o.put("owner", owner);
-		o.put("description", desc);
-		o.put("txenabled", transactionEnabled);
-		db.store(basePath.getChild(name), o.toString());
-		return new KafkaTopic(name, db, basePath);
-	}
-
-	/**
-	 * class performing all user opearation like user is eligible to read,
-	 * write. permitting a user to write and read,
-	 * 
-	 * @author author
-	 *
-	 */
-	public static class KafkaTopic implements Topic {
-		/**
-		 * constructor initializes
-		 * 
-		 * @param name
-		 * @param configdb
-		 * @param baseTopic
-		 * @throws ConfigDbException
-		 */
-		public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException {
-			fName = name;
-			fConfigDb = configdb;
-			fBaseTopicData = baseTopic;
-
-			String data = fConfigDb.load(fBaseTopicData.getChild(fName));
-			if (data == null) {
-				data = "{}";
-			}
-
-			final JSONObject o = new JSONObject(data);
-			fOwner = o.optString("owner", "");
-			fDesc = o.optString("description", "");
-			fTransactionEnabled = o.optBoolean("txenabled", false);// default
-																	// value is
-																	// false
-			// if this topic has an owner, it needs both read/write ACLs. If there's no
-						// owner (or it's empty), null is okay -- this is for existing or implicitly
-						// created topics.
-						JSONObject readers = o.optJSONObject ( "readers" );
-						if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
-						fReaders = fromJson ( readers );
-
-						JSONObject writers = o.optJSONObject ( "writers" );
-						if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
-						fWriters = fromJson ( writers );
-		}
-		 private NsaAcl fromJson(JSONObject o) {
-				NsaAcl acl = new NsaAcl();
-				if (o != null) {
-					JSONArray a = o.optJSONArray("allowed");
-					if (a != null) {
-						for (int i = 0; i < a.length(); ++i) {
-							String user = a.getString(i);
-							acl.add(user);
-						}
-					}
-				}
-				return acl;
-			}
-		@Override
-		public String getName() {
-			return fName;
-		}
-
-		@Override
-		public String getOwner() {
-			return fOwner;
-		}
-
-		@Override
-		public String getDescription() {
-			return fDesc;
-		}
-
-		@Override
-		public NsaAcl getReaderAcl() {
-			return fReaders;
-		}
-
-		@Override
-		public NsaAcl getWriterAcl() {
-			return fWriters;
-		}
-
-		@Override
-		public void checkUserRead(NsaApiKey user) throws AccessDeniedException  {
-			NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user );
-		}
-
-		@Override
-		public void checkUserWrite(NsaApiKey user) throws AccessDeniedException  {
-			NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user );
-		}
-
-		@Override
-		public void permitWritesFromUser(String pubId, NsaApiKey asUser)
-				throws ConfigDbException, AccessDeniedException {
-			updateAcl(asUser, false, true, pubId);
-		}
-
-		@Override
-		public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException {
-			updateAcl(asUser, false, false, pubId);
-		}
-
-		@Override
-		public void permitReadsByUser(String consumerId, NsaApiKey asUser)
-				throws ConfigDbException, AccessDeniedException {
-			updateAcl(asUser, true, true, consumerId);
-		}
-
-		@Override
-		public void denyReadsByUser(String consumerId, NsaApiKey asUser)
-				throws ConfigDbException, AccessDeniedException {
-			updateAcl(asUser, true, false, consumerId);
-		}
-
-		private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key)
-				throws ConfigDbException, AccessDeniedException{
-			try
-			{
-				final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add );
-	
-				// we have to assume we have current data, or load it again. for the expected use
-				// case, assuming we can overwrite the data is fine.
-				final JSONObject o = new JSONObject ();
-				o.put ( "owner", fOwner );
-				o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) );
-				o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) );
-				fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () );
-				
-				log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName );
-	
-			}
-			catch ( ConfigDbException x )
-			{
-				throw x;
-			}
-			catch ( AccessDeniedException x )
-			{
-				throw x;
-			}
-			
-		}
-
-		private JSONObject safeSerialize(NsaAcl acl) {
-			return acl == null ? null : acl.serialize();
-		}
-
-		private final String fName;
-		private final ConfigDb fConfigDb;
-		private final ConfigPath fBaseTopicData;
-		private final String fOwner;
-		private final String fDesc;
-		private final NsaAcl fReaders;
-		private final NsaAcl fWriters;
-		private boolean fTransactionEnabled;
-
-		public boolean isTransactionEnabled() {
-			return fTransactionEnabled;
-		}
-
-		@Override
-		public Set<String> getOwners() {
-			final TreeSet<String> owners = new TreeSet<String> ();
-			owners.add ( fOwner );
-			return owners;
-		}
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java
deleted file mode 100644
index f98eeee..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPMetricsSet.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.apiServer.metrics.cambria.DMaaPMetricsSender;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiVersionInfo;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.metrics.impl.CdmConstant;
-import com.att.nsa.metrics.impl.CdmCounter;
-import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl;
-import com.att.nsa.metrics.impl.CdmMovingAverage;
-import com.att.nsa.metrics.impl.CdmRateTicker;
-import com.att.nsa.metrics.impl.CdmSimpleMetric;
-import com.att.nsa.metrics.impl.CdmStringConstant;
-import com.att.nsa.metrics.impl.CdmTimeSince;
-
-/*@Component("dMaaPMetricsSet")*/
-/**
- * Metrics related information
- * 
- * @author author
- *
- */
-public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
-
-	private final CdmStringConstant fVersion;
-	private final CdmConstant fStartTime;
-	private final CdmTimeSince fUpTime;
-
-	private final CdmCounter fRecvTotal;
-	private final CdmRateTicker fRecvEpsInstant;
-	private final CdmRateTicker fRecvEpsShort;
-	private final CdmRateTicker fRecvEpsLong;
-
-	private final CdmCounter fSendTotal;
-	private final CdmRateTicker fSendEpsInstant;
-	private final CdmRateTicker fSendEpsShort;
-	private final CdmRateTicker fSendEpsLong;
-
-	private final CdmCounter fKafkaConsumerCacheMiss;
-	private final CdmCounter fKafkaConsumerCacheHit;
-
-	private final CdmCounter fKafkaConsumerClaimed;
-	private final CdmCounter fKafkaConsumerTimeout;
-
-	private final CdmSimpleMetric fFanOutRatio;
-
-	private final HashMap<String, CdmRateTicker> fPathUseRates;
-	private final HashMap<String, CdmMovingAverage> fPathAvgs;
-
-	private rrNvReadable fSettings;
-
-	private final ScheduledExecutorService fScheduler;
-
-	/**
-	 * Constructor initialization
-	 * 
-	 * @param cs
-	 */
-	//public DMaaPMetricsSet() {
-		public DMaaPMetricsSet(rrNvReadable cs) {
-		//fSettings = cs;
-
-		fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
-		super.putItem("version", fVersion);
-
-		final long startTime = System.currentTimeMillis();
-		final Date d = new Date(startTime);
-		final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
-		fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text);
-		super.putItem("startTime", fStartTime);
-
-		fUpTime = new CdmTimeSince("seconds since start");
-		super.putItem("upTime", fUpTime);
-
-		fRecvTotal = new CdmCounter("Total events received since start");
-		super.putItem("recvTotalEvents", fRecvTotal);
-
-		fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
-		super.putItem("recvEpsInstant", fRecvEpsInstant);
-
-		fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
-		super.putItem("recvEpsShort", fRecvEpsShort);
-
-		fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
-		super.putItem("recvEpsLong", fRecvEpsLong);
-
-		fSendTotal = new CdmCounter("Total events sent since start");
-		super.putItem("sendTotalEvents", fSendTotal);
-
-		fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
-		super.putItem("sendEpsInstant", fSendEpsInstant);
-
-		fSendEpsShort = new CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
-		super.putItem("sendEpsShort", fSendEpsShort);
-
-		fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
-		super.putItem("sendEpsLong", fSendEpsLong);
-
-		fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses");
-		super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss);
-
-		fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits");
-		super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit);
-
-		fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed");
-		super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed);
-
-		fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout");
-		super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout);
-
-		// FIXME: CdmLevel is not exactly a great choice
-		fFanOutRatio = new CdmSimpleMetric() {
-			@Override
-			public String getRawValueString() {
-				return getRawValue().toString();
-			}
-
-			@Override
-			public Number getRawValue() {
-				final double s = fSendTotal.getValue();
-				final double r = fRecvTotal.getValue();
-				return r == 0.0 ? 0.0 : s / r;
-			}
-
-			@Override
-			public String summarize() {
-				return getRawValueString() + " sends per recv";
-			}
-
-		};
-		super.putItem("fanOut", fFanOutRatio);
-
-		// these are added to the metrics catalog as they're discovered
-		fPathUseRates = new HashMap<String, CdmRateTicker>();
-		fPathAvgs = new HashMap<String, CdmMovingAverage>();
-
-		fScheduler = Executors.newScheduledThreadPool(1);
-	}
-
-	@Override
-	public void setupCambriaSender() {
-		DMaaPMetricsSender.sendPeriodically(fScheduler, this,  "cambria.apinode.metrics.dmaap");
-	}
-
-	@Override
-	public void onRouteComplete(String name, long durationMs) {
-		CdmRateTicker ticker = fPathUseRates.get(name);
-		if (ticker == null) {
-			ticker = new CdmRateTicker("calls/min on path " + name + "", 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS);
-			fPathUseRates.put(name, ticker);
-			super.putItem("pathUse_" + name, ticker);
-		}
-		ticker.tick();
-
-		CdmMovingAverage durs = fPathAvgs.get(name);
-		if (durs == null) {
-			durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES);
-			fPathAvgs.put(name, durs);
-			super.putItem("pathDurationMs_" + name, durs);
-		}
-		durs.tick(durationMs);
-	}
-
-	@Override
-	public void publishTick(int amount) {
-		if (amount > 0) {
-			fRecvTotal.bumpBy(amount);
-			fRecvEpsInstant.tick(amount);
-			fRecvEpsShort.tick(amount);
-			fRecvEpsLong.tick(amount);
-		}
-	}
-
-	@Override
-	public void consumeTick(int amount) {
-		if (amount > 0) {
-			fSendTotal.bumpBy(amount);
-			fSendEpsInstant.tick(amount);
-			fSendEpsShort.tick(amount);
-			fSendEpsLong.tick(amount);
-		}
-	}
-
-	@Override
-	public void onKafkaConsumerCacheMiss() {
-		fKafkaConsumerCacheMiss.bump();
-	}
-
-	@Override
-	public void onKafkaConsumerCacheHit() {
-		fKafkaConsumerCacheHit.bump();
-	}
-
-	@Override
-	public void onKafkaConsumerClaimed() {
-		fKafkaConsumerClaimed.bump();
-	}
-
-	@Override
-	public void onKafkaConsumerTimeout() {
-		fKafkaConsumerTimeout.bump();
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java
deleted file mode 100644
index 2826289..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPNsaApiDb.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.security.Key;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-//import org.apache.log4-j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.confimpl.EncryptingLayer;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.db.BaseNsaApiDbImpl;
-import com.att.nsa.security.db.EncryptingApiDbImpl;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
-import com.att.nsa.util.rrConvertor;
-
-/**
- * 
- * @author author
- *
- */
-public class DMaaPNsaApiDb {
-	
-	//private rrNvReadable settings;
-	private DMaaPZkConfigDb cdb;
-	
-	//private static final Logger log = Logger
-		//	.getLogger(DMaaPNsaApiDb.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class);
-	
-/**
- * 
- * Constructor initialized
- * @param settings
- * @param cdb
- */
-	@Autowired
-	public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) {
-		//this.setSettings(settings);
-		this.setCdb(cdb);
-	}
-	/**
-	 * 
-	 * @param settings
-	 * @param cdb
-	 * @return
-	 * @throws ConfigDbException
-	 * @throws missingReqdSetting
-	 */
-	public static NsaApiDb<NsaSimpleApiKey> buildApiKeyDb(
-			rrNvReadable settings, ConfigDb cdb) throws ConfigDbException,
-			missingReqdSetting {
-		// Cambria uses an encrypted api key db
-
-		//final String keyBase64 = settings.getString("cambria.secureConfig.key",			null);
-		final String keyBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.key");
-		
-		
-	//	final String initVectorBase64 = settings.getString(				"cambria.secureConfig.iv", null);
-	final String initVectorBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.iv");
-		// if neither value was provided, don't encrypt api key db
-		if (keyBase64 == null && initVectorBase64 == null) {
-			log.info("This server is configured to use an unencrypted API key database. See the settings documentation.");
-			return new BaseNsaApiDbImpl<NsaSimpleApiKey>(cdb,
-					new NsaSimpleApiKeyFactory());
-		} else if (keyBase64 == null) {
-			// neither or both, otherwise something's goofed
-			throw new missingReqdSetting("cambria.secureConfig.key");
-		} else if (initVectorBase64 == null) {
-			// neither or both, otherwise something's goofed
-			throw new missingReqdSetting("cambria.secureConfig.iv");
-		} else {
-			log.info("This server is configured to use an encrypted API key database.");
-			final Key key = EncryptingLayer.readSecretKey(keyBase64);
-			final byte[] iv = rrConvertor.base64Decode(initVectorBase64);
-			return new EncryptingApiDbImpl<NsaSimpleApiKey>(cdb,
-					new NsaSimpleApiKeyFactory(), key, iv);
-		}
-	}
-
-	/**
-	 * @return
-	 * returns settings
-	 */
-/*	public rrNvReadable getSettings() {
-		return settings;
-	}*/
-
-	/**
-	 * @param settings
-	 * set settings
-	 */
-	/*public void setSettings(rrNvReadable settings) {
-		this.settings = settings;
-	}*/
-
-	 /**
-	 * @return
-	 * returns cbd
-	 */
-	public DMaaPZkConfigDb getCdb() {
-		return cdb;
-	}
-	/**
-	 * @param cdb
-	 * set cdb
-	 */
-	public void setCdb(DMaaPZkConfigDb cdb) {
-		this.cdb = cdb;
-	}
-
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java
deleted file mode 100644
index 45af1f6..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkClient.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * Created for Zookeeper client which will read configuration and settings parameter
- * @author author
- *
- */
-public class DMaaPZkClient extends ZkClient {
-
-	/**
-	 * This constructor will get the settings value from rrNvReadable
-     * and ConfigurationReader's zookeeper connection
-	 * @param settings
-	 */
-	public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) {
-		super(ConfigurationReader.getMainZookeeperConnectionString());
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java
deleted file mode 100644
index db59224..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/DMaaPZkConfigDb.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.configs.confimpl.ZkConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-//import com.att.nsa.configs.confimpl.ZkConfigDb;
-/**
- * Provide the zookeeper config db connection 
- * @author author
- *
- */
-public class DMaaPZkConfigDb extends ZkConfigDb {
-	/**
-	 * This Constructor will provide the configuration details from the property reader
-     * and DMaaPZkClient
-	 * @param zk
-	 * @param settings
-	 */
-	public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk,
-			@Qualifier("propertyReader") rrNvReadable settings) {
-		
-		//super(com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot)==null?CambriaConstants.kDefault_ZkConfigDbRoot:com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot));
-		super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot());
-		
-	}
-	
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java
deleted file mode 100644
index f28e9ed..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/LogDetails.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-/**
- * 
- */
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.util.Date;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-
-/**
- * @author author
- *
- */
-
-public class LogDetails {
-	
-	private String publisherId;
-	private String topicId;
-	private String subscriberGroupId;
-	private String subscriberId;
-	private String publisherIp;
-	private String messageBatchId;
-	private String messageSequence;
-	private String messageTimestamp;
-	private String consumeTimestamp;
-	private String transactionIdTs;	
-	private String serverIp;
-	
-	private long messageLengthInBytes; 
-	private long totalMessageCount;
-	
-	private boolean transactionEnabled;
-	/**
-	 * This is for transaction enabled logging details
-	 *
-	 */
-	public LogDetails() {
-		super();
-	}
-
-	public String getTransactionId() {
-		StringBuilder transactionId = new StringBuilder();
-		transactionId.append(transactionIdTs);
-		transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
-		transactionId.append(publisherIp);
-		transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
-		transactionId.append(messageBatchId);
-		transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
-		transactionId.append(messageSequence);
-
-		return transactionId.toString();
-	}
-
-	public String getPublisherId() {
-		return publisherId;
-	}
-
-	public void setPublisherId(String publisherId) {
-		this.publisherId = publisherId;
-	}
-
-	public String getTopicId() {
-		return topicId;
-	}
-
-	public void setTopicId(String topicId) {
-		this.topicId = topicId;
-	}
-
-	public String getSubscriberGroupId() {
-		return subscriberGroupId;
-	}
-
-	public void setSubscriberGroupId(String subscriberGroupId) {
-		this.subscriberGroupId = subscriberGroupId;
-	}
-
-	public String getSubscriberId() {
-		return subscriberId;
-	}
-
-	public void setSubscriberId(String subscriberId) {
-		this.subscriberId = subscriberId;
-	}
-
-	public String getPublisherIp() {
-		return publisherIp;
-	}
-
-	public void setPublisherIp(String publisherIp) {
-		this.publisherIp = publisherIp;
-	}
-
-	public String getMessageBatchId() {
-		return messageBatchId;
-	}
-
-	public void setMessageBatchId(Long messageBatchId) {
-		this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId);
-	}
-
-	public String getMessageSequence() {
-		return messageSequence;
-	}
-
-	public void setMessageSequence(String messageSequence) {
-		this.messageSequence = messageSequence;
-	}
-
-	public String getMessageTimestamp() {
-		return messageTimestamp;
-	}
-
-	public void setMessageTimestamp(String messageTimestamp) {
-		this.messageTimestamp = messageTimestamp;
-	}
-
-	public String getPublishTimestamp() {
-		return Utils.getFormattedDate(new Date());
-	}
-
-	public String getConsumeTimestamp() {
-		return consumeTimestamp;
-	}
-
-	public void setConsumeTimestamp(String consumeTimestamp) {
-		this.consumeTimestamp = consumeTimestamp;
-	}
-
-	public long getMessageLengthInBytes() {
-		return messageLengthInBytes;
-	}
-
-	public void setMessageLengthInBytes(long messageLengthInBytes) {
-		this.messageLengthInBytes = messageLengthInBytes;
-	}
-
-	public long getTotalMessageCount() {
-		return totalMessageCount;
-	}
-
-	public void setTotalMessageCount(long totalMessageCount) {
-		this.totalMessageCount = totalMessageCount;
-	}
-
-	public boolean isTransactionEnabled() {
-		return transactionEnabled;
-	}
-
-	public void setTransactionEnabled(boolean transactionEnabled) {
-		this.transactionEnabled = transactionEnabled;
-	}
-
-	public String getTransactionIdTs() {
-		return transactionIdTs;
-	}
-
-	public void setTransactionIdTs(String transactionIdTs) {
-		this.transactionIdTs = transactionIdTs;
-	}
-
-	public String getPublisherLogDetails() {
-		
-			StringBuilder buffer = new StringBuilder();
-			buffer.append("[publisherId=" + publisherId);
-			buffer.append(", topicId=" + topicId);
-			buffer.append(", messageTimestamp=" + messageTimestamp);
-			buffer.append(", publisherIp=" + publisherIp);
-			buffer.append(", messageBatchId=" + messageBatchId);
-			buffer.append(", messageSequence=" + messageSequence );
-			buffer.append(", messageLengthInBytes=" + messageLengthInBytes);
-			buffer.append(", transactionEnabled=" + transactionEnabled);
-			buffer.append(", transactionId=" + getTransactionId());
-			buffer.append(", publishTimestamp=" + getPublishTimestamp());		
-			buffer.append(", serverIp=" + getServerIp()+"]");
-		return buffer.toString();
-		
-	}
-
-	public String getServerIp() {
-		return serverIp;
-	}
-
-	public void setServerIp(String serverIp) {
-		this.serverIp = serverIp;
-	}
-
-	public void setMessageBatchId(String messageBatchId) {
-		this.messageBatchId = messageBatchId;
-	}
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java
deleted file mode 100644
index 9ff8a32..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/beans/TopicBean.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-/**
- * 
- */
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * @author author
- *
- */
-@XmlRootElement
-public class TopicBean implements Serializable {
-
-	private static final long serialVersionUID = -8620390377775457949L;
-	private String topicName;
-	private String topicDescription;
-
-	private int partitionCount = 1; //default values
-	private int replicationCount = 1; //default value
-
-	private boolean transactionEnabled;
-
-	/**
-	 * constructor
-	 */
-	public TopicBean() {
-		super();
-	}
-
-	/**
-	 * constructor initialization with topic details name, description,
-	 * partition, replication, transaction
-	 * 
-	 * @param topicName
-	 * @param description
-	 * @param partitionCount
-	 * @param replicationCount
-	 * @param transactionEnabled
-	 */
-	public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount,
-			boolean transactionEnabled) {
-		super();
-		this.topicName = topicName;
-		this.topicDescription = topicDescription;
-		this.partitionCount = partitionCount;
-		this.replicationCount = replicationCount;
-		this.transactionEnabled = transactionEnabled;
-	}
-
-	/**
-	 * @return
-	 * returns topic name which is of String type
-	 */
-	public String getTopicName() {
-		return topicName;
-	}
-
-	/**
-	 * @param topicName
-	 * set topic name  
-	 */
-	public void setTopicName(String topicName) {
-		this.topicName = topicName;
-	}
-
-
-	/**
-	 * @return
-	 * returns partition count which is of int type
-	 */
-	public int getPartitionCount() {
-		return partitionCount;
-	}
-
-	/**
-	 * @param partitionCount
-	 * set partition Count 
-	 */
-	public void setPartitionCount(int partitionCount) {
-		this.partitionCount = partitionCount;
-	}
-	
-	/**
-	 * @return
-	 * returns replication count which is of int type
-	 */
-	public int getReplicationCount() {
-		return replicationCount;
-	}
-	
-	/**
-	 * @param
-	 * set replication count which is of int type
-	 */
-	public void setReplicationCount(int replicationCount) {
-		this.replicationCount = replicationCount;
-	}
-	
-	/**
-	 * @return
-	 * returns boolean value which indicates whether transaction is Enabled 
-	 */
-	public boolean isTransactionEnabled() {
-		return transactionEnabled;
-	}
-	
-	/**
-	 * @param
-	 * sets boolean value which indicates whether transaction is Enabled 
-	 */
-	public void setTransactionEnabled(boolean transactionEnabled) {
-		this.transactionEnabled = transactionEnabled;
-	}
-
-	/**
-	 * 
-	 * @return returns description which is of String type
-	 */
-	public String getTopicDescription() {
-		return topicDescription;
-	}
-	/**
-	 * 
-	 * @param topicDescription
-	 * set description which is of String type
-	 */
-	public void setTopicDescription(String topicDescription) {
-		this.topicDescription = topicDescription;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java
deleted file mode 100644
index 98d0766..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/constants/CambriaConstants.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants;
-
-import org.apache.coyote.http11.Http11NioProtocol;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-
-/**
- * This is the constant files for all the property or parameters.
- * @author author
- *
- */
-public interface CambriaConstants {
-
-	String CAMBRIA = "Cambria";
-	String DMAAP = "DMaaP";
-
-	String kDefault_ZkRoot = "/fe3c/cambria";
-
-	String kSetting_ZkConfigDbRoot = "config.zk.root";
-	String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
-String msgRtr_prop="MsgRtrApi.properties";
-	String kBrokerType = "broker.type";
-	
-	/**
-	 * value to use to signal kafka broker type.
-	 */
-	String kBrokerType_Kafka = "kafka";
-	String kBrokerType_Memory = "memory";
-	String kSetting_AdminSecret = "authentication.adminSecret";
-
-	String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier";
-
-	/**
-	 * value to use to signal max empty poll per minute
-	 */
-	String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
-	double kDefault_MaxEmptyPollsPerMinute = 10.0;
-
-	String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
-	long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
-
-	String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
-	int kDefault_RateLimitWindowLength = 5;
-
-	String kConfig = "c";
-
-	String kSetting_Port = "cambria.service.port";
-	/**
-	 * value to use to signal default port
-	 */
-	int kDefault_Port = 3904;
-
-	String kSetting_MaxThreads = "tomcat.maxthreads";
-	int kDefault_MaxThreads = -1;
-	
-	
-//	String kSetting_TomcatProtocolClass = "tomcat.protocolClass";
-	//String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName ();
-
-	String kSetting_ZkConfigDbServers = "config.zk.servers";
-	
-	/**
-	 * value to indicate localhost port number
-	 */
-	String kDefault_ZkConfigDbServers = "localhost:2181";
-
-	/**
-	 * value to use to signal Session time out
-	 */
-	String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout";
-	int kDefault_ZkSessionTimeoutMs = 20 * 1000;
-
-	/**
-	 * value to use to signal connection time out 
-	 */
-	String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout";
-	int kDefault_ZkConnectionTimeoutMs = 5 * 1000;
-
-	String TRANSACTION_ID_SEPARATOR = "::";
-
-	/**
-	 * value to use to signal there's no timeout on the consumer request.
-	 */
-	public static final int kNoTimeout = 10000;
-
-	/**
-	 * value to use to signal no limit in the number of messages returned.
-	 */
-	public static final int kNoLimit = 0;
-
-	/**
-	 * value to use to signal that the caller wants the next set of events
-	 */
-	public static final int kNextOffset = -1;
-
-	/**
-	 * value to use to signal there's no filter on the response stream.
-	 */
-	public static final String kNoFilter = "";
-
-	//Added for Metric publish
-	public static final int kStdCambriaServicePort = 3904;
-	public static final String kBasePath = "/events/";
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java
deleted file mode 100644
index 190714f..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPAccessDeniedException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-
-public class DMaaPAccessDeniedException extends CambriaApiException{
-	
-	
-	
-	public DMaaPAccessDeniedException(ErrorResponse errRes) {
-		super(errRes);
-		
-	}
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
-
-	
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
deleted file mode 100644
index 10d127f..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-
-import javax.inject.Singleton;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.apache.http.HttpStatus;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.springframework.beans.factory.annotation.Autowired;
-
-/**
- * Exception Mapper class to handle
- * CambriaApiException 
- * @author author
- *
- */
-@Provider
-@Singleton
-public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
-
-private ErrorResponse errRes;
-
-//private static final Logger LOGGER = Logger.getLogger(DMaaPCambriaExceptionMapper.class);
-private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
-	
-	@Autowired
-	private DMaaPErrorMessages msgs;
-	
-	public DMaaPCambriaExceptionMapper() {
-		super();
-		LOGGER.info("Cambria Exception Mapper Created..");
-	}
-	
-	@Override
-	public Response toResponse(CambriaApiException ex) {
-
-		LOGGER.info("Reached Cambria Exception Mapper..");
-		
-		/**
-		 * Cambria Generic Exception
-		 */
-		if(ex instanceof CambriaApiException)
-		{
-			
-			errRes = ex.getErrRes();
-			if(errRes!=null) {
-				
-				return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-			}
-			else
-			{
-				return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
-			            .build();
-			}
-			
-			
-		}
-		else
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
-			return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
-		}
-		
-	}
-
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java
deleted file mode 100644
index 5ef2493..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPErrorMessages.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Component;
-
-/**
- * This Class reads the error message properties
- * from the properties file
- * @author author
- *
- */
-@Component
-public class DMaaPErrorMessages {
-
-	@Value("${resource.not.found}")
-	private String notFound;
-	
-	@Value("${server.unavailable}")
-	private String serverUnav;
-	
-	@Value("${http.method.not.allowed}")
-	private String methodNotAllowed;
-	
-	@Value("${incorrect.request.json}")
-	private String badRequest;
-	
-	@Value("${network.time.out}")
-	private String nwTimeout;
-	
-	@Value("${get.topic.failure}")
-	private String topicsfailure;
-	
-	@Value("${not.permitted.access.1}")
-	private String notPermitted1;
-	
-	@Value("${not.permitted.access.2}")
-	private String notPermitted2;
-	
-	@Value("${get.topic.details.failure}")
-	private String topicDetailsFail;
-	
-	@Value("${create.topic.failure}")
-	private String createTopicFail;
-	
-	@Value("${delete.topic.failure}")
-	private String deleteTopicFail;
-	
-	@Value("${incorrect.json}")
-	private String incorrectJson;
-	
-	@Value("${consume.msg.error}")
-	private String consumeMsgError;
-	
-	@Value("${publish.msg.error}")
-	private String publishMsgError;
-	
-	
-	@Value("${publish.msg.count}")
-	private String publishMsgCount;
-	
-	
-	@Value("${authentication.failure}")
-	private String authFailure;
-	@Value("${msg_size_exceeds}")
-	private String msgSizeExceeds;
-	
-	
-	@Value("${topic.not.exist}")
-	private String topicNotExist;
-	
-	public String getMsgSizeExceeds() {
-		return msgSizeExceeds;
-	}
-
-	public void setMsgSizeExceeds(String msgSizeExceeds) {
-		this.msgSizeExceeds = msgSizeExceeds;
-	}
-
-	public String getNotFound() {
-		return notFound;
-	}
-
-	public void setNotFound(String notFound) {
-		this.notFound = notFound;
-	}
-
-	public String getServerUnav() {
-		return serverUnav;
-	}
-
-	public void setServerUnav(String serverUnav) {
-		this.serverUnav = serverUnav;
-	}
-
-	public String getMethodNotAllowed() {
-		return methodNotAllowed;
-	}
-
-	public void setMethodNotAllowed(String methodNotAllowed) {
-		this.methodNotAllowed = methodNotAllowed;
-	}
-
-	public String getBadRequest() {
-		return badRequest;
-	}
-
-	public void setBadRequest(String badRequest) {
-		this.badRequest = badRequest;
-	}
-
-	public String getNwTimeout() {
-		return nwTimeout;
-	}
-
-	public void setNwTimeout(String nwTimeout) {
-		this.nwTimeout = nwTimeout;
-	}
-
-	public String getNotPermitted1() {
-		return notPermitted1;
-	}
-
-	public void setNotPermitted1(String notPermitted1) {
-		this.notPermitted1 = notPermitted1;
-	}
-
-	public String getNotPermitted2() {
-		return notPermitted2;
-	}
-
-	public void setNotPermitted2(String notPermitted2) {
-		this.notPermitted2 = notPermitted2;
-	}
-
-	public String getTopicsfailure() {
-		return topicsfailure;
-	}
-
-	public void setTopicsfailure(String topicsfailure) {
-		this.topicsfailure = topicsfailure;
-	}
-
-	public String getTopicDetailsFail() {
-		return topicDetailsFail;
-	}
-
-	public void setTopicDetailsFail(String topicDetailsFail) {
-		this.topicDetailsFail = topicDetailsFail;
-	}
-
-	public String getCreateTopicFail() {
-		return createTopicFail;
-	}
-
-	public void setCreateTopicFail(String createTopicFail) {
-		this.createTopicFail = createTopicFail;
-	}
-
-	public String getIncorrectJson() {
-		return incorrectJson;
-	}
-
-	public void setIncorrectJson(String incorrectJson) {
-		this.incorrectJson = incorrectJson;
-	}
-
-	public String getDeleteTopicFail() {
-		return deleteTopicFail;
-	}
-
-	public void setDeleteTopicFail(String deleteTopicFail) {
-		this.deleteTopicFail = deleteTopicFail;
-	}
-
-	public String getConsumeMsgError() {
-		return consumeMsgError;
-	}
-
-	public void setConsumeMsgError(String consumeMsgError) {
-		this.consumeMsgError = consumeMsgError;
-	}
-
-	public String getPublishMsgError() {
-		return publishMsgError;
-	}
-
-	public void setPublishMsgError(String publishMsgError) {
-		this.publishMsgError = publishMsgError;
-	}
-
-	public String getPublishMsgCount() {
-		return publishMsgCount;
-	}
-
-	public String getAuthFailure() {
-		return authFailure;
-	}
-
-	public void setAuthFailure(String authFailure) {
-		this.authFailure = authFailure;
-	}
-
-	public void setPublishMsgCount(String publishMsgCount) {
-		this.publishMsgCount = publishMsgCount;
-	}
-
-	public String getTopicNotExist() {
-		return topicNotExist;
-	}
-
-	public void setTopicNotExist(String topicNotExist) {
-		this.topicNotExist = topicNotExist;
-	}
-	
-	
-	
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java
deleted file mode 100644
index 1302686..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPResponseCode.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-
-/**
- * Define the Error Response Codes for MR
- * using this enumeration
- * @author author
- *
- */
-public enum DMaaPResponseCode {
-	
-	  
-	  /**
-	   * GENERIC
-	   */
-	  RESOURCE_NOT_FOUND(3001),
-	  SERVER_UNAVAILABLE(3002),
-	  METHOD_NOT_ALLOWED(3003),
-	  GENERIC_INTERNAL_ERROR(1004),
-	  /**
-	   * AAF
-	   */
-	  INVALID_CREDENTIALS(4001),
-	  ACCESS_NOT_PERMITTED(4002),
-	  UNABLE_TO_AUTHORIZE(4003),
-	  /**
-	   * PUBLISH AND SUBSCRIBE
-	   */
-	  MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001),
-	  UNABLE_TO_PUBLISH(5002),
-	  INCORRECT_BATCHING_FORMAT(5003),
-	  MSG_SIZE_EXCEEDS_MSG_LIMIT(5004),
-	  INCORRECT_JSON(5005),
-	  CONN_TIMEOUT(5006),
-	  PARTIAL_PUBLISH_MSGS(5007),
-	  CONSUME_MSG_ERROR(5008),
-	  PUBLISH_MSG_ERROR(5009), 
-	  RETRIEVE_TRANSACTIONS(5010),
-	  RETRIEVE_TRANSACTIONS_DETAILS(5011),
-	  TOO_MANY_REQUESTS(5012),
-	  
-	  RATE_LIMIT_EXCEED(301),
-	 
-	  /**
-	   * TOPICS
-	   */
-	GET_TOPICS_FAIL(6001),
-	GET_TOPICS_DETAILS_FAIL(6002),
-	CREATE_TOPIC_FAIL(6003),
-	DELETE_TOPIC_FAIL(6004),
-	GET_PUBLISHERS_BY_TOPIC(6005),
-	GET_CONSUMERS_BY_TOPIC(6006),
-	PERMIT_PUBLISHER_FOR_TOPIC(6007),
-	REVOKE_PUBLISHER_FOR_TOPIC(6008),
-	PERMIT_CONSUMER_FOR_TOPIC(6009),
-	REVOKE_CONSUMER_FOR_TOPIC(6010),
-	GET_CONSUMER_CACHE(6011),
-	DROP_CONSUMER_CACHE(6012),
-	GET_METRICS_ERROR(6013),
-	GET_BLACKLIST(6014),
-	ADD_BLACKLIST(6015),
-	REMOVE_BLACKLIST(6016),
-	TOPIC_NOT_IN_AAF(6017);
-	private int responseCode;
-	
-	public int getResponseCode() {
-		return responseCode;
-	}
-	private DMaaPResponseCode (final int code) {
-		responseCode = code;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java
deleted file mode 100644
index f526eb5..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/DMaaPWebExceptionMapper.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-
-import javax.inject.Singleton;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotAllowedException;
-import javax.ws.rs.NotAuthorizedException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.apache.http.HttpStatus;
-//import org.apache.log-4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-
-/**
- * Exception Mapper class to handle
- * Jersey Exceptions
- * @author author
- *
- */
-@Provider
-@Singleton
-public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
-	
-	//private static final Logger LOGGER = Logger
-		//	.getLogger(DMaaPWebExceptionMapper.class);
-	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
-	private ErrorResponse errRes;
-	
-	@Autowired
-	private DMaaPErrorMessages msgs;
-	
-	public DMaaPWebExceptionMapper() {
-		super();
-		LOGGER.info("WebException Mapper Created..");
-	}
-
-	@Override
-	public Response toResponse(WebApplicationException ex) {
-		
-		LOGGER.info("Reached WebException Mapper");
-		
-		/**
-		 * Resource Not Found
-		 */
-		if(ex instanceof NotFoundException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
-			
-			LOGGER.info(errRes.toString());
-			
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-			
-		}
-		
-		if(ex instanceof InternalServerErrorException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
-			
-			LOGGER.info(errRes.toString());
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-			
-		}
-		
-		if(ex instanceof NotAuthorizedException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
-			
-			LOGGER.info(errRes.toString());
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-		}
-		
-		if(ex instanceof BadRequestException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
-			
-			LOGGER.info(errRes.toString());
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-		}
-		if(ex instanceof NotAllowedException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed());
-			
-			LOGGER.info(errRes.toString());
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-		}
-		
-		if(ex instanceof ServiceUnavailableException)
-		{
-			errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
-			
-			LOGGER.info(errRes.toString());
-			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
-		            .build();
-		}
-		
-		
-		return Response.serverError().build();
-	}
-
-	
-
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java
deleted file mode 100644
index 3bc5364..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/exception/ErrorResponse.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception;
-import org.json.JSONObject;
-/**
- * Represents the Error Response Object 
- * that is rendered as a JSON object when
- * an exception or error occurs on MR Rest Service.
- * @author author
- *
- */
-//@XmlRootElement
-public class ErrorResponse {
-	
-	private int httpStatusCode;
-	private int mrErrorCode;
-    private String errorMessage;
-    private String helpURL;
-    private String statusTs;
-    private String topic;
-    private String publisherId;
-    private String publisherIp;
-    private String subscriberId;
-    private String subscriberIp;
-	
-
-	public ErrorResponse(int httpStatusCode, int mrErrorCode,
-			String errorMessage, String helpURL, String statusTs, String topic,
-			String publisherId, String publisherIp, String subscriberId,
-			String subscriberIp) {
-		super();
-		this.httpStatusCode = httpStatusCode;
-		this.mrErrorCode = mrErrorCode;
-		this.errorMessage = errorMessage;
-		this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
-		this.statusTs = statusTs;
-		this.topic = topic;
-		this.publisherId = publisherId;
-		this.publisherIp = publisherIp;
-		this.subscriberId = subscriberId;
-		this.subscriberIp = subscriberIp;
-	}
-
-	public ErrorResponse(int httpStatusCode, int mrErrorCode,
-			String errorMessage) {
-		super();
-		this.httpStatusCode = httpStatusCode;
-		this.mrErrorCode = mrErrorCode;
-		this.errorMessage = errorMessage;
-		this.helpURL = "https://wiki.web.att.com/display/DMAAP/DMaaP+Home";
-		
-	}
-	
-	public int getHttpStatusCode() {
-		return httpStatusCode;
-	}
-
-	public void setHttpStatusCode(int httpStatusCode) {
-		this.httpStatusCode = httpStatusCode;
-	}
-	
-	public int getMrErrorCode() {
-		return mrErrorCode;
-	}
-
-
-	public void setMrErrorCode(int mrErrorCode) {
-		this.mrErrorCode = mrErrorCode;
-	}
-
-	
-	public String getErrorMessage() {
-		return errorMessage;
-	}
-
-	public void setErrorMessage(String errorMessage) {
-		this.errorMessage = errorMessage;
-	}
-
-	public String getHelpURL() {
-		return helpURL;
-	}
-
-	public void setHelpURL(String helpURL) {
-		this.helpURL = helpURL;
-	}
-
-	@Override
-	public String toString() {
-		return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode
-				+ "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\""
-				+ errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\""
-				+ ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\""
-				+ ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\""
-				+ ", \"subscriberIp\":\""+subscriberIp+"\"}";
-	}
-	
-	public String getErrMapperStr1() {
-		return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage="
-				+ errorMessage + ", helpURL=" + helpURL + "]";
-	}
-
-	
-	
-	public JSONObject getErrMapperStr() {
-		JSONObject o = new JSONObject();
-		o.put("status", getHttpStatusCode());
-		o.put("mrstatus", getMrErrorCode());
-		o.put("message", getErrorMessage());
-		o.put("helpURL", getHelpURL());
-		return o;
-	}
-	
-    
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java
deleted file mode 100644
index 35c0b27..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/CambriaServletContextListener.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints 
- * @author author
- *
- */
-public class CambriaServletContextListener implements ServletContextListener {
-	
-	DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
-//	private static Logger log = Logger.getLogger(CambriaServletContextListener.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
-	
-
-	@Override
-	
-	/**
-	 * contextDestroyed() loads unpublished end points
-	 * @param arg0
-	 */
-	public void contextDestroyed(ServletContextEvent arg0) {
-		log.info("CambriaServletContextListener contextDestroyed");
-		
-		loader.unPublishEndPoints();
-	}
-
-	@Override
-	/**
-	 * contextInitialized() loads published end points
-	 * @param arg0
-	 */
-	public void contextInitialized(ServletContextEvent arg0) {
-		log.info("CambriaServletContextListener contextInitialized");
-		loader.publishEndPoints();
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java
deleted file mode 100644
index 9332aeb..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/listener/DME2EndPointLoader.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl.EventsServiceImpl;
-
-import com.att.aft.dme2.api.DME2Exception;
-import com.att.aft.dme2.api.DME2Manager;
-import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * 
- * @author author
- *
- */
-public class DME2EndPointLoader {
-
-	private String latitude;
-	private String longitude;
-	private String version;
-	private String serviceName;
-	private String env;
-	private String routeOffer;
-	private String hostName;
-	private String port;
-	private String contextPath;
-	private String protocol;
-	private String serviceURL;
-	private static DME2EndPointLoader loader = new DME2EndPointLoader();
-//	private static final Logger LOG = LoggerFactory.getLogger(EventsServiceImpl.class);
-	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
-	private DME2EndPointLoader() {
-	}
-
-	public static DME2EndPointLoader getInstance() {
-		return loader;
-	}
-
-	/**
-	 * publishing endpoints
-	 */
-	public void publishEndPoints() {
-
-		try {
-			InputStream input = this.getClass().getResourceAsStream("/endpoint.properties");
-			Properties props = new Properties();
-			props.load(input);
-
-			latitude = props.getProperty("Latitude");
-			longitude = props.getProperty("Longitude");
-			version = props.getProperty("Version");
-			serviceName = props.getProperty("ServiceName");
-			env = props.getProperty("Environment");
-			routeOffer = props.getProperty("RouteOffer");
-			hostName = props.getProperty("HostName");
-			port = props.getProperty("Port");
-			contextPath = props.getProperty("ContextPath");
-			protocol = props.getProperty("Protocol");
-
-			System.setProperty("AFT_LATITUDE", latitude);
-			System.setProperty("AFT_LONGITUDE", longitude);
-			System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
-
-			serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/"
-					+ "routeOffer=" + routeOffer;
-
-			DME2Manager manager = new DME2Manager("testEndpointPublish", props);
-			manager.setClientCredentials("sh301n", "");
-			DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
-			// Publish API takes service name, context path, hostname, port and
-			// protocol as args
-			svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol);
-
-		} catch (IOException | DME2Exception e) {
-			LOG.error("Failed due to :" + e);
-		}
-
-	}
-/**
- * unpublishing endpoints
- */
-	public void unPublishEndPoints() {
-
-		DME2Manager manager;
-		try {
-			System.setProperty("AFT_LATITUDE", latitude);
-			System.setProperty("AFT_LONGITUDE", longitude);
-			System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
-
-			manager = DME2Manager.getDefaultInstance();
-			DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
-			svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port));
-		} catch (DME2Exception e) {
-			LOG.error("Failed due to DME2Exception" + e);
-		}
-
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java
deleted file mode 100644
index 9634cc2..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Broker.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker;
-
-import java.util.List;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * A broker interface to manage metadata around topics, etc.
- * 
- * @author author
- *
- */
-public interface Broker {
-	/**
-	 * 
-	 * @author author
-	 *
-	 */
-	public class TopicExistsException extends Exception {
-		/**
-		 * 
-		 * @param topicName
-		 */
-		public TopicExistsException(String topicName) {
-			super("Topic " + topicName + " exists.");
-		}
-
-		private static final long serialVersionUID = 1L;
-	}
-
-	/**
-	 * Get all topics in the underlying broker.
-	 * 
-	 * @return
-	 * @throws ConfigDbException
-	 */
-	List<Topic> getAllTopics() throws ConfigDbException;
-
-	/**
-	 * Get a specific topic from the underlying broker.
-	 * 
-	 * @param topic
-	 * @return a topic, or null
-	 */
-	Topic getTopic(String topic) throws ConfigDbException;
-
-	/**
-	 * create a  topic
-	 * 
-	 * @param topic
-	 * @param description
-	 * @param ownerApiKey
-	 * @param partitions
-	 * @param replicas
-	 * @param transactionEnabled
-	 * @return
-	 * @throws TopicExistsException
-	 * @throws CambriaApiException
-	 */
-	Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
-			boolean transactionEnabled) throws TopicExistsException, CambriaApiException;
-
-	/**
-	 * Delete a topic by name
-	 * 
-	 * @param topic
-	 */
-	void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java
deleted file mode 100644
index f38a4a6..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metabroker/Topic.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource;
-/**
- * This is the interface for topic and all the topic related operations
- * get topic name, owner, description, transactionEnabled etc.
- * @author author
- *
- */
-public interface Topic extends ReadWriteSecuredResource
-{	
-	/**
-	 * User defined exception for access denied while access the topic for Publisher and consumer
-	 * @author author
-	 *
-	 *//*
-	public class AccessDeniedException extends Exception
-	{	
-		*//**
-		 * AccessDenied Description
-		 *//*
-		public AccessDeniedException () { super ( "Access denied." ); } 
-		*//**
-		 * AccessDenied Exception for the user while authenticating the user request
-		 * @param user
-		 *//*
-		public AccessDeniedException ( String user ) { super ( "Access denied for " + user ); } 
-		private static final long serialVersionUID = 1L;
-	}*/
-
-	/**
-	 * Get this topic's name
-	 * @return
-	 */
-	String getName ();
-
-	/**
-	 * Get the API key of the owner of this topic.
-	 * @return
-	 */
-	String getOwner ();
-
-	/**
-	 * Get a description of the topic, as set by the owner at creation time.
-	 * @return
-	 */
-	String getDescription ();
-	
-	/**
-	 * If the topic is transaction enabled
-	 * @return boolean true/false
-	 */
-	boolean isTransactionEnabled();
-	
-	/**
-	 * Get the ACL for reading on this topic. Can be null.
-	 * @return
-	 */
-	NsaAcl getReaderAcl ();
-
-	/**
-	 * Get the ACL for writing on this topic.  Can be null.
-	 * @return
-	 */
-	NsaAcl getWriterAcl ();
-
-	/**
-	 * Check if this user can read the topic. Throw otherwise. Note that
-	 * user may be null.
-	 * @param user
-	 */
-	void checkUserRead ( NsaApiKey user ) throws AccessDeniedException;
-
-	/**
-	 * Check if this user can write to the topic. Throw otherwise. Note
-	 * that user may be null.
-	 * @param user
-	 */
-	void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException;
-
-	/**
-	 * allow the given user to publish
-	 * @param publisherId
-	 * @param asUser
-	 */
-	void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
-	/**
-	 * deny the given user from publishing
-	 * @param publisherId
-	 * @param asUser
-	 */
-	void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
-	/**
-	 * allow the given user to read the topic
-	 * @param consumerId
-	 * @param asUser
-	 */
-	void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
-	/**
-	 * deny the given user from reading the topic
-	 * @param consumerId
-	 * @param asUser
-	 * @throws ConfigDbException 
-	 */
-	void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
deleted file mode 100644
index de6044a..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- * A Cambria batching publisher is a publisher with additional functionality
- * for managing delayed sends.
- * 
- * @author author
- *
- */
-public interface CambriaBatchingPublisher extends CambriaPublisher
-{
-	/**
-	 * Get the number of messages that have not yet been sent.
-	 * @return the number of pending messages
-	 */
-	int getPendingMessageCount ();
-
-	/**
-	 * Close this publisher, sending any remaining messages.
-	 * @param timeout an amount of time to wait for unsent messages to be sent
-	 * @param timeoutUnits the time unit for the timeout arg
-	 * @return a list of any unsent messages after the timeout
-	 * @throws IOException
-	 * @throws InterruptedException 
-	 */
-	List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java
deleted file mode 100644
index f5ac924..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaClient.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-//import org.slf4j.Logger;
-
-//
-import com.att.eelf.configuration.EELFLogger;
-//import com.att.eelf.configuration.EELFManager;
-
-/**
- * 
- * @author author
- *
- */
-public interface CambriaClient {
-	/**
-	 * An exception at the Cambria layer. This is used when the HTTP transport
-	 * layer returns a success code but the transaction is not completed as
-	 * expected.
-	 */
-	public class CambriaApiException extends Exception {
-		/**
-		 * 
-		 * @param msg
-		 */
-		public CambriaApiException(String msg) {
-			super(msg);
-		}
-
-		/**
-		 * 
-		 * @param msg
-		 * @param t
-		 */
-		public CambriaApiException(String msg, Throwable t) {
-			super(msg, t);
-		}
-
-		private static final long serialVersionUID = 1L;
-	}
-
-	/**
-	 * Optionally set the Logger to use
-	 * 
-	 * @param log
-	 */
-	void logTo(EELFLogger  log);
-
-	/**
-	 * Set the API credentials for this client connection. Subsequent calls will
-	 *  include authentication headers.who i
-	 * 
-	 * @param apiKey
-	 * @param apiSecret
-	 */
-	void setApiCredentials(String apiKey, String apiSecret);
-
-	/**
-	 * Remove API credentials, if any, on this connection. Subsequent calls will
-	 * not include authentication headers.
-	 */
-	void clearApiCredentials();
-
-	/**
-	 * Close this connection. Some client interfaces have additional close
-	 * capability.
-	 */
-	void close();
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java
deleted file mode 100644
index ad2613f..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaConsumer.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-import java.io.IOException;
-
-/**
- * This interface will provide fetch mechanism for consumer
- * @author author
- *
- */
-public interface CambriaConsumer extends CambriaClient
-{
-	/**
-	 * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call. 
-
-	 * @return a set of messages
-	 * @throws IOException
-	 */
-	Iterable<String> fetch () throws IOException;
-
-	/**
-	 * Fetch a set of messages with an explicit timeout and limit for this call. These values
-	 * override any set in the constructor call.
-	 * 
-	 * @param timeoutMs	The amount of time in milliseconds that the server should keep the connection
-	 * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side).
-	 * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit.
-	 * @return a set messages
-	 * @throws IOException if there's a problem connecting to the server
-	 */
-	Iterable<String> fetch ( int timeoutMs, int limit ) throws IOException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java
deleted file mode 100644
index 9b3ac12..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisher.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-import java.io.IOException;
-import java.util.Collection;
-
-/**
- * A Cambria publishing interface.
- * 
- * @author author
- *
- */
-public interface CambriaPublisher extends CambriaClient {
-	/**
-	 * A simple message container
-	 */
-	public static class message {
-		/**
-		 * 
-		 * @param partition
-		 * @param msg
-		 */
-		public message(String partition, String msg) {
-			fPartition = partition == null ? "" : partition;
-			fMsg = msg;
-			if (fMsg == null) {
-				throw new IllegalArgumentException("Can't send a null message.");
-			}
-		}
-
-		/**
-		 * 
-		 * @param msg
-		 */
-		public message(message msg) {
-			this(msg.fPartition, msg.fMsg);
-		}
-
-		/**
-		 *  declaring partition string
-		 */
-		public final String fPartition;
-		/**
-		 * declaring fMsg String
-		 */
-		public final String fMsg;
-	}
-
-	/**
-	 * Send the given message using the given partition.
-	 * 
-	 * @param partition
-	 * @param msg
-	 * @return the number of pending messages
-	 * @throws IOException
-	 */
-	int send(String partition, String msg) throws IOException;
-
-	/**
-	 * Send the given message using its partition.
-	 * 
-	 * @param msg
-	 * @return the number of pending messages
-	 * @throws IOException
-	 */
-	int send(message msg) throws IOException;
-
-	/**
-	 * Send the given messages using their partitions.
-	 * 
-	 * @param msgs
-	 * @return the number of pending messages
-	 * @throws IOException
-	 */
-	int send(Collection<message> msgs) throws IOException;
-
-	/**
-	 * Close this publisher. It's an error to call send() after close()
-	 */
-	void close();
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
deleted file mode 100644
index 066a2f3..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.http.HttpHost;
-/**
- * 
- * @author author
- *
- */
-public class CambriaPublisherUtility
-{
-	public static final String kBasePath = "/events/";
-	public static final int kStdCambriaServicePort = 3904;
-/**
- * 
- * Translates a string into <code>application/x-www-form-urlencoded</code>
- * format using a specific encoding scheme.
- * @param s
- * @return
- * 
- */
-	public static String escape ( String s )
-	{
-		try
-		{
-			return URLEncoder.encode ( s, "UTF-8");
-		}
-		catch ( UnsupportedEncodingException e )
-		{
-			throw new RuntimeException ( e );
-		}
-	}
-/**
- * 
- * building url
- * @param rawTopic
- * @return
- */
-	public static String makeUrl ( String rawTopic )
-	{
-		final String cleanTopic = escape ( rawTopic );
-		
-		final StringBuffer url = new StringBuffer().
-			append ( CambriaPublisherUtility.kBasePath ).
-			append ( cleanTopic );
-		return url.toString ();
-	}
-/**
- * 
- * building consumerUrl
- * @param topic
- * @param rawConsumerGroup
- * @param rawConsumerId
- * @return
- */
-	public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
-	{
-		final String cleanConsumerGroup = escape ( rawConsumerGroup );
-		final String cleanConsumerId = escape ( rawConsumerId );
-		return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
-	}
-
-	/**
-	 * Create a list of HttpHosts from an input list of strings. Input strings have
-	 * host[:port] as format. If the port section is not provided, the default port is used.
-	 * 
-	 * @param hosts
-	 * @return a list of hosts
-	 */
-	public static List<HttpHost> createHostsList(Collection<String> hosts)
-	{
-		final ArrayList<HttpHost> convertedHosts = new ArrayList<HttpHost> ();
-		for ( String host : hosts )
-		{
-			if ( host.length () == 0 ) continue;
-			convertedHosts.add ( hostForString ( host ) );
-		}
-		return convertedHosts;
-	}
-
-	/**
-	 * Return an HttpHost from an input string. Input string has
-	 * host[:port] as format. If the port section is not provided, the default port is used.
-	 * 
-	 * @param hosts
-	 * @return a list of hosts
-	 * if host.length<1 throws IllegalArgumentException
-	 * 
-	 */
-	public static HttpHost hostForString ( String host )
-	{
-		if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." );
-		
-		String hostPart = host;
-		int port = kStdCambriaServicePort;
-
-		final int colon = host.indexOf ( ':' );
-		if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." );
-		if ( colon > 0 )
-		{
-			hostPart = host.substring ( 0, colon ).trim();
-
-			final String portPart = host.substring ( colon + 1 ).trim();
-			if ( portPart.length () > 0 )
-			{
-				try
-				{
-					port = Integer.parseInt ( portPart );
-				}
-				catch ( NumberFormatException x )
-				{
-					throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x );
-				}
-			}
-			// else: use default port on "foo:"
-		}
-
-		return new HttpHost ( hostPart, port );
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
deleted file mode 100644
index 1f32511..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
+++ /dev/null
@@ -1,423 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher;
-
-import java.net.MalformedURLException;
-import java.util.Collection;
-import java.util.TreeSet;
-import java.util.UUID;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
-
-/**
- * A factory for Cambria clients.<br/>
- * <br/>
- * Use caution selecting a consumer creator factory. If the call doesn't accept
- * a consumer group name, then it creates a consumer that is not restartable.
- * That is, if you stop your process and start it again, your client will NOT
- * receive any missed messages on the topic. If you need to ensure receipt of
- * missed messages, then you must use a consumer that's created with a group
- * name and ID. (If you create multiple consumer processes using the same group,
- * load is split across them. Be sure to use a different ID for each instance.)<br/>
- * <br/>
- * Publishers
- * 
- * @author author
- */
-public class DMaaPCambriaClientFactory {
-	/**
-	 * Create a consumer instance with the default timeout and no limit on
-	 * messages returned. This consumer operates as an independent consumer
-	 * (i.e., not in a group) and is NOT re-startable across sessions.
-	 * 
-	 * @param hostList
-	 *            A comma separated list of hosts to use to connect to Cambria.
-	 *            You can include port numbers (3904 is the default). 
-	 * 
-	 * @param topic
-	 *            The topic to consume
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(String hostList, String topic) {
-		return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
-				topic);
-	}
-
-	/**
-	 * Create a consumer instance with the default timeout and no limit on
-	 * messages returned. This consumer operates as an independent consumer
-	 * (i.e., not in a group) and is NOT re-startable across sessions.
-	 * 
-	 * @param hostSet
-	 *            The host used in the URL to Cambria. Entries can be
-	 *            "host:port".
-	 * @param topic
-	 *            The topic to consume
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(Collection<String> hostSet,
-			String topic) {
-		return createConsumer(hostSet, topic, null);
-	}
-
-	/**
-	 * Create a consumer instance with server-side filtering, the default
-	 * timeout, and no limit on messages returned. This consumer operates as an
-	 * independent consumer (i.e., not in a group) and is NOT re-startable
-	 * across sessions.
-	 * 
-	 * @param hostSet
-	 *            The host used in the URL to Cambria. Entries can be
-	 *            "host:port".
-	 * @param topic
-	 *            The topic to consume
-	 * @param filter
-	 *            a filter to use on the server side
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(Collection<String> hostSet,
-			String topic, String filter) {
-		return createConsumer(hostSet, topic, UUID.randomUUID().toString(),
-				"0", -1, -1, filter, null, null);
-	}
-
-	/**
-	 * Create a consumer instance with the default timeout, and no limit on
-	 * messages returned. This consumer can operate in a logical group and is
-	 * re-startable across sessions when you use the same group and ID on
-	 * restart.
-	 * 
-	 * @param hostSet
-	 *            The host used in the URL to Cambria. Entries can be
-	 *            "host:port".
-	 * @param topic
-	 *            The topic to consume
-	 * @param consumerGroup
-	 *            The name of the consumer group this consumer is part of
-	 * @param consumerId
-	 *            The unique id of this consume in its group
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(Collection<String> hostSet,
-			final String topic, final String consumerGroup,
-			final String consumerId) {
-		return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1);
-	}
-
-	/**
-	 * Create a consumer instance with the default timeout, and no limit on
-	 * messages returned. This consumer can operate in a logical group and is
-	 * re-startable across sessions when you use the same group and ID on
-	 * restart.
-	 * 
-	 * @param hostSet
-	 *            The host used in the URL to Cambria. Entries can be
-	 *            "host:port".
-	 * @param topic
-	 *            The topic to consume
-	 * @param consumerGroup
-	 *            The name of the consumer group this consumer is part of
-	 * @param consumerId
-	 *            The unique id of this consume in its group
-	 * @param timeoutMs
-	 *            The amount of time in milliseconds that the server should keep
-	 *            the connection open while waiting for message traffic. Use -1
-	 *            for default timeout.
-	 * @param limit
-	 *            A limit on the number of messages returned in a single call.
-	 *            Use -1 for no limit.
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(Collection<String> hostSet,
-			final String topic, final String consumerGroup,
-			final String consumerId, int timeoutMs, int limit) {
-		return createConsumer(hostSet, topic, consumerGroup, consumerId,
-				timeoutMs, limit, null, null, null);
-	}
-
-	/**
-	 * Create a consumer instance with the default timeout, and no limit on
-	 * messages returned. This consumer can operate in a logical group and is
-	 * re-startable across sessions when you use the same group and ID on
-	 * restart. This consumer also uses server-side filtering.
-	 * 
-	 * @param hostList
-	 *            A comma separated list of hosts to use to connect to Cambria.
-	 *            You can include port numbers (3904 is the default). 
-	 * @param topic
-	 *            The topic to consume
-	 * @param consumerGroup
-	 *            The name of the consumer group this consumer is part of
-	 * @param consumerId
-	 *            The unique id of this consume in its group
-	 * @param timeoutMs
-	 *            The amount of time in milliseconds that the server should keep
-	 *            the connection open while waiting for message traffic. Use -1
-	 *            for default timeout.
-	 * @param limit
-	 *            A limit on the number of messages returned in a single call.
-	 *            Use -1 for no limit.
-	 * @param filter
-	 *            A Highland Park filter expression using only built-in filter
-	 *            components. Use null for "no filter".
-	 * @param apiKey
-	 *            key associated with a user
-	 * @param apiSecret
-	 *            of a user
-	 * 
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(String hostList,
-			final String topic, final String consumerGroup,
-			final String consumerId, int timeoutMs, int limit, String filter,
-			String apiKey, String apiSecret) {
-		return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
-				topic, consumerGroup, consumerId, timeoutMs, limit, filter,
-				apiKey, apiSecret);
-	}
-
-	/**
-	 * Create a consumer instance with the default timeout, and no limit on
-	 * messages returned. This consumer can operate in a logical group and is
-	 * re-startable across sessions when you use the same group and ID on
-	 * restart. This consumer also uses server-side filtering.
-	 * 
-	 * @param hostSet
-	 *            The host used in the URL to Cambria. Entries can be
-	 *            "host:port".
-	 * @param topic
-	 *            The topic to consume
-	 * @param consumerGroup
-	 *            The name of the consumer group this consumer is part of
-	 * @param consumerId
-	 *            The unique id of this consume in its group
-	 * @param timeoutMs
-	 *            The amount of time in milliseconds that the server should keep
-	 *            the connection open while waiting for message traffic. Use -1
-	 *            for default timeout.
-	 * @param limit
-	 *            A limit on the number of messages returned in a single call.
-	 *            Use -1 for no limit.
-	 * @param filter
-	 *            A Highland Park filter expression using only built-in filter
-	 *            components. Use null for "no filter".
-	 * @param apiKey
-	 *            key associated with a user
-	 * @param apiSecret
-	 *            of a user
-	 * @return a consumer
-	 */
-	public static CambriaConsumer createConsumer(Collection<String> hostSet,
-			final String topic, final String consumerGroup,
-			final String consumerId, int timeoutMs, int limit, String filter,
-			String apiKey, String apiSecret) {
-		if (sfMock != null)
-			return sfMock;
-		try {
-			return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
-					consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
-		} catch (MalformedURLException e) {
-			throw new RuntimeException(e);
-		}
-	}
-
-	/*************************************************************************/
-	/*************************************************************************/
-	/*************************************************************************/
-
-	/**
-	 * Create a publisher that sends each message (or group of messages)
-	 * immediately. Most applications should favor higher latency for much
-	 * higher message throughput and the "simple publisher" is not a good
-	 * choice.
-	 * 
-	 * @param hostlist
-	 *            The host used in the URL to Cambria. Can be "host:port", can
-	 *            be multiple comma-separated entries.
-	 * @param topic
-	 *            The topic on which to publish messages.
-	 * @return a publisher
-	 */
-	public static CambriaBatchingPublisher createSimplePublisher(
-			String hostlist, String topic) {
-		return createBatchingPublisher(hostlist, topic, 1, 1);
-	}
-
-	/**
-	 * Create a publisher that batches messages. Be sure to close the publisher
-	 * to send the last batch and ensure a clean shutdown. Message payloads are
-	 * not compressed.
-	 * 
-	 * @param hostlist
-	 *            The host used in the URL to Cambria. Can be "host:port", can
-	 *            be multiple comma-separated entries.
-	 * @param topic
-	 *            The topic on which to publish messages.
-	 * @param maxBatchSize
-	 *            The largest set of messages to batch
-	 * @param maxAgeMs
-	 *            The maximum age of a message waiting in a batch
-	 * 
-	 * @return a publisher
-	 */
-	public static CambriaBatchingPublisher createBatchingPublisher(
-			String hostlist, String topic, int maxBatchSize, long maxAgeMs) {
-		return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs,
-				false);
-	}
-
-	/**
-	 * Create a publisher that batches messages. Be sure to close the publisher
-	 * to send the last batch and ensure a clean shutdown.
-	 * 
-	 * @param hostlist
-	 *            The host used in the URL to Cambria. Can be "host:port", can
-	 *            be multiple comma-separated entries.
-	 * @param topic
-	 *            The topic on which to publish messages.
-	 * @param maxBatchSize
-	 *            The largest set of messages to batch
-	 * @param maxAgeMs
-	 *            The maximum age of a message waiting in a batch
-	 * @param compress
-	 *            use gzip compression
-	 * 
-	 * @return a publisher
-	 */
-	public static CambriaBatchingPublisher createBatchingPublisher(
-			String hostlist, String topic, int maxBatchSize, long maxAgeMs,
-			boolean compress) {
-		return createBatchingPublisher(
-				DMaaPCambriaConsumerImpl.stringToList(hostlist), topic,
-				maxBatchSize, maxAgeMs, compress);
-	}
-
-	/**
-	 * Create a publisher that batches messages. Be sure to close the publisher
-	 * to send the last batch and ensure a clean shutdown.
-	 * 
-	 * @param hostSet
-	 *            A set of hosts to be used in the URL to Cambria. Can be
-	 *            "host:port". Use multiple entries to enable failover.
-	 * @param topic
-	 *            The topic on which to publish messages.
-	 * @param maxBatchSize
-	 *            The largest set of messages to batch
-	 * @param maxAgeMs
-	 *            The maximum age of a message waiting in a batch
-	 * @param compress
-	 *            use gzip compression
-	 * 
-	 * @return a publisher
-	 */
-	public static CambriaBatchingPublisher createBatchingPublisher(
-			String[] hostSet, String topic, int maxBatchSize, long maxAgeMs,
-			boolean compress) {
-		final TreeSet<String> hosts = new TreeSet<String>();
-		for (String hp : hostSet) {
-			hosts.add(hp);
-		}
-		return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs,
-				compress);
-	}
-
-	/**
-	 * Create a publisher that batches messages. Be sure to close the publisher
-	 * to send the last batch and ensure a clean shutdown.
-	 * 
-	 * @param hostSet
-	 *            A set of hosts to be used in the URL to Cambria. Can be
-	 *            "host:port". Use multiple entries to enable failover.
-	 * @param topic
-	 *            The topic on which to publish messages.
-	 * @param maxBatchSize
-	 *            The largest set of messages to batch
-	 * @param maxAgeMs
-	 *            The maximum age of a message waiting in a batch
-	 * @param compress
-	 *            use gzip compression
-	 * 
-	 * @return a publisher
-	 */
-	public static CambriaBatchingPublisher createBatchingPublisher(
-			Collection<String> hostSet, String topic, int maxBatchSize,
-			long maxAgeMs, boolean compress) {
-		return new DMaaPCambriaSimplerBatchPublisher.Builder()
-				.againstUrls(hostSet).onTopic(topic)
-				.batchTo(maxBatchSize, maxAgeMs).compress(compress).build();
-	}
-
-	/**
-	 * Create an identity manager client to work with API keys.
-	 * 
-	 * @param hostSet
-	 *            A set of hosts to be used in the URL to Cambria. Can be
-	 *            "host:port". Use multiple entries to enable failover.
-	 * @param apiKey
-	 *            Your API key
-	 * @param apiSecret
-	 *            Your API secret
-	 * @return an identity manager
-	 */
-	/*
-	 * public static CambriaIdentityManager createIdentityManager (
-	 * Collection<String> hostSet, String apiKey, String apiSecret ) { final
-	 * CambriaIdentityManager cim = new CambriaMetaClient ( hostSet );
-	 * cim.setApiCredentials ( apiKey, apiSecret ); return cim; }
-	 */
-
-	/**
-	 * Create a topic manager for working with topics.
-	 * 
-	 * @param hostSet
-	 *            A set of hosts to be used in the URL to Cambria. Can be
-	 *            "host:port". Use multiple entries to enable failover.
-	 * @param apiKey
-	 *            Your API key
-	 * @param apiSecret
-	 *            Your API secret
-	 * @return a topic manager
-	 */
-	/*
-	 * public static CambriaTopicManager createTopicManager ( Collection<String>
-	 * hostSet, String apiKey, String apiSecret ) { final CambriaMetaClient tmi
-	 * = new CambriaMetaClient ( hostSet ); tmi.setApiCredentials ( apiKey,
-	 * apiSecret ); return tmi; }
-	 */
-
-	/**
-	 * Inject a consumer. Used to support unit tests.
-	 * 
-	 * @param cc
-	 */
-	public static void $testInject(CambriaConsumer cc) {
-		sfMock = cc;
-	}
-
-	private static CambriaConsumer sfMock = null;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
deleted file mode 100644
index 397e818..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
-
-import java.net.MalformedURLException;
-import java.util.Collection;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.apiClient.http.CacheUse;
-import com.att.nsa.apiClient.http.HttpClient;
-
-/**
- * 
- * @author author
- *
- */
-public class CambriaBaseClient extends HttpClient implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaClient 
-{
-	protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
-	{
-		this ( hosts, null );
-	}
-
-	protected CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
-	{
-//		super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
-//			CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );
-		super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
-
-		//fLog = LoggerFactory.getLogger ( this.getClass().getName () );
-		fLog = EELFManager.getInstance().getLogger(this.getClass().getName());
-		//( this.getClass().getName () );
-	}
-
-	@Override
-	public void close ()
-	{
-	}
-
-	protected Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
-	{
-		if ( a == null ) return null;
-
-		final TreeSet<String> set = new TreeSet<String> ();
-		for ( int i=0; i<a.length (); i++ )
-		{
-			set.add ( a.getString ( i ));
-		}
-		return set;
-	}
-	/**
-	 * @param log
-	 */
-	public void logTo ( EELFLogger  log )
-	{
-		fLog = log; 
-		
-		//replaceLogger ( log );
-	}
-
-	protected EELFLogger  getLog ()
-	{
-		return fLog;
-	}
-	
-	private EELFLogger  fLog;
-	
-	
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java
deleted file mode 100644
index e7531c7..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/Clock.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
-
-/**
- * 
- * This class maintains the system clocks
- * @author author
- *
- */
-public class Clock
-{
-	public synchronized static Clock getIt ()
-	{
-		if ( sfClock == null )
-		{
-			sfClock = new Clock ();
-		}
-		return sfClock;
-	}
-
-	/**
-	 * 
-	 * Get the system's current time in milliseconds.
-	 * @return the current time
-	 * 
-	 */
-	public static long now ()
-	{
-		return getIt().nowImpl ();
-	}
-
-	/**
-	 * Get current time in milliseconds
-	 * @return current time in ms
-	 */
-	protected long nowImpl ()
-	{
-		return System.currentTimeMillis ();
-	}
-
-	/**
-	 * Initialize constructor
-	 */
-	protected Clock ()
-	{
-	}
-
-	private static Clock sfClock = null;
-
-	protected synchronized static void register ( Clock testClock )
-	{
-		sfClock = testClock;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
deleted file mode 100644
index 332c8b1..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.MalformedURLException;
-import java.net.URLEncoder;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-
-import jline.internal.Log;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
-
-import com.att.nsa.apiClient.http.HttpException;
-import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
-
-/**
- * 
- * @author author
- *
- */
-public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
-		implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaConsumer {
-	private final String fTopic;
-	private final String fGroup;
-	private final String fId;
-	private final int fTimeoutMs;
-	private final int fLimit;
-	private final String fFilter;
-
-	/**
-	 * 
-	 * @param hostPart
-	 * @param topic
-	 * @param consumerGroup
-	 * @param consumerId
-	 * @param timeoutMs
-	 * @param limit
-	 * @param filter
-	 * @param apiKey
-	 * @param apiSecret
-	 * @throws MalformedURLException 
-	 */
-	public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
-			final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
-		super(hostPart, topic + "::" + consumerGroup + "::" + consumerId);
-
-		fTopic = topic;
-		fGroup = consumerGroup;
-		fId = consumerId;
-		fTimeoutMs = timeoutMs;
-		fLimit = limit;
-		fFilter = filter;
-
-		setApiCredentials(apiKey, apiSecret);
-	}
-
-	/**
-	 * method converts String to list
-	 * 
-	 * @param str
-	 * @return
-	 */
-	public static List<String> stringToList(String str) {
-		final LinkedList<String> set = new LinkedList<String>();
-		if (str != null) {
-			final String[] parts = str.trim().split(",");
-			for (String part : parts) {
-				final String trimmed = part.trim();
-				if (trimmed.length() > 0) {
-					set.add(trimmed);
-				}
-			}
-		}
-		return set;
-	}
-
-	@Override
-	public Iterable<String> fetch() throws IOException {
-		// fetch with the timeout and limit set in constructor
-		return fetch(fTimeoutMs, fLimit);
-	}
-
-	@Override
-	public Iterable<String> fetch(int timeoutMs, int limit) throws IOException {
-		final LinkedList<String> msgs = new LinkedList<String>();
-
-		final String urlPath = createUrlPath(timeoutMs, limit);
-
-		getLog().info("UEB GET " + urlPath);
-		try {
-			final JSONObject o = get(urlPath);
-
-			if (o != null) {
-				final JSONArray a = o.getJSONArray("result");
-				if (a != null) {
-					for (int i = 0; i < a.length(); i++) {
-						msgs.add(a.getString(i));
-					}
-				}
-			}
-		} catch (HttpObjectNotFoundException e) {
-			// this can happen if the topic is not yet created. ignore.
-			Log.error("Failed due to topic is not yet created" + e);
-		} catch (JSONException e) {
-			// unexpected response
-			reportProblemWithResponse();
-			Log.error("Failed due to jsonException", e);
-		} catch (HttpException e) {
-			throw new IOException(e);
-		}
-
-		return msgs;
-	}
-
-	protected String createUrlPath(int timeoutMs, int limit) {
-		final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
-		final StringBuilder adds = new StringBuilder();
-		if (timeoutMs > -1) {
-			adds.append("timeout=").append(timeoutMs);
-		}
-
-		if (limit > -1) {
-			if (adds.length() > 0) {
-				adds.append("&");
-			}
-			adds.append("limit=").append(limit);
-		}
-		if (fFilter != null && fFilter.length() > 0) {
-			try {
-				if (adds.length() > 0) {
-					adds.append("&");
-				}
-				adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8"));
-			} catch (UnsupportedEncodingException e) {
-				Log.error("Failed due to UnsupportedEncodingException" + e);
-			}
-		}
-		if (adds.length() > 0) {
-			url.append("?").append(adds.toString());
-		}
-		return url.toString();
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
deleted file mode 100644
index 2b9bad4..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.impl;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.MalformedURLException;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.zip.GZIPOutputStream;
-
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.Response;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-
-/**
- * 
- * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages
- * in batch
- * 
- * @author author
- *
- */
-public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
-		implements org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metrics.publisher.CambriaBatchingPublisher {
-	/**
-	 * 
-	 * static inner class initializes with urls, topic,batchSize
-	 * 
-	 * @author author
-	 *
-	 */
-	public static class Builder {
-		public Builder() {
-		}
-
-		/**
-		 * constructor initialize with url
-		 * 
-		 * @param baseUrls
-		 * @return
-		 * 
-		 */
-		public Builder againstUrls(Collection<String> baseUrls) {
-			fUrls = baseUrls;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with topics
-		 * 
-		 * @param topic
-		 * @return
-		 * 
-		 */
-		public Builder onTopic(String topic) {
-			fTopic = topic;
-			return this;
-		}
-
-		/**
-		 * constructor initilazes with batch size and batch time
-		 * 
-		 * @param maxBatchSize
-		 * @param maxBatchAgeMs
-		 * @return
-		 * 
-		 */
-		public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) {
-			fMaxBatchSize = maxBatchSize;
-			fMaxBatchAgeMs = maxBatchAgeMs;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with compress
-		 * 
-		 * @param compress
-		 * @return
-		 */
-		public Builder compress(boolean compress) {
-			fCompress = compress;
-			return this;
-		}
-
-		/**
-		 * method returns DMaaPCambriaSimplerBatchPublisher object
-		 * 
-		 * @return
-		 */
-		public DMaaPCambriaSimplerBatchPublisher build() {
-			try {
-				return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
-			} catch (MalformedURLException e) {
-				throw new RuntimeException(e);
-			}
-		}
-
-		private Collection<String> fUrls;
-		private String fTopic;
-		private int fMaxBatchSize = 100;
-		private long fMaxBatchAgeMs = 1000;
-		private boolean fCompress = false;
-	};
-
-	/**
-	 * 
-	 * @param partition
-	 * @param msg
-	 */
-	@Override
-	public int send(String partition, String msg) {
-		return send(new message(partition, msg));
-	}
-
-	/**
-	 * @param msg
-	 */
-	@Override
-	public int send(message msg) {
-		final LinkedList<message> list = new LinkedList<message>();
-		list.add(msg);
-		return send(list);
-	}
-
-	/**
-	 * @param msgs
-	 */
-	@Override
-	public synchronized int send(Collection<message> msgs) {
-		if (fClosed) {
-			throw new IllegalStateException("The publisher was closed.");
-		}
-
-		for (message userMsg : msgs) {
-			fPending.add(new TimestampedMessage(userMsg));
-		}
-		return getPendingMessageCount();
-	}
-
-	/**
-	 * getPending message count
-	 */
-	@Override
-	public synchronized int getPendingMessageCount() {
-		return fPending.size();
-	}
-
-	/**
-	 * 
-	 * @exception InterruptedException
-	 * @exception IOException
-	 */
-	@Override
-	public void close() {
-		try {
-			final List<message> remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
-			if (remains.size() > 0) {
-				getLog().warn("Closing publisher with " + remains.size() + " messages unsent. "
-						+ "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
-			}
-		} catch (InterruptedException e) {
-			getLog().warn("Possible message loss. " + e.getMessage(), e);
-		} catch (IOException e) {
-			getLog().warn("Possible message loss. " + e.getMessage(), e);
-		}
-	}
-
-	/**
-	 * @param time
-	 * @param unit
-	 */
-	@Override
-	public List<message> close(long time, TimeUnit unit) throws IOException, InterruptedException {
-		synchronized (this) {
-			fClosed = true;
-
-			// stop the background sender
-			fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
-			fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
-			fExec.shutdown();
-		}
-
-		final long now = Clock.now();
-		final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit);
-		final long timeoutAtMs = now + waitInMs;
-
-		while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) {
-			send(true);
-			Thread.sleep(250);
-		}
-		// synchronizing the current object
-		synchronized (this) {
-			final LinkedList<message> result = new LinkedList<message>();
-			fPending.drainTo(result);
-			return result;
-		}
-	}
-
-	/**
-	 * Possibly send a batch to the cambria server. This is called by the
-	 * background thread and the close() method
-	 * 
-	 * @param force
-	 */
-	private synchronized void send(boolean force) {
-		if (force || shouldSendNow()) {
-			if (!sendBatch()) {
-				getLog().warn("Send failed, " + fPending.size() + " message to send.");
-
-				// note the time for back-off
-				fDontSendUntilMs = sfWaitAfterError + Clock.now();
-			}
-		}
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	private synchronized boolean shouldSendNow() {
-		boolean shouldSend = false;
-		if (fPending.size() > 0) {
-			final long nowMs = Clock.now();
-
-			shouldSend = (fPending.size() >= fMaxBatchSize);
-			if (!shouldSend) {
-				final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs;
-				shouldSend = sendAtMs <= nowMs;
-			}
-
-			// however, wait after an error
-			shouldSend = shouldSend && nowMs >= fDontSendUntilMs;
-		}
-		return shouldSend;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	private synchronized boolean sendBatch() {
-		// it's possible for this call to be made with an empty list. in this
-		// case, just return.
-		if (fPending.size() < 1) {
-			return true;
-		}
-
-		final long nowMs = Clock.now();
-		final String url = CambriaPublisherUtility.makeUrl(fTopic);
-
-		getLog().info("sending " + fPending.size() + " msgs to " + url + ". Oldest: "
-				+ (nowMs - fPending.peek().timestamp) + " ms");
-
-		try {
-
-			final ByteArrayOutputStream baseStream = new ByteArrayOutputStream();
-			OutputStream os = baseStream;
-			if (fCompress) {
-				os = new GZIPOutputStream(baseStream);
-			}
-			for (TimestampedMessage m : fPending) {
-				os.write(("" + m.fPartition.length()).getBytes());
-				os.write('.');
-				os.write(("" + m.fMsg.length()).getBytes());
-				os.write('.');
-				os.write(m.fPartition.getBytes());
-				os.write(m.fMsg.getBytes());
-				os.write('\n');
-			}
-			os.close();
-
-			final long startMs = Clock.now();
-
-			// code from REST Client Starts
-
-			// final String serverCalculatedSignature = sha1HmacSigner.sign
-			// ("2015-09-21T11:38:19-0700", "iHAxArrj6Ve9JgmHvR077QiV");
-
-			Client client = ClientBuilder.newClient();
-			String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-			 if (null==metricTopicname) {
-				 
-        		 metricTopicname="msgrtr.apinode.metrics.dmaap";
-			 }
-			WebTarget target = client
-					.target("http://localhost:" + CambriaConstants.kStdCambriaServicePort);
-			target = target.path("/events/" + fTopic);
-			getLog().info("url : " + target.getUri().toString());
-			// API Key
-
-			Entity<byte[]> data = Entity.entity(baseStream.toByteArray(), "application/cambria");
-
-			Response response = target.request().post(data);
-			// header("X-CambriaAuth",
-			// "2OH46YIWa329QpEF:"+serverCalculatedSignature).
-			// header("X-CambriaDate", "2015-09-21T11:38:19-0700").
-			// post(Entity.json(baseStream.toByteArray()));
-
-			getLog().info("Response received :: " + response.getStatus());
-			getLog().info("Response received :: " + response.toString());
-
-			// code from REST Client Ends
-
-			/*
-			 * final JSONObject result = post ( url, contentType,
-			 * baseStream.toByteArray(), true ); final String logLine =
-			 * "cambria reply ok (" + (Clock.now()-startMs) + " ms):" +
-			 * result.toString (); getLog().info ( logLine );
-			 */
-			fPending.clear();
-			return true;
-		} catch (IllegalArgumentException x) {
-			getLog().warn(x.getMessage(), x);
-		}
-		/*
-		 * catch ( HttpObjectNotFoundException x ) { getLog().warn (
-		 * x.getMessage(), x ); } catch ( HttpException x ) { getLog().warn (
-		 * x.getMessage(), x ); }
-		 */
-		catch (IOException x) {
-			getLog().warn(x.getMessage(), x);
-		}
-		return false;
-	}
-
-	private final String fTopic;
-	private final int fMaxBatchSize;
-	private final long fMaxBatchAgeMs;
-	private final boolean fCompress;
-	private boolean fClosed;
-
-	private final LinkedBlockingQueue<TimestampedMessage> fPending;
-	private long fDontSendUntilMs;
-	private final ScheduledThreadPoolExecutor fExec;
-
-	private static final long sfWaitAfterError = 1000;
-
-	/**
-	 * 
-	 * @param hosts
-	 * @param topic
-	 * @param maxBatchSize
-	 * @param maxBatchAgeMs
-	 * @param compress
-	 * @throws MalformedURLException 
-	 */
-	private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
-			long maxBatchAgeMs, boolean compress) throws MalformedURLException {
-
-		super(hosts);
-
-		if (topic == null || topic.length() < 1) {
-			throw new IllegalArgumentException("A topic must be provided.");
-		}
-
-		fClosed = false;
-		fTopic = topic;
-		fMaxBatchSize = maxBatchSize;
-		fMaxBatchAgeMs = maxBatchAgeMs;
-		fCompress = compress;
-
-		fPending = new LinkedBlockingQueue<TimestampedMessage>();
-		fDontSendUntilMs = 0;
-
-		fExec = new ScheduledThreadPoolExecutor(1);
-		fExec.scheduleAtFixedRate(new Runnable() {
-			@Override
-			public void run() {
-				send(false);
-			}
-		}, 100, 50, TimeUnit.MILLISECONDS);
-	}
-
-	/**
-	 * 
-	 * 
-	 * @author author
-	 *
-	 */
-	private static class TimestampedMessage extends message {
-		/**
-		 * to store timestamp value
-		 */
-		public final long timestamp;
-
-		/**
-		 * constructor initialize with message
-		 * 
-		 * @param m
-		 * 
-		 */
-		public TimestampedMessage(message m) {
-			super(m);
-			timestamp = Clock.now();
-		}
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java
deleted file mode 100644
index 9eb1691..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaEventSet.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.GZIPInputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaJsonStreamReader;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaRawStreamReader;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaStreamReader;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders.CambriaTextStreamReader;
-
-import com.att.nsa.apiServer.streams.ChunkedInputStream;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-
-/**
- * An inbound event set.
- * 
- * @author author
- */
-public class CambriaEventSet {
-	private final reader fReader;
-
-	/**
-	 * constructor initialization
-	 * 
-	 * @param mediaType
-	 * @param originalStream
-	 * @param chunked
-	 * @param defPartition
-	 * @throws CambriaApiException
-	 */
-	public CambriaEventSet(String mediaType, InputStream originalStream,
-			boolean chunked, String defPartition) throws CambriaApiException {
-		InputStream is = originalStream;
-		if (chunked) {
-			is = new ChunkedInputStream(originalStream);
-		}
-
-		if (("application/json").equals(mediaType)) {
-			if (chunked) {
-				throw new CambriaApiException(
-						HttpServletResponse.SC_BAD_REQUEST,
-						"The JSON stream reader doesn't support chunking.");
-			}
-			fReader = new CambriaJsonStreamReader(is, defPartition);
-		} else if (("application/cambria").equals(mediaType)) {
-			fReader = new CambriaStreamReader(is);
-		} else if (("application/cambria-zip").equals(mediaType)) {
-			try {
-				is = new GZIPInputStream(is);
-			} catch (IOException e) {
-				throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
-						"Couldn't read compressed format: " + e);
-			}
-			fReader = new CambriaStreamReader(is);
-		} else if (("text/plain").equals(mediaType)) {
-			fReader = new CambriaTextStreamReader(is, defPartition);
-		} else {
-			fReader = new CambriaRawStreamReader(is, defPartition);
-		}
-	}
-
-	/**
-	 * Get the next message from this event set. Returns null when the end of
-	 * stream is reached. Will block until a message arrives (or the stream is
-	 * closed/broken).
-	 * 
-	 * @return a message, or null
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 */
-	public message next() throws IOException, CambriaApiException {
-		return fReader.next();
-	}
-
-	/**
-	 * 
-	 * @author author
-	 *
-	 */
-	public interface reader {
-		/**
-		 * 
-		 * @return
-		 * @throws IOException
-		 * @throws CambriaApiException
-		 */
-		message next() throws IOException, CambriaApiException;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java
deleted file mode 100644
index 9fbc7f7..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/CambriaOutboundEventStream.java
+++ /dev/null
@@ -1,516 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer.Message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder.StreamWriter;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-
-
-/**
- * class used to write the consumed messages
- * 
- * @author author
- *
- */
-public class CambriaOutboundEventStream implements StreamWriter {
-	private static final int kTopLimit = 1024 * 4;
-
-	/**
-	 * 
-	 * static innerclass it takes all the input parameter for kafka consumer
-	 * like limit, timeout, meta, pretty
-	 * 
-	 * @author author
-	 *
-	 */
-	public static class Builder {
-
-		// Required
-		private final Consumer fConsumer;
-		//private final rrNvReadable fSettings;   // used during write to tweak
-												// format, decide to explicitly
-												// close stream or not
-
-		// Optional
-		private int fLimit;
-		private int fTimeoutMs;
-		private String fTopicFilter;
-		private boolean fPretty;
-		private boolean fWithMeta;
-
-		// private int fOffset;
-		/**
-		 * constructor it initializes all the consumer parameters
-		 * 
-		 * @param c
-		 * @param settings
-		 */
-		public Builder(Consumer c) {
-			this.fConsumer = c;
-			//this.fSettings = settings;
-
-			fLimit = CambriaConstants.kNoTimeout;
-			fTimeoutMs = CambriaConstants.kNoLimit;
-			fTopicFilter = CambriaConstants.kNoFilter;
-			fPretty = false;
-			fWithMeta = false;
-			// fOffset = CambriaEvents.kNextOffset;
-		}
-
-		/**
-		 * 
-		 * constructor initializes with limit
-		 * 
-		 * @param l
-		 *            only l no of messages will be consumed
-		 * @return
-		 */
-		public Builder limit(int l) {
-			this.fLimit = l;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with timeout
-		 * 
-		 * @param t
-		 *            if there is no message to consume, them DMaaP will wait
-		 *            for t time
-		 * @return
-		 */
-		public Builder timeout(int t) {
-			this.fTimeoutMs = t;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with filter
-		 * 
-		 * @param f
-		 *            filter
-		 * @return
-		 */
-		public Builder filter(String f) {
-			this.fTopicFilter = f;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with boolean value pretty
-		 * 
-		 * @param p
-		 *            messages print in new line
-		 * @return
-		 */
-		public Builder pretty(boolean p) {
-			fPretty = p;
-			return this;
-		}
-
-		/**
-		 * constructor initializes with boolean value meta
-		 * 
-		 * @param withMeta,
-		 *            along with messages offset will print
-		 * @return
-		 */
-		public Builder withMeta(boolean withMeta) {
-			fWithMeta = withMeta;
-			return this;
-		}
-
-		// public Builder atOffset ( int pos )
-		// {
-		// fOffset = pos;
-		// return this;
-		// }
-		/**
-		 * method returs object of CambriaOutboundEventStream
-		 * 
-		 * @return
-		 * @throws CambriaApiException
-		 */
-		public CambriaOutboundEventStream build() throws CambriaApiException {
-			return new CambriaOutboundEventStream(this);
-		}
-	}
-
-	@SuppressWarnings("unchecked")
-	/**
-	 * 
-	 * @param builder
-	 * @throws CambriaApiException
-	 * 
-	 */
-	private CambriaOutboundEventStream(Builder builder) throws CambriaApiException {
-		fConsumer = builder.fConsumer;
-		fLimit = builder.fLimit;
-		fTimeoutMs = builder.fTimeoutMs;
-		//fSettings = builder.fSettings;
-		fSent = 0;
-		fPretty = builder.fPretty;
-		fWithMeta = builder.fWithMeta;
-		
-//		if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
-//			fHpAlarmFilter = null;
-//			fHppe = null;
-//		} else {
-//			try {
-//				final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
-//				HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
-//				fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
-//				final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
-//				fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
-//			} catch (HpReaderException e) {
-//				// JSON was okay, but the filter engine says it's bogus
-//				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-//						"Couldn't create filter: " + e.getMessage());
-//			} catch (JSONException e) {
-//				// user sent a bogus JSON object
-//				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-//						"Couldn't parse JSON: " + e.getMessage());
-//			}
-//		}
-	}
-
-	/**
-	 * 
-	 * interface provides onWait and onMessage methods
-	 *
-	 */
-	public interface operation {
-		/**
-		 * Call thread.sleep
-		 * @throws IOException
-		 */
-		void onWait() throws IOException;
-/**
- * provides the output based in the consumer paramter
- * @param count
- * @param msg
- * @throws IOException
- */
-		void onMessage(int count, Message msg) throws IOException;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public int getSentCount() {
-		return fSent;
-	}
-
-	@Override
-	/**
-	 * 
-	 * @param os
-	 * throws IOException
-	 */
-	public void write(final OutputStream os) throws IOException {
-		//final boolean transactionEnabled = topic.isTransactionEnabled();
-		//final boolean transactionEnabled = isTransEnabled();
-		final boolean transactionEnabled = istransEnable;
-		os.write('[');
-
-		fSent = forEachMessage(new operation() {
-			@Override
-			public void onMessage(int count, Message msg) throws IOException, JSONException {
-
-				String message = "";
-				JSONObject jsonMessage = null;
-				if (transactionEnabled) {
-					jsonMessage = new JSONObject(msg.getMessage());
-					message = jsonMessage.getString("message");
-				}
-
-				if (count > 0) {
-					os.write(',');
-				}
-
-				if (fWithMeta) {
-					final JSONObject entry = new JSONObject();
-					entry.put("offset", msg.getOffset());
-					entry.put("message", message);
-					os.write(entry.toString().getBytes());
-				} else {
-					//os.write(message.getBytes());
-					 String jsonString = "";
-					if(transactionEnabled){
-						jsonString= JSONObject.valueToString(message);
-					}else{
-						jsonString = JSONObject.valueToString (msg.getMessage());
-						}
-				 	os.write ( jsonString.getBytes () );
-				}
-
-				if (fPretty) {
-					os.write('\n');
-				}
-
-				
-				String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-				if (null==metricTopicname)
-           		  metricTopicname="msgrtr.apinode.metrics.dmaap";
-           	 
-           	 if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
-				if (transactionEnabled) {
-					final String transactionId = jsonMessage.getString("transactionId");
-					responseTransactionId = transactionId;
-
-					StringBuilder consumerInfo = new StringBuilder();
-					if (null != dmaapContext && null != dmaapContext.getRequest()) {
-						final HttpServletRequest request = dmaapContext.getRequest();
-						consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
-						consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
-						consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
-						consumerInfo.append(
-								"consumerGroup= \"" + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
-						consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
-					}
-
-					log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transactionId
-							+ "\",messageLength= \"" + message.length() + "\",topic= \"" + topic.getName() + "\"]");
-				}
-           	 }
-
-			}
-
-			@Override
-			/**
-			 * 
-			 * It makes thread to wait
-			 * @throws IOException
-			 */
-			public void onWait() throws IOException {
-				os.flush(); // likely totally unnecessary for a network socket
-				try {
-					// FIXME: would be good to wait/signal
-					Thread.sleep(100);
-				} catch (InterruptedException e) {
-					// ignore
-				}
-			}
-		});
-
-		//if (null != dmaapContext && isTransactionEnabled()) {
-			if (null != dmaapContext && istransEnable) {
-			
-			dmaapContext.getResponse().setHeader("transactionId",
-					Utils.getResponseTransactionId(responseTransactionId));
-		}
-
-		os.write(']');
-		os.flush();
-
-		boolean close_out_stream = true;
-		String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"close.output.stream");
-		if(null!=strclose_out_stream)close_out_stream=Boolean.parseBoolean(strclose_out_stream);
-		
-		//if (fSettings.getBoolean("close.output.stream", true)) {
-				if (close_out_stream) {
-			os.close();
-		}
-	}
-
-	/**
-	 * 
-	 * @param requestURI
-	 * @return
-	 */
-	private String getConsumerGroupFromRequest(String requestURI) {
-		if (null != requestURI && !requestURI.isEmpty()) {
-
-			String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7);
-
-			int startIndex = consumerDetails.indexOf("/") + 1;
-			int endIndex = consumerDetails.lastIndexOf("/");
-			return consumerDetails.substring(startIndex, endIndex);
-		}
-		return null;
-	}
-/**
- * 
- * @param op
- * @return
- * @throws IOException
- * @throws JSONException 
- */
-	public int forEachMessage(operation op) throws IOException, JSONException {
-		final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
-
-		int count = 0;
-		boolean firstPing = true;
-
-		final long startMs = System.currentTimeMillis();
-		final long timeoutMs = fTimeoutMs + startMs;
-
-		while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
-			if (!firstPing) {
-				op.onWait();
-			}
-			firstPing = false;
-
-			Consumer.Message msg = null;
-			while (count < effectiveLimit && (msg = fConsumer.nextMessage()) != null) {
-
-				
-				String message = "";
-			//	if (topic.isTransactionEnabled() || true) {
-				if (istransEnable) {
-					// As part of DMaaP changes we are wrapping the original
-					// message into a json object
-					// and then this json object is further wrapped into message
-					// object before publishing,
-					// so extracting the original message from the message
-					// object for matching with filter.
-					final JSONObject jsonMessage = new JSONObject(msg.getMessage());
-					message = jsonMessage.getString("message");
-				} else {
-					message = msg.getMessage();
-				}
-
-				// If filters are enabled/set, message should be in JSON format
-				// for filters to work for
-				// otherwise filter will automatically ignore message in
-				// non-json format.
-				if (filterMatches(message)) {
-					op.onMessage(count, msg);
-					count++;
-				}
-			}
-		}
-
-		return count;
-	}
-
-	/**
-	 * 
-	 * Checks whether filter is initialized
-	 */
-//	private boolean isFilterInitialized() {
-//		return (fHpAlarmFilter != null && fHppe != null);
-//	}
-
-	/**
-	 * 
-	 * @param msg
-	 * @return
-	 */
-	private boolean filterMatches(String msg) {
-		boolean result = true;
-//		if (isFilterInitialized()) {
-//			try {
-//				final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
-//				result = fHpAlarmFilter.matches(fHppe, e);
-//			} catch (JSONException x) {
-//				// the msg may not be JSON
-//				result = false;
-//				log.error("Failed due to " + x.getMessage());
-//			} catch (Exception x) {
-//				log.error("Error using filter: " + x.getMessage(), x);
-//			}
-//		}
-
-		return result;
-	}
-
-	public DMaaPContext getDmaapContext() {
-		return dmaapContext;
-	}
-
-	public void setDmaapContext(DMaaPContext dmaapContext) {
-		this.dmaapContext = dmaapContext;
-	}
-
-	public Topic getTopic() {
-		return topic;
-	}
-
-	public void setTopic(Topic topic) {
-		this.topic = topic;
-	}
-	
-	public void setTopicStyle(boolean aaftopic) {
-		this.isAAFTopic = aaftopic;
-	}
-	
-	public void setTransEnabled ( boolean transEnable) {
-		this.istransEnable = transEnable;
-	}
-
-	/*private boolean isTransactionEnabled() {
-		//return topic.isTransactionEnabled();
-		return true; // let metrics creates for all the topics
-	}*/
-
-	private boolean isTransEnabled() {
-		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-		boolean istransidreqd=false;
-		if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) || isAAFTopic){
-			istransidreqd = true; 
-		}
-		
-		return istransidreqd;
-
-	}
-	
-	private final Consumer fConsumer;
-	private final int fLimit;
-	private final int fTimeoutMs;
-	//private final rrNvReadable fSettings;
-	private final boolean fPretty;
-	private final boolean fWithMeta;
-	private int fSent;
-//	private final HpAlarmFilter<HpJsonEvent> fHpAlarmFilter;
-//	private final HpProcessingEngine<HpJsonEvent> fHppe;
-	private DMaaPContext dmaapContext;
-	private String responseTransactionId;
-	private Topic topic;
-	private boolean isAAFTopic = false;
-	private boolean istransEnable = false;
-	
-
-	//private static final Logger log = Logger.getLogger(CambriaOutboundEventStream.class);
-	
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
deleted file mode 100644
index 5aefe2d..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
-
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
-
-/**
- * 
- * @author author
- *
- */
-public class CambriaJsonStreamReader implements reader {
-	private final JSONTokener fTokens;
-	private final boolean fIsList;
-	private long fCount;
-	private final String fDefPart;
-	public static final String kKeyField = "cambria.partition";
-
-	/**
-	 * 
-	 * @param is
-	 * @param defPart
-	 * @throws CambriaApiException
-	 */
-	public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException {
-		try {
-			fTokens = new JSONTokener(is);
-			fCount = 0;
-			fDefPart = defPart;
-
-			final int c = fTokens.next();
-			if (c == '[') {
-				fIsList = true;
-			} else if (c == '{') {
-				fTokens.back();
-				fIsList = false;
-			} else {
-				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
-			}
-		} catch (JSONException e) {
-			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
-		}
-	}
-
-	@Override
-	public message next() throws CambriaApiException {
-		try {
-			if (!fTokens.more()) {
-				return null;
-			}
-
-			final int c = fTokens.next();
-			
-			/*if (c ==','){
-				fCloseCount++;
-				System.out.println("fCloseCount=" + fCloseCount +" fCount "+fCount);
-			}*/
-			if (fIsList) {
-				if (c == ']' || (fCount > 0 && c == 10))
-					return null;
-
-
-				if (fCount > 0 && c != ',' && c!= 10) {
-					throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-							"Expected ',' or closing ']' after last object.");
-				}
-
-				if (fCount == 0 && c != '{' && c!= 10  && c!=32) {
-					throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object.");
-				}
-			} else if (fCount != 0 || c != '{') {
-				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object.");
-			}
-
-			if (c == '{') {
-				fTokens.back();
-			}
-			final JSONObject o = new JSONObject(fTokens);
-			fCount++;
-			return new msg(o);
-		} catch (JSONException e) {
-			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
-
-		}
-	}
-
-	private class msg implements message {
-		private final String fKey;
-		private  String fMsg;
-		private LogDetails logDetails;
-		private boolean transactionEnabled;
-
-		/**
-		 * constructor
-		 * 
-		 * @param o
-		 */
-		//public msg(JSONObject o){}
-		
-		
-		public msg(JSONObject o) {
-			String key = o.optString(kKeyField, fDefPart);
-			if (key == null) {
-				key = "" + System.currentTimeMillis();
-			}
-			fKey = key;
-					
-				fMsg = o.toString().trim();
-			
-		}
-
-		@Override
-		public String getKey() {
-			return fKey;
-		}
-
-		@Override
-		public String getMessage() {
-			return fMsg;
-		}
-
-		@Override
-		public boolean isTransactionEnabled() {
-			return transactionEnabled;
-		}
-
-		@Override
-		public void setTransactionEnabled(boolean transactionEnabled) {
-			this.transactionEnabled = transactionEnabled;
-		}
-
-		@Override
-		public void setLogDetails(LogDetails logDetails) {
-			this.logDetails = logDetails;
-		}
-
-		@Override
-		public LogDetails getLogDetails() {
-			return logDetails;
-		}
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
deleted file mode 100644
index f0ec225..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
-
-import com.att.nsa.util.StreamTools;
-
-/**
- * 
- * This stream reader reads raw bytes creating a single message.
- * @author author
- *
- */
-public class CambriaRawStreamReader implements reader
-{
-	/**
-	 * This is the constructor of CambriaRawStreamReader, it will basically the read from Input stream
-	 * @param is
-	 * @param defPart
-	 * @throws CambriaApiException
-	 */
-	public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException
-	{
-		fStream = is;
-		fDefPart = defPart;
-		fClosed = false;
-	}
-
-	@Override
-	/**
-	 * 
-	 * next() method reads the bytes and
-	 * iterates through the messages 
-	 * @throws CambriaApiException
-	 * 
-	 */
-	public message next () throws CambriaApiException
-	{
-		if ( fClosed ) return null;
-
-		try
-		{
-			final byte[] rawBytes = StreamTools.readBytes ( fStream );
-			fClosed = true;
-			return new message ()
-			{
-				private LogDetails logDetails;
-				private boolean transactionEnabled;
-
-				/**
-				 * returns boolean value which 
-				 * indicates whether transaction is enabled
-				 */
-				public boolean isTransactionEnabled() {
-					return transactionEnabled;
-				}
-
-				/**
-				 * sets boolean value which 
-				 * indicates whether transaction is enabled
-				 */
-				public void setTransactionEnabled(boolean transactionEnabled) {
-					this.transactionEnabled = transactionEnabled;
-				}
-				
-				@Override
-				/**
-				 * @returns key
-				 * It ch4ecks whether fDefPart value is Null.
-				 * If yes, it will return ystem.currentTimeMillis () else
-				 * it will return fDefPart variable value
-				 */
-				public String getKey ()
-				{
-					return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
-				}
-
-				@Override
-				/**
-				 * returns the message in String type object
-				 */
-				public String getMessage ()
-				{
-					return new String ( rawBytes );
-				}
-
-				/**
-				 * set log details in logDetails variable
-				 */
-				@Override
-				public void setLogDetails(LogDetails logDetails) {
-					this.logDetails = logDetails;
-				}
-
-				@Override
-				/**
-				 * get the log details
-				 */
-				public LogDetails getLogDetails() {
-					return this.logDetails;
-				}
-			};
-		}
-		catch ( IOException e )
-		{
-			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
-		}
-	}
-	
-	private final InputStream fStream;
-	private final String fDefPart;
-	private boolean fClosed;
-	//private String transactionId;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
deleted file mode 100644
index bff6398..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
-
-/**
- * Read an optionally chunked stream in the Cambria app format. This format
- * allows for speedier server-side message parsing than pure JSON. It's looks
- * like:<br/>
- * <br/>
- * &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;<br/>
- * <br/>
- * Whitespace before/after each entry is ignored, so messages can be delivered
- * with newlines between them, or not.
- * 
- * @author author
- *
- */
-public class CambriaStreamReader implements reader {
-	/**
-	 * constructor initializing InputStream with fStream
-	 * 
-	 * @param senderStream
-	 * @throws CambriaApiException
-	 */
-	public CambriaStreamReader(InputStream senderStream) throws CambriaApiException {
-		fStream = senderStream;
-	}
-
-	@Override
-	/**
-	 * next method iterates through msg length
-	 * throws IOException
-	 * throws CambriaApiException
-	 * 
-	 */
-	public message next() throws IOException, CambriaApiException {
-		final int keyLen = readLength();
-		if (keyLen == -1)
-			return null;
-
-		final int msgLen = readLength();
-		final String keyPart = readString(keyLen);
-		final String msgPart = readString(msgLen);
-
-		return new msg(keyPart, msgPart);
-	}
-
-	private static class msg implements message {
-		/**
-		 * constructor initialization
-		 * 
-		 * @param key
-		 * @param msg
-		 */
-		public msg(String key, String msg) {
-			// if no key, use the current time. This allows the message to be
-			// delivered
-			// in any order without forcing it into a single partition as empty
-			// string would.
-			if (key.length() < 1) {
-				key = "" + System.currentTimeMillis();
-			}
-
-			fKey = key;
-			fMsg = msg;
-		}
-
-		@Override
-		/**
-		 * @returns fkey
-		 */
-		public String getKey() {
-			return fKey;
-		}
-
-		@Override
-		/**
-		 * returns the message in String type object
-		 */
-		public String getMessage() {
-			return fMsg;
-		}
-
-		private final String fKey;
-		private final String fMsg;
-		private LogDetails logDetails;
-		private boolean transactionEnabled;
-		
-		/**
-		 * returns boolean value which 
-		 * indicates whether transaction is enabled
-		 */
-		public boolean isTransactionEnabled() {
-			return transactionEnabled;
-		}
-		
-		/**
-		 * sets boolean value which 
-		 * indicates whether transaction is enabled
-		 */
-		public void setTransactionEnabled(boolean transactionEnabled) {
-			this.transactionEnabled = transactionEnabled;
-		}
-
-		@Override
-		/**
-		 * set log details in logDetails variable
-		 */
-		public void setLogDetails(LogDetails logDetails) {
-			this.logDetails = logDetails;
-		}
-
-		@Override
-		/**
-		 * get the log details
-		 */
-		public LogDetails getLogDetails() {
-			return this.logDetails;
-		}
-
-	}
-
-	private final InputStream fStream;
-
-	/**
-	 * max cambria length indicates message length
-	 
-	// This limit is here to prevent the server from spinning on a long string of numbers
-    // that is delivered with 'application/cambria' as the format. The limit needs to be
-    // large enough to support the max message length (currently 1MB, the default Kafka
-    // limit)
-    * */
-     
-    private static final int kMaxCambriaLength = 4*1000*1024;
-
-
-	/**
-	 * 
-	 * @return
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 */
-	private int readLength() throws IOException, CambriaApiException {
-		// always ignore leading whitespace
-		int c = fStream.read();
-		while (Character.isWhitespace(c)) {
-			c = fStream.read();
-		}
-
-		if (c == -1) {
-			return -1;
-		}
-
-		int result = 0;
-		while (Character.isDigit(c)) {
-			result = (result * 10) + (c - '0');
-			if (result > kMaxCambriaLength) {
-				throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
-			}
-			c = fStream.read();
-		}
-
-		if (c != '.') {
-			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
-		}
-
-		return result;
-	}
-
-	/**
-	 * 
-	 * @param len
-	 * @return
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 */
-	private String readString(int len) throws IOException, CambriaApiException {
-		final byte[] buffer = new byte[len];
-
-		final long startMs = System.currentTimeMillis();
-		final long timeoutMs = startMs + 30000; // FIXME configurable
-
-		int readTotal = 0;
-		while (readTotal < len) {
-			final int read = fStream.read(buffer, readTotal, len - readTotal);
-			if (read == -1 || System.currentTimeMillis() > timeoutMs) {
-				// EOF
-				break;
-			}
-			readTotal += read;
-		}
-
-		if (readTotal < len) {
-			throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-					"End of stream while reading " + len + " bytes");
-		}
-
-		return new String(buffer);
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
deleted file mode 100644
index ce2cffd..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.streamReaders;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet.reader;
-
-/**
- * This stream reader just pulls single lines. It uses the default partition if provided. If
- * not, the key is the current time, which does not guarantee ordering.
- * 
- * @author author
- *
- */
-public class CambriaTextStreamReader implements reader
-{
-	/**
-	 * This is the constructor for Cambria Text Reader format
-	 * @param is
-	 * @param defPart
-	 * @throws CambriaApiException
-	 */
-	public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException
-	{
-		fReader = new BufferedReader ( new InputStreamReader ( is ) );
-		fDefPart = defPart;
-	}
-
-	@Override
-	/**
-	 * next() method iterates through msg length
-	 * throws IOException
-	 * throws CambriaApiException
-	 * 
-	 */ 
-	public message next () throws CambriaApiException
-	{
-		try
-		{
-			final String line = fReader.readLine ();
-			if ( line == null ) return null;
-
-			return new message ()
-			{
-				private LogDetails logDetails;
-				private boolean transactionEnabled;
-
-				/**
-				 * returns boolean value which 
-				 * indicates whether transaction is enabled
-				 * @return
-				 */
-				public boolean isTransactionEnabled() {
-					return transactionEnabled;
-				}
-
-				/**
-				 * sets boolean value which 
-				 * indicates whether transaction is enabled
-				 */
-				public void setTransactionEnabled(boolean transactionEnabled) {
-					this.transactionEnabled = transactionEnabled;
-				}
-				
-				@Override
-				/**
-				 * @returns key
-				 * It ch4ecks whether fDefPart value is Null.
-				 * If yes, it will return ystem.currentTimeMillis () else
-				 * it will return fDefPart variable value
-				 */
-				public String getKey ()
-				{
-					return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
-				}
-
-				@Override
-				/**
-				 * returns the message in String type object
-				 * @return
-				 */
-				public String getMessage ()
-				{
-					return line;
-				}
-
-				@Override
-				/**
-				 * set log details in logDetails variable
-				 */
-				public void setLogDetails(LogDetails logDetails) {
-					this.logDetails = logDetails;
-				}
-
-				@Override
-				/**
-				 * get the log details
-				 */
-				public LogDetails getLogDetails() {
-					return this.logDetails;
-				}
-			};
-		}
-		catch ( IOException e )
-		{
-			throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
-		}
-	}
-	
-	private final BufferedReader fReader;
-	private final String fDefPart;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java
deleted file mode 100644
index 9593b51..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticator.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-
-
-
-
-/**
- * 
- * @author author
- *
- */
-public interface DMaaPAAFAuthenticator {
-	boolean aafAuthentication( HttpServletRequest req , String role);
-	String aafPermissionString(String permission, String action) throws CambriaApiException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
deleted file mode 100644
index 223e8c5..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
-
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.http.HttpStatus;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-
-
-/**
- * 
- * @author author
- *
- */
-public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
-
-	/**
-	 * @param req
-	 * @param role
-	 */
-	@Override
-	public boolean aafAuthentication(HttpServletRequest req, String role) {
-		boolean auth = false;
-		if(req.isUserInRole(role))
-		{
-			
-			auth = true;
-		}
-		return auth;
-	}
-
-	@Override
-	public String aafPermissionString(String topicName, String action) throws CambriaApiException {
-		
-		
-		String permission = "";
-		String nameSpace ="";
-		if(topicName.contains(".") && (topicName.contains("com.att")||topicName.contains("org"))) {
-			//String topic = topicName.substring(topicName.lastIndexOf(".")+1);
-			nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
-		}
-		else {
-			nameSpace = null;
-			 nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB");
-			
-			if(null==nameSpace)nameSpace="com.att.dmaap.mr.ueb";
-			
-			
-			/*ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-					DMaaPResponseCode.TOPIC_NOT_IN_AAF.getResponseCode(), "Topic does not exist in AAF"
-							, null, Utils.getFormattedDate(new Date()), topicName,
-					null, null, null, null);
-					
-			throw new CambriaApiException(errRes);*/
-		}
-		
-		permission = nameSpace+".mr.topic|:topic."+topicName+"|"+action;
-		return permission;
-		
-	}
-	
-	
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java
deleted file mode 100644
index b0c87af..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticator.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-import com.att.nsa.security.NsaApiKey;
-
-
-/**
- * An interface for authenticating an inbound request.
- * @author author
- *
- * @param <K> NsaApiKey
- */
-public interface DMaaPAuthenticator<K extends NsaApiKey> {
-
-	/**
-	 * Qualify a request as possibly using the authentication method that this class implements.
-	 * @param req
-	 * @return true if the request might be authenticated by this class
-	 */
-	boolean qualify ( HttpServletRequest req );
-	
-	/**
-	 * Check for a request being authentic. If it is, return the API key. If not, return null.
-	 * @param req An inbound web request
-	 * @return the API key for an authentic request, or null
-	 */
-	K isAuthentic ( HttpServletRequest req );
-	/**
-	 * Check for a ctx being authenticate. If it is, return the API key. If not, return null.
-	 * @param ctx
-	 * @return the API key for an authentication request, or null
-	 */
-	K authenticate ( DMaaPContext ctx );
-	
-	
-	void addAuthenticator(DMaaPAuthenticator<K> a);
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java
deleted file mode 100644
index d1d5019..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/DMaaPAuthenticatorImpl.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security;
-
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
-
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.NsaAuthenticator;
-import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-/**
- * 
- * @author author
- *
- * @param <K>
- */
-public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
-
-	private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
-	
-
-
-	// Setting timeout to a large value for testing purpose.
-	// private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10; //
-	// 10 minutes
-	private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10;
-
-	/**
-	 * Construct the security manager against an API key database
-	 * 
-	 * @param db
-	 *            the API key db
-	 */
-	public DMaaPAuthenticatorImpl(NsaApiDb<K> db) {
-		this(db, kDefaultRequestTimeWindow);
-	}
-
-	
-	
-	
-	/**
-	 * Construct the security manager against an API key database with a
-	 * specific request time window size
-	 * 
-	 * @param db
-	 *            the API key db
-	 * @param authTimeWindowMs
-	 *            the size of the time window for request authentication
-	 */
-	public DMaaPAuthenticatorImpl(NsaApiDb<K> db, long authTimeWindowMs) {
-		fAuthenticators = new LinkedList<DMaaPAuthenticator<K>>();
-
-		fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, authTimeWindowMs));
-	}
-
-	/**
-	 * Authenticate a user's request. This method returns the API key if the
-	 * user is authentic, null otherwise.
-	 * 
-	 * @param ctx
-	 * @return an api key record, or null
-	 */
-	public K authenticate(DMaaPContext ctx) {
-		final HttpServletRequest req = ctx.getRequest();
-		for (DMaaPAuthenticator<K> a : fAuthenticators) {
-			if (a.qualify(req)) {
-				final K k = a.isAuthentic(req);
-				if (k != null)
-					return k;
-			}
-			// else: this request doesn't look right to the authenticator
-		}
-		return null;
-	}
-
-	/**
-	 * Get the user associated with the incoming request, or null if the user is
-	 * not authenticated.
-	 * 
-	 * @param ctx
-	 * @return
-	 */
-	public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) {
-		final DMaaPAuthenticator<NsaSimpleApiKey> m = ctx.getConfigReader().getfSecurityManager();
-		return m.authenticate(ctx);
-	}
-
-	/**
-	 * method by default returning false
-	 * @param req
-	 * @return false
-	 */
-	public boolean qualify(HttpServletRequest req) {
-		return false;
-	}
-/**
- * method by default returning null
- * @param req
- * @return null
- */
-	public K isAuthentic(HttpServletRequest req) {
-		return null;
-	}
-	
-	public void addAuthenticator ( DMaaPAuthenticator<K> a )
-	{
-		this.fAuthenticators.add(a);
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
deleted file mode 100644
index dcf98f8..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
-
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.authenticators.MechIdAuthenticator;
-//import com.att.nsa.security.db.NsaApiDb;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * An authenticator for AT&T MechIds.
- * 
- * @author author
- *
- * @param <K>
- */
-public class DMaaPMechIdAuthenticator <K extends NsaApiKey> implements DMaaPAuthenticator<K> {
-
-/**
- * This is not yet implemented. by refault its returing false
- * @param req HttpServletRequest
- * @return false
- */
-	public boolean qualify (HttpServletRequest req) {
-		// we haven't implemented anything here yet, so there's no qualifying request
-		return false;
-	}
-/**
- * This metod authenticate the mech id 
- * @param req
- * @return APIkey or null
- */
-	public K isAuthentic (HttpServletRequest req) {
-		final String remoteAddr = req.getRemoteAddr();
-		authLog ( "MechId auth is not yet implemented.", remoteAddr );
-		return null;
-	}
-
-	private static void authLog ( String msg, String remoteAddr )
-	{
-		log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
-	}
-
-//	private final NsaApiDb<K> fDb;
-	//private static final Logger log = Logger.getLogger( MechIdAuthenticator.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
-/**
- * Curently its not yet implemented returning null
- * @param ctx DMaaP context
- * @return APIkey or null
- */
-	@Override
-	public K authenticate(DMaaPContext ctx) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-@Override
-public void addAuthenticator(DMaaPAuthenticator<K> a) {
-	// TODO Auto-generated method stub
-	
-}
-
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
deleted file mode 100644
index 8026ab5..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.data.sha1HmacSigner;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.db.NsaApiDb;
-
-/**
- * This authenticator handles an AWS-like authentication, originally used by the
- * Cambria server (the API server for UEB).
- * 
- * @author author
- *
- * @param <K>
- */
-public class DMaaPOriginalUebAuthenticator<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
-	/**
-	 * constructor initialization
-	 * 
-	 * @param db
-	 * @param requestTimeWindowMs
-	 */
-	public DMaaPOriginalUebAuthenticator(NsaApiDb<K> db, long requestTimeWindowMs) {
-		fDb = db;
-		fRequestTimeWindowMs = requestTimeWindowMs;
-		//fAuthenticators = new LinkedList<DMaaPAuthenticator<K>>();
-
-		//fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, requestTimeWindowMs));
-
-	}
-
-	@Override
-	public boolean qualify(HttpServletRequest req) {
-		// accept anything that comes in with X-(Cambria)Auth in the header
-		final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
-		return xAuth != null;
-	}
-
-	/**
-	 * method for authentication
-	 * 
-	 * @param req
-	 * @return
-	 */
-	public K isAuthentic(HttpServletRequest req) {
-		final String remoteAddr = req.getRemoteAddr();
-		// Cambria originally used "Cambria..." headers, but as the API key
-		// system is now more
-		// general, we take either form.
-		final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
-		final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" });
-
-		final String httpDate = req.getHeader("Date");
-
-		final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" });
-		return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce);
-	}
-
-	/**
-	 * Authenticate a user's request. This method returns the API key if the
-	 * user is authentic, null otherwise.
-	 * 
-	 * @param remoteAddr
-	 * @param xAuth
-	 * @param xDate
-	 * @param httpDate
-	 * @param nonce
-	 * @return an api key record, or null
-	 */
-	public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) {
-		if (xAuth == null) {
-			authLog("No X-Auth header on request", remoteAddr);
-			return null;
-		}
-		
-		final String[] xAuthParts = xAuth.split(":");
-		if (xAuthParts.length != 2) {
-			authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
-			return null;
-		}
-
-
-		// get the api key and signature
-		final String clientApiKey = xAuthParts[0];
-		final String clientApiHash = xAuthParts[1];
-		if (clientApiKey.length() == 0 || clientApiHash.length() == 0) {
-			authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
-			return null;
-		}
-		// if the user provided X-Date, use that. Otherwise, go for Date
-		final String dateString = xDate != null ? xDate : httpDate;
-		final Date clientDate = getClientDate(dateString);
-		if (clientDate == null) {
-			authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr);
-			return null;
-		}
-		// check the time range
-		final long nowMs = System.currentTimeMillis();
-		final long diffMs = Math.abs(nowMs - clientDate.getTime());
-		if (diffMs > fRequestTimeWindowMs) {
-			authLog("Client date is not in acceptable range of server date. Client:" + clientDate.getTime()
-					+ ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr);
-			return null;
-		}
-		K apiRecord;
-		try {
-			apiRecord = fDb.loadApiKey(clientApiKey);
-			if (apiRecord == null) {
-				authLog("No such API key " + clientApiKey, remoteAddr);
-				return null;
-			}
-		} catch (ConfigDbException e) {
-			authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr);
-			return null;
-		}
-				// make the signed content
-		final StringBuilder sb = new StringBuilder();
-		sb.append(dateString);
-		if (nonce != null) {
-			sb.append(":");
-			sb.append(nonce);
-		}
-		final String signedContent = sb.toString();
-		// now check the signed date string
-		final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret());
-		if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) {
-			authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".",
-					remoteAddr);
-			return null;
-		}
-		authLog("authenticated " + apiRecord.getKey(), remoteAddr);
-		return apiRecord;
-	}
-
-	/**
-	 * Get the first value of the first existing header from the headers list
-	 * 
-	 * @param req
-	 * @param headers
-	 * @return a header value, or null if none exist
-	 */
-	private static String getFirstHeader(HttpServletRequest req, String[] headers) {
-		for (String header : headers) {
-			final String result = req.getHeader(header);
-			if (result != null)
-				return result;
-		}
-		return null;
-	}
-
-	/**
-	 * Parse the date string into a Date using one of the supported date
-	 * formats.
-	 * 
-	 * @param dateHeader
-	 * @return a date, or null
-	 */
-	private static Date getClientDate(String dateString) {
-		if (dateString == null) {
-			return null;
-		}
-
-		// parse the date
-		Date result = null;
-		for (String dateFormat : kDateFormats) {
-			final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US);
-			if (!dateFormat.contains("z") && !dateFormat.contains("Z")) {
-				parser.setTimeZone(TIMEZONE_GMT);
-			}
-
-			try {
-				result = parser.parse(dateString);
-				break;
-			} catch (ParseException e) {
-				// presumably wrong format
-			}
-		}
-		return result;
-	}
-
-	private static void authLog(String msg, String remoteAddr) {
-		log.info("AUTH-LOG(" + remoteAddr + "): " + msg);
-	}
-
-	private final NsaApiDb<K> fDb;
-	private final long fRequestTimeWindowMs;
-
-	private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT");
-	
-	private static final String kDateFormats[] =
-		{
-		    // W3C date format (RFC 3339).
-		    "yyyy-MM-dd'T'HH:mm:ssz",
-		    "yyyy-MM-dd'T'HH:mm:ssXXX",		// as of Java 7, reqd to handle colon in TZ offset
-
-		    // Preferred HTTP date format (RFC 1123).
-		    "EEE, dd MMM yyyy HH:mm:ss zzz",
-
-		    // simple unix command line 'date' format
-		    "EEE MMM dd HH:mm:ss z yyyy",
-
-		    // Common date format (RFC 822).
-		    "EEE, dd MMM yy HH:mm:ss z",
-		    "EEE, dd MMM yy HH:mm z",
-		    "dd MMM yy HH:mm:ss z",
-		    "dd MMM yy HH:mm z",
-
-			// Obsoleted HTTP date format (ANSI C asctime() format).
-		    "EEE MMM dd HH:mm:ss yyyy",
-
-		    // Obsoleted HTTP date format (RFC 1036).
-		    "EEEE, dd-MMM-yy HH:mm:ss zzz",
-		};
-
-	/*private static final String kDateFormats[] = {
-			// W3C date format (RFC 3339).
-			"yyyy-MM-dd'T'HH:mm:ssz",
-
-			// Preferred HTTP date format (RFC 1123).
-			"EEE, dd MMM yyyy HH:mm:ss zzz",
-
-			// simple unix command line 'date' format
-			"EEE MMM dd HH:mm:ss z yyyy",
-
-			// Common date format (RFC 822).
-			"EEE, dd MMM yy HH:mm:ss z", "EEE, dd MMM yy HH:mm z", "dd MMM yy HH:mm:ss z", "dd MMM yy HH:mm z",
-
-			// Obsoleted HTTP date format (ANSI C asctime() format).
-			"EEE MMM dd HH:mm:ss yyyy",
-
-			// Obsoleted HTTP date format (RFC 1036).
-			"EEEE, dd-MMM-yy HH:mm:ss zzz", }; */
-	// logger declaration
-	//private static final Logger log = Logger.getLogger(DMaaPOriginalUebAuthenticator.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class);
-	@Override
-//	public K authenticate(DMaaPContext ctx) {
-		// TODO Auto-generated method stub
-		//return null;
-	//}
-	
-	public K authenticate(DMaaPContext ctx) {
-		
-		/*final HttpServletRequest req = ctx.getRequest();
-		for (DMaaPAuthenticator<K> a : fAuthenticators) {
-			if (a.qualify(req)) {
-				final K k = a.isAuthentic(req);
-				if (k != null)
-					return k;
-			}
-			// else: this request doesn't look right to the authenticator
-		}*/
-		return null;
-	}
-
-
-	public void addAuthenticator ( DMaaPAuthenticator<K> a )
-	{
-		//this.fAuthenticators.add(a);
-	}
-	//private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java
deleted file mode 100644
index b4a7282..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/AdminService.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-
-import org.json.JSONException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * @author author
- *
- */
-public interface AdminService {
-	/**
-	 * method provide consumerCache
-	 * 
-	 * @param dMaaPContext
-	 * @throws IOException
-	 */
-	void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException;
-
-	/**
-	 * method drops consumer cache
-	 * 
-	 * @param dMaaPContext
-	 * @throws JSONException
-	 * @throws IOException
-	 */
-	void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException;
-	
-	
-	/**
-	 * Get list of blacklisted ips 
-	 * @param dMaaPContext context
-	 * @throws IOException ex
-	 * @throws AccessDeniedException ex
-	 */
-	void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException;
-	
-	/**
-	 * Add ip to blacklist
-	 * @param dMaaPContext context
-	 * @param ip ip
-	 * @throws IOException ex
-	 * @throws ConfigDbException ex
-	 * @throws AccessDeniedException ex
-	 */
-	void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
-	
-	/**
-	 * Remove ip from blacklist
-	 * @param dMaaPContext context
-	 * @param ip ip
-	 * @throws IOException ex
-	 * @throws ConfigDbException ex
-	 * @throws AccessDeniedException ex
-	 */
-	void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java
deleted file mode 100644
index bb3685d..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/ApiKeysService.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.ApiKeyBean;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-
-/**
- * Declaring all the method in interface that is mainly used for authentication
- * purpose.
- *
- *
- */
-
-public interface ApiKeysService {
-	/**
-	 * This method declaration for getting all ApiKey that has generated on
-	 * server.
-	 * 
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-
-	public void getAllApiKeys(DMaaPContext dmaapContext)
-			throws ConfigDbException, IOException;
-
-	/**
-	 * Getting information about specific ApiKey
-	 * 
-	 * @param dmaapContext
-	 * @param apikey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-
-	public void getApiKey(DMaaPContext dmaapContext, String apikey)
-			throws ConfigDbException, IOException;
-
-	/**
-	 * Thid method is used for create a particular ApiKey
-	 * 
-	 * @param dmaapContext
-	 * @param nsaApiKey
-	 * @throws KeyExistsException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-
-	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
-			throws KeyExistsException, ConfigDbException, IOException;
-
-	/**
-	 * This method is used for update ApiKey that is already generated on
-	 * server.
-	 * 
-	 * @param dmaapContext
-	 * @param apikey
-	 * @param nsaApiKey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException 
-	 */
-	public void updateApiKey(DMaaPContext dmaapContext, String apikey,
-			ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException
-			;
-
-	/**
-	 * This method is used for delete specific ApiKey
-	 * 
-	 * @param dmaapContext
-	 * @param apikey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 */
-
-	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
-			throws ConfigDbException, IOException,AccessDeniedException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java
deleted file mode 100644
index 526d185..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/EventsService.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
-/**
- * 
- * @author author
- *
- */
-public interface EventsService {
-	/**
-	 * 
-	 * @param ctx
-	 * @param topic
-	 * @param consumerGroup
-	 * @param clientId
-	 * @throws ConfigDbException
-	 * @throws TopicExistsException
-	 * @throws AccessDeniedException
-	 * @throws UnavailableException
-	 * @throws CambriaApiException
-	 * @throws IOException
-	 */
-	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
-			throws ConfigDbException, TopicExistsException,UnavailableException,
-			CambriaApiException, IOException,AccessDeniedException;
-
-	/**
-	 * 
-	 * @param ctx
-	 * @param topic
-	 * @param msg
-	 * @param defaultPartition
-	 * @param requestTime
-	 * @throws ConfigDbException
-	 * @throws AccessDeniedException
-	 * @throws TopicExistsException
-	 * @throws CambriaApiException
-	 * @throws IOException
-	 */
-	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
-			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-					CambriaApiException, IOException,missingReqdSetting;
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java
deleted file mode 100644
index 021db2c..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MMService.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.json.JSONException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * Contains the logic for executing calls to the Mirror Maker agent tool.
- * 
- * @author <a href="mailto:"></a>
- *
- * @since May 25, 2016
- */
-
-public interface MMService {
-
-	/*
-	 * this method calls the add white list method of a Mirror Maker agent API
-	 */
-	public void addWhiteList();
-	
-	/*
-	 * this method calls the remove white list method of a Mirror Maker agent API
-	 */
-	public void removeWhiteList();
-	
-	/*
-	 * This method calls the list white list method of a Mirror Maker agent API
-	 */
-	public void listWhiteList();
-	
-	public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException, 
-		AccessDeniedException, UnavailableException, CambriaApiException, IOException;
-	
-	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
-			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-			CambriaApiException, IOException, missingReqdSetting;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java
deleted file mode 100644
index aa3f967..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/MetricsService.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-/**
- * @author 
- *
- */
-import java.io.IOException;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-/**
- * 
- * @author author
- *
- */
-public interface MetricsService {
-	/**
-	 * 
-	 * @param ctx
-	 * @throws IOException
-	 */
-	public void get(DMaaPContext ctx) throws IOException;
-
-	/**
-	 * 
-	 * @param ctx
-	 * @param name
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 */
-	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java
deleted file mode 100644
index fc91f63..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TopicService.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-
-import org.json.JSONException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.TopicBean;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.configs.ConfigDbException;
-
-/**
- * interface provide all the topic related operations
- * 
- * @author author
- *
- */
-public interface TopicService {
-	/**
-	 * method fetch details of all the topics
-	 * 
-	 * @param dmaapContext
-	 * @throws JSONException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
-	void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
-
-	/**
-	 * method fetch details of specific topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void getTopic(DMaaPContext dmaapContext, String topicName)
-			throws ConfigDbException, IOException, TopicExistsException;
-
-	/**
-	 * method used to create the topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicBean
-	 * @throws CambriaApiException
-	 * @throws TopicExistsException
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 * @throws JSONException 
-	 */
-
-	void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
-			throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException;
-
-	/**
-	 * method used to delete to topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 * @throws ConfigDbException
-	 * @throws CambriaApiException
-	 * @throws TopicExistsException
-	 */
-
-	void deleteTopic(DMaaPContext dmaapContext, String topicName)
-			throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException;
-
-	/**
-	 * method provides list of all the publishers associated with a topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws IOException
-	 * @throws ConfigDbException
-	 * @throws TopicExistsException
-	 */
-	void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
-			throws IOException, ConfigDbException, TopicExistsException;
-
-	/**
-	 * method provides details of all the consumer associated with a specific
-	 * topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws IOException
-	 * @throws ConfigDbException
-	 * @throws TopicExistsException
-	 */
-	void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
-			throws IOException, ConfigDbException, TopicExistsException;
-
-	/**
-	 * method provides publishing right to a specific topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param producerId
-	 * @throws AccessDeniedException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
-	/**
-	 * method denies any specific publisher from a topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param producerId
-	 * @throws AccessDeniedException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
-	/**
-	 * method provide consuming right to a specific user on a topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param consumerId
-	 * @throws AccessDeniedException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
-	/**
-	 * method denies a particular user's consuming right on a topic
-	 * 
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param consumerId
-	 * @throws AccessDeniedException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java
deleted file mode 100644
index b2c8182..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/TransactionService.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
-
-import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.configs.ConfigDbException;
-
-/**
- * 
- * @author author
- *
- */
-public interface TransactionService {
-	/**
-	 * 
-	 * @param trnObj
-	 */
-	void checkTransaction(TransactionObj trnObj);
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException;
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @param transactionId
-	 * @throws ConfigDbException
-	 * @throws JSONException
-	 * @throws IOException
-	 */
-	void getTransactionObj(DMaaPContext dmaapContext, String transactionId)
-			throws ConfigDbException, JSONException, IOException;
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java
deleted file mode 100644
index daa3825..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/UIService.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-/**
- * 
- */
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service;
-
-import java.io.IOException;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-import com.att.nsa.configs.ConfigDbException;
-
-import kafka.common.TopicExistsException;
-
-/**
- * @author author
- *
- */
-public interface UIService {
-	/**
-	 * Returning template of hello page.
-	 * 
-	 * @param dmaapContext
-	 * @throws IOException
-	 */
-	void hello(DMaaPContext dmaapContext) throws IOException;
-
-	/**
-	 * Fetching list of all api keys and returning in a templated form for
-	 * display
-	 * 
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException,
-			IOException;
-
-	/**
-	 * Fetching detials of apikey in a templated form for display
-	 * 
-	 * @param dmaapContext
-	 * @param apiKey
-	 * @throws Exception
-	 */
-	void getApiKey(DMaaPContext dmaapContext, final String apiKey)
-			throws Exception;
-
-	/**
-	 * Fetching list of all the topics and returning in a templated form for
-	 * display
-	 * 
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException,
-			IOException;
-
-	/**
-	 * Fetching detials of topic in a templated form for display
-	 * 
-	 * @param dmaapContext
-	 * @param topic
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	void getTopic(DMaaPContext dmaapContext, final String topic)
-			throws ConfigDbException, IOException, TopicExistsException;
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java
deleted file mode 100644
index 96a63a7..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/AdminServiceImpl.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Set;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.AdminService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.springframework.stereotype.Component;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.limits.Blacklist;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * @author author
- *
- */
-@Component
-public class AdminServiceImpl implements AdminService {
-
-	//private Logger log = Logger.getLogger(AdminServiceImpl.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);
-	/**
-	 * getConsumerCache returns consumer cache
-	 * @param dMaaPContext context
-	 * @throws IOException ex
-	 * @throws AccessDeniedException 
-	 */
-	@Override	
-	public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
-		adminAuthenticate(dMaaPContext);
-		
-		JSONObject consumers = new JSONObject();
-		JSONArray jsonConsumersList = new JSONArray();
-
-		for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) {
-			JSONObject consumerObject = new JSONObject();
-			consumerObject.put("name", consumer.getName());
-			consumerObject.put("created", consumer.getCreateTimeMs());
-			consumerObject.put("accessed", consumer.getLastAccessMs());
-			jsonConsumersList.put(consumerObject);
-		}
-
-		consumers.put("consumers", jsonConsumersList);
-		log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "===========");
-		DMaaPResponseBuilder.respondOk(dMaaPContext, consumers);
-	}
-
-	/**
-	 * 
-	 * dropConsumerCache() method clears consumer cache
-	 * @param dMaaPContext context
-	 * @throws JSONException ex
-	 * @throws IOException ex
-	 * @throws AccessDeniedException 
-	 * 
-	 */
-	@Override
-	public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
-		adminAuthenticate(dMaaPContext);
-		getConsumerFactory(dMaaPContext).dropCache();
-		DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
-		// log.info("========== AdminServiceImpl: dropConsumerCache: Consumer
-		// Cache successfully dropped.===========");
-	}
-
-	/** 
-	 * getfConsumerFactory returns CosnumerFactory details
-	 * @param dMaaPContext contxt
-	 * @return ConsumerFactory obj
-	 * 
-	 */
-	private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
-		return dMaaPContext.getConfigReader().getfConsumerFactory();
-	}
-	
-	/**
-	 * return ipblacklist
-	 * @param dMaaPContext context
-	 * @return blacklist obj
-	 */
-	private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
-		return dMaaPContext.getConfigReader().getfIpBlackList();
-	}
-	
-	
-	/**
-	 * Get list of blacklisted ips
-	 */
-	@Override
-	public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException
-	{
-		adminAuthenticate ( dMaaPContext );
-
-		DMaaPResponseBuilder.respondOk ( dMaaPContext,
-			new JSONObject().put ( "blacklist", setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
-	}
-	
-	/**
-	 * Add ip to blacklist
-	 */
-	@Override
-	public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
-	{
-		adminAuthenticate ( dMaaPContext );
-
-		getIpBlacklist (dMaaPContext).add ( ip );
-		DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
-	}
-	
-	/**
-	 * Remove ip from blacklist
-	 */
-	@Override
-	public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
-	{
-		adminAuthenticate ( dMaaPContext );
-
-		getIpBlacklist (dMaaPContext).remove ( ip );
-		DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
-	}
-	
-	/**
-	 * Authenticate if user is admin
-	 * @param dMaaPContext context
-	 * @throws AccessDeniedException ex
-	 */
-	private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException
-	{
-		
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
-		if ( user == null || !user.getKey ().equals ( "admin" ) )
-		{
-			throw new AccessDeniedException ();
-		}
-	}
-	
-	public static JSONArray setToJsonArray ( Set<?> fields )
-	{
-		return collectionToJsonArray ( fields );
-	}
-
-	public static JSONArray collectionToJsonArray ( Collection<?> fields )
-	{
-		final JSONArray a = new JSONArray ();
-		if ( fields != null )
-		{
-			for ( Object o : fields )
-			{
-				a.put ( o );
-			}
-		}
-		return a;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java
deleted file mode 100644
index 5086f23..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/ApiKeysServiceImpl.java
+++ /dev/null
@@ -1,325 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.ApiKeyBean;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.ApiKeysService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Emailer;
-import org.springframework.stereotype.Service;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-/**
- * Implementation of the ApiKeysService, this will provide the below operations,
- * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
- * 
- * @author author
- */
-@Service
-public class ApiKeysServiceImpl implements ApiKeysService {
-
-	//private Logger log = Logger.getLogger(ApiKeysServiceImpl.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString());
-	/**
-	 * This method will provide all the ApiKeys present in kafka server.
-	 * 
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	public void getAllApiKeys(DMaaPContext dmaapContext)
-			throws ConfigDbException, IOException {
-
-		ConfigurationReader configReader = dmaapContext.getConfigReader();
-
-		log.info("configReader : " + configReader.toString());
-
-		final JSONObject result = new JSONObject();
-		final JSONArray keys = new JSONArray();
-		result.put("apiKeys", keys);
-
-		NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();
-
-		for (String key : apiDb.loadAllKeys()) {
-			keys.put(key);
-		}
-		log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : "
-				+ keys.toString() + "===========");
-		DMaaPResponseBuilder.respondOk(dmaapContext, result);
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param apikey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	@Override
-	public void getApiKey(DMaaPContext dmaapContext, String apikey)
-			throws ConfigDbException, IOException {
-
-		String errorMsg = "Api key name is not mentioned.";
-		int errorCode = HttpStatusCodes.k400_badRequest;
-		
-		if (null != apikey) {
-			NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext)
-					.loadApiKey(apikey);
-			
-		
-			if (null != simpleApiKey) {
-				JSONObject result = simpleApiKey.asJsonObject();
-				DMaaPResponseBuilder.respondOk(dmaapContext, result);
-				log.info("========== ApiKeysServiceImpl: getApiKey : "
-						+ result.toString() + "===========");
-				return;
-			} else {
-				errorMsg = "Api key [" + apikey + "] does not exist.";
-				errorCode = HttpStatusCodes.k404_notFound;
-				log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. "
-						+ "===========");
-				DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
-						errorMsg);
-				throw new IOException();
-			}
-		}
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param nsaApiKey
-	 * @throws KeyExistsException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	@Override
-	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
-			throws KeyExistsException, ConfigDbException, IOException {
-
-		log.debug("TopicService: : createApiKey....");
-		
-		
-			String contactEmail = nsaApiKey.getEmail();
-			final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
-			 String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
-			 if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false";
-			 
-	     // if ((contactEmail == null) || (contactEmail.length() == 0))
-			 if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true")    &&  !emailProvided   )
-	      {
-	        DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
-	        return;
-	      }
-		
-
-	  
-	  
-		final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
-		String apiKey = nsaApiKey.getKey();
-		String sharedSecret = nsaApiKey.getSharedSecret();
-		final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
-				sharedSecret);
-
-		if (null != key) {
-
-			if (null != nsaApiKey.getEmail()) {
-				key.setContactEmail(nsaApiKey.getEmail());
-			}
-
-			if (null != nsaApiKey.getDescription()) {
-				key.setDescription(nsaApiKey.getDescription());
-			}
-
-			log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
-					+ key.toString() + "=====");
-			apiKeyDb.saveApiKey(key);
-			// email out the secret to validate the email address
-			if ( emailProvided )
-			{
-				String body = "\n" + "Your email address was provided as the creator of new API key \""
-				+ apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
-				+ " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -"
-				+ " the API key is useless without the information below, which has been provided "
-				+ "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
-				+ sharedSecret + "\n\n" + "Note that it's normal to share the API key"
-				+ " (" + apiKey + "). " 			
-				+ "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
-				+ "However, you should NOT share the API key's secret. " + "The API key is associated with your"
-				+ " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
-				+ "share the secret, someone else can use the API key " + "to access proprietary data with your "
-				+ "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";
-	
-		        Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
-		        em.send(contactEmail, "New API Key", body);
-			}
-			log.debug("TopicService: : sending response.");
-	
-			JSONObject o = key.asJsonObject();
-			
-			o.put ( NsaSimpleApiKey.kApiSecretField,
-					emailProvided ?
-						"Emailed to " + contactEmail + "." :
-						key.getSecret ()
-				);
-			DMaaPResponseBuilder.respondOk(dmaapContext,
-					o);
-	        /*o.put("secret", "Emailed to " + contactEmail + ".");
-			DMaaPResponseBuilder.respondOk(dmaapContext,
-					o); */
-			return;
-		} else {
-			log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
-			DMaaPResponseBuilder.respondWithError(dmaapContext,
-					HttpStatusCodes.k500_internalServerError,
-					"Failed to create api key.");
-			throw new KeyExistsException(apiKey);
-		}
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param apikey
-	 * @param nsaApiKey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 */
-	@Override
-	public void updateApiKey(DMaaPContext dmaapContext, String apikey,
-			ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException {
-
-		String errorMsg = "Api key name is not mentioned.";
-		int errorCode = HttpStatusCodes.k400_badRequest;
-
-		if (null != apikey) {
-			final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
-			final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
-			boolean shouldUpdate = false;
-
-			if (null != key) {
-				final NsaApiKey user = DMaaPAuthenticatorImpl
-						.getAuthenticatedUser(dmaapContext);
-
-				if (user == null || !user.getKey().equals(key.getKey())) {
-					throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
-				}
-
-				if (null != nsaApiKey.getEmail()) {
-					key.setContactEmail(nsaApiKey.getEmail());
-					shouldUpdate = true;
-				}
-
-				if (null != nsaApiKey.getDescription()) {
-					key.setDescription(nsaApiKey.getDescription());
-					shouldUpdate = true;
-				}
-
-				if (shouldUpdate) {
-					apiKeyDb.saveApiKey(key);
-				}
-
-				log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
-						+ key.toString() + "=========");
-				DMaaPResponseBuilder.respondOk(dmaapContext,
-						key.asJsonObject());
-				return;
-			}
-		} else {
-			errorMsg = "Api key [" + apikey + "] does not exist.";
-			errorCode = HttpStatusCodes.k404_notFound;
-			DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
-					errorMsg);
-			log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
-			throw new IOException();
-		}
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param apikey
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws AccessDeniedException
-	 */
-	@Override
-	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
-			throws ConfigDbException, IOException, AccessDeniedException {
-
-		String errorMsg = "Api key name is not mentioned.";
-		int errorCode = HttpStatusCodes.k400_badRequest;
-
-		if (null != apikey) {
-			final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
-			final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
-
-			if (null != key) {
-
-				final NsaApiKey user = DMaaPAuthenticatorImpl
-						.getAuthenticatedUser(dmaapContext);
-				if (user == null || !user.getKey().equals(key.getKey())) {
-					throw new AccessDeniedException("You don't own the API key.");
-				}
-
-				apiKeyDb.deleteApiKey(key);
-				log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
-				DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
-						"Api key [" + apikey + "] deleted successfully.");
-				return;
-			}
-		} else {
-			errorMsg = "Api key [" + apikey + "] does not exist.";
-			errorCode = HttpStatusCodes.k404_notFound;
-			DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
-					errorMsg);
-			log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
-			throw new IOException();
-		}
-	}
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @return
-	 */
-	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
-		ConfigurationReader configReader = dmaapContext.getConfigReader();
-		return configReader.getfApiKeyDb();
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java
deleted file mode 100644
index be779fa..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/BaseTransactionDbImpl.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObj;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObjDB;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
-
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.ConfigPath;
-
-/**
- * Persistent storage for Transaction objects built over an abstract config db.
- * 
- * @author author
- *
- * @param <K>
- */
-public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {
-
-	private final ConfigDb fDb;
-	private final ConfigPath fBasePath;
-	private final DMaaPTransactionFactory<K> fKeyFactory;
-
-	private static final String kStdRootPath = "/transaction";
-
-	private ConfigPath makePath(String transactionId) {
-		return fBasePath.getChild(transactionId);
-	}
-
-	/**
-	 * Construct an Transaction db over the given config db at the standard
-	 * location
-	 * 
-	 * @param db
-	 * @param keyFactory
-	 * @throws ConfigDbException
-	 */
-	public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
-		this(db, kStdRootPath, keyFactory);
-	}
-
-	/**
-	 * Construct an Transaction db over the given config db using the given root
-	 * location
-	 * 
-	 * @param db
-	 * @param rootPath
-	 * @param keyFactory
-	 * @throws ConfigDbException
-	 */
-	public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
-			throws ConfigDbException {
-		fDb = db;
-		fBasePath = db.parse(rootPath);
-		fKeyFactory = keyFactory;
-
-		if (!db.exists(fBasePath)) {
-			db.store(fBasePath, "");
-		}
-	}
-
-	/**
-	 * Create a new Transaction Obj. If one exists,
-	 * 
-	 * @param id
-	 * @return the new Transaction record
-	 * @throws ConfigDbException
-	 */
-	public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
-		final ConfigPath path = makePath(id);
-		if (fDb.exists(path)) {
-			throw new KeyExistsException(id);
-		}
-
-		// make one, store it, return it
-		final K newKey = fKeyFactory.makeNewTransactionId(id);
-		fDb.store(path, newKey.serialize());
-		return newKey;
-	}
-
-	/**
-	 * Save an Transaction record. This must be used after changing auxiliary
-	 * data on the record. Note that the transaction object must exist (via
-	 * createTransactionObj).
-	 * 
-	 * @param transaction
-	 *            object
-	 * @throws ConfigDbException
-	 */
-	@Override
-	public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
-		final ConfigPath path = makePath(trnObj.getId());
-		if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) {
-			throw new IllegalStateException(trnObj.getId() + " is not known to this database");
-		}
-		fDb.store(path, ((TransactionObj) trnObj).serialize());
-	}
-
-	/**
-	 * Load an Transaction record based on the Transaction Id value
-	 * 
-	 * @param transactionId
-	 * @return an Transaction Object record or null
-	 * @throws ConfigDbException
-	 */
-	@Override
-	public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
-		final String data = fDb.load(makePath(transactionId));
-		if (data != null) {
-			return fKeyFactory.makeNewTransactionObj(data);
-		}
-		return null;
-	}
-
-	/**
-	 * Load all transactions known to this database. (This could be expensive.)
-	 * 
-	 * @return a set of all Transaction objects
-	 * @throws ConfigDbException
-	 */
-	public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
-		final TreeSet<String> result = new TreeSet<String>();
-		for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) {
-			result.add(cp.getName());
-		}
-		return result;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java
deleted file mode 100644
index dce5a19..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/EventsServiceImpl.java
+++ /dev/null
@@ -1,788 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.http.HttpStatus;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPCambriaLimiter;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaOutboundEventStream;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.EventsService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * This class provides the functinality to publish and subscribe message to
- * kafka
- * 
- * @author author
- *
- */
-@Service
-public class EventsServiceImpl implements EventsService {
-	//private static final Logger LOG = Logger.getLogger(EventsServiceImpl.class);
-	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
-
-	private static final String BATCH_LENGTH = "event.batch.length";
-	private static final String TRANSFER_ENCODING = "Transfer-Encoding";
-	@Autowired
-	private DMaaPErrorMessages errorMessages;
-
-	//@Value("${metrics.send.cambria.topic}")
-	//private String metricsTopic;
-	
-	/**
-	 * @param ctx
-	 * @param topic
-	 * @param consumerGroup
-	 * @param clientId
-	 * @throws ConfigDbException,
-	 *             TopicExistsException, AccessDeniedException,
-	 *             UnavailableException, CambriaApiException, IOException
-	 * 
-	 * 
-	 */
-	@Override
-	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
-			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
-			CambriaApiException, IOException,DMaaPAccessDeniedException {
-		final long startTime = System.currentTimeMillis();
-		final HttpServletRequest req = ctx.getRequest();
-
-		boolean isAAFTopic=false;
-		// was this host blacklisted?
-		final String remoteAddr = Utils.getRemoteAddress(ctx);;
-		if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
-		{
-			
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
-				"] is blacklisted. Please contact the cluster management team."
-					,null,Utils.getFormattedDate(new Date()),topic,
-					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-					null,null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-		
-		
-		int limit = CambriaConstants.kNoLimit;
-		if (req.getParameter("limit") != null) {
-			limit = Integer.parseInt(req.getParameter("limit"));
-		}
-
-		int timeoutMs= CambriaConstants.kNoTimeout;
-		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"timeout");
-		if(strtimeoutMS!=null)timeoutMs=Integer.parseInt(strtimeoutMS);
-		//int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", CambriaConstants.kNoTimeout);
-		if (req.getParameter("timeout") != null) {
-			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
-		}
-
-		// By default no filter is applied if filter is not passed as a
-		// parameter in the request URI
-		String topicFilter = CambriaConstants.kNoFilter;
-		if (null != req.getParameter("filter")) {
-			topicFilter = req.getParameter("filter");
-		}
-		// pretty to print the messaages in new line
-		String prettyval="0";
-		String strPretty=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"pretty");
-		if (null!=strPretty)prettyval=strPretty;
-		
-		String metaval="0";
-		String strmeta=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"meta");
-		if (null!=strmeta)metaval=strmeta;
-		
-		final boolean pretty = rrConvertor
-				.convertToBooleanBroad(prettyval);
-		// withMeta to print offset along with message
-		final boolean withMeta = rrConvertor
-				.convertToBooleanBroad(metaval);
-		
-		
-		/*final boolean pretty = rrConvertor
-				.convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("pretty", "0"));
-		// withMeta to print offset along with message
-		final boolean withMeta = rrConvertor
-				.convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("meta", "0"));
-*/
-		final LogWrap logger = new LogWrap ( topic, consumerGroup, clientId);
-		logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter);
-
-		// is this user allowed to read this topic?
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-		
-		if (metatopic == null) {
-			// no such topic.
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, 
-					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), 
-					errorMessages.getTopicNotExist()+"-[" + topic + "]",null,Utils.getFormattedDate(new Date()),topic,null,null,
-					clientId,ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-		String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-		if (null==metricTopicname)
-   		 metricTopicname="msgrtr.apinode.metrics.dmaap";
-		
-		 if(null==ctx.getRequest().getHeader("Authorization")&& !topic.equalsIgnoreCase(metricTopicname))
-		{	
-			if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))){
-			// check permissions
-			metatopic.checkUserRead(user);	
-			}
-		}
-		// if headers are not provided then user will be null
-		 if(user == null && null!=ctx.getRequest().getHeader("Authorization"))
-		{
-			// the topic name will be sent by the client
-//			String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
-			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-			String permission = aaf.aafPermissionString(topic, "sub");
-			if(!aaf.aafAuthentication(ctx.getRequest(), permission))
-			{
-				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-						errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,null,null,
-						clientId,ctx.getRequest().getRemoteHost());
-				LOG.info(errRes.toString());
-				throw new DMaaPAccessDeniedException(errRes);
-				
-			}
-			isAAFTopic = true;
-		}
-		Consumer c = null;
-		try {
-			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-			final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
-			rl.onCall(topic, consumerGroup, clientId);
-
-			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
-
-		/*	final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c,
-					ctx.getConfigReader().getSettings()).timeout(timeoutMs).limit(limit).filter(topicFilter)
-							.pretty(pretty).withMeta(withMeta)
-							// .atOffset(topicOffset)
-							.build();*/
-			final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs).limit(limit).filter(topicFilter)
-					.pretty(pretty).withMeta(withMeta).build();
-			coes.setDmaapContext(ctx);
-			coes.setTopic(metatopic);
-			if( isTransEnabled() || isAAFTopic ){
-				coes.setTransEnabled(true);
-			}else{
-			coes.setTransEnabled(false);
-			}
-			coes.setTopicStyle(isAAFTopic);
-            
-			DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
-			DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
-
-			// No IOException thrown during respondOkWithStream, so commit the
-			// new offsets to all the brokers
-			c.commitOffsets();
-			final int sent = coes.getSentCount();
-
-			metricsSet.consumeTick(sent);
-			rl.onSend(topic, consumerGroup, clientId, sent);
-
-			final long elapsedMs = System.currentTimeMillis() - startTime;
-			logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset());
-
-		} catch (UnavailableException excp) {
-			logger.warn(excp.getMessage(), excp);
-			
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
-					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
-					errorMessages.getServerUnav()+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
-					clientId,ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-			
-		} catch (CambriaApiException excp) {
-			logger.warn(excp.getMessage(), excp);
-			throw excp;
-		} catch (Exception excp) {
-			logger.warn("Couldn't respond to client, closing cambria consumer", excp);
-			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
-			
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, 
-					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), 
-					"Couldn't respond to client, closing cambria consumer"+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
-					clientId,ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		} finally {
-			// If no cache, close the consumer now that we're done with it.
-			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
-			String strkSetting_EnableCache=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,ConsumerFactory.kSetting_EnableCache);
-			if(null!=strkSetting_EnableCache) kSetting_EnableCache=Boolean.parseBoolean(strkSetting_EnableCache);
-			//if (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache,	ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) {
-			if (!kSetting_EnableCache && (c != null)) {
-				c.close();
-
-			}
-		}
-	}
-
-	/**
-	 * @throws missingReqdSetting 
-	 * 
-	 */
-	@Override
-	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
-			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-			CambriaApiException, IOException, missingReqdSetting,DMaaPAccessDeniedException {
-
-		// is this user allowed to write to this topic?
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-		boolean isAAFTopic=false;
-		
-			// was this host blacklisted?
-				final String remoteAddr = Utils.getRemoteAddress(ctx);
-				
-				if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
-				{
-					
-					ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-							DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
-						"] is blacklisted. Please contact the cluster management team."
-							,null,Utils.getFormattedDate(new Date()),topic,
-							Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-							null,null);
-					LOG.info(errRes.toString());
-					throw new CambriaApiException(errRes);
-				}
-				
-				  String topicNameStd = null;
-	               
-	               //	topicNameStd= ctx.getConfigReader().getSettings().getString("enforced.topic.name.AAF");
-	            	topicNameStd= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
-	            	String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
-	            	 if (null==metricTopicname)
-	            		 metricTopicname="msgrtr.apinode.metrics.dmaap";
-	                boolean topicNameEnforced=false;
-	                if (null != topicNameStd && topic.startsWith(topicNameStd)  )
-	                {
-	                	topicNameEnforced = true;
-	                }
-		
-	               //Here check if the user has rights to publish on the topic
-	               //( This will be called when no auth is added or when UEB API Key Authentication is used)
-	               //checkUserWrite(user) method will throw an error when there is no Auth header added or when the
-	               //user has no publish rights
-	                
-				if(null != metatopic &&  null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) && null==ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) 
-				{
-					metatopic.checkUserWrite(user);
-				}
-
-	
-				
-				// if headers are not provided then user will be null
-		 if(topicNameEnforced || (user == null && null!=ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)))
-		{
-			// the topic name will be sent by the client
-						// String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
-						DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-						String permission = aaf.aafPermissionString(topic, "pub");
-						if(!aaf.aafAuthentication(ctx.getRequest(), permission))
-						{
-							ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-									DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-									errorMessages.getNotPermitted1()+" publish "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,
-									Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-									null,null);
-							LOG.info(errRes.toString());
-							throw new DMaaPAccessDeniedException(errRes);
-						}
-						isAAFTopic=true;
-		}	
-		 
-		final HttpServletRequest req = ctx.getRequest();
-
-		// check for chunked input
-		boolean chunked = false;
-		if (null != req.getHeader(TRANSFER_ENCODING)) {
-			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
-		}
-		// get the media type, or set it to a generic value if it wasn't
-		// provided
-		String mediaType = req.getContentType();
-		if (mediaType == null || mediaType.length() == 0) {
-			mediaType = MimeTypes.kAppGenericBinary;
-		}
-
-		if (mediaType.contains("charset=UTF-8")) {
-			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
-		}
-		
-		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-		boolean istransidreqd=false;
-		if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")){
-			istransidreqd = true; 
-		}
-		
-		if (isAAFTopic || istransidreqd ) {
-			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
-		}
-		else
-		{
-			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
-		}
-			
-
-	}
-
-	/**
-	 * 
-	 * @param ctx
-	 * @param topic
-	 * @param msg
-	 * @param defaultPartition
-	 * @param chunked
-	 * @param mediaType
-	 * @throws ConfigDbException
-	 * @throws AccessDeniedException
-	 * @throws TopicExistsException
-	 * @throws CambriaApiException
-	 * @throws IOException
-	 */
-	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
-			boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-					CambriaApiException, IOException {
-		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-		// setup the event set
-		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
-		// start processing, building a batch to push to the backend
-		final long startMs = System.currentTimeMillis();
-		long count = 0;
-		
-		long maxEventBatch=1024 * 16;
-		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
-		if(null!=batchlen)maxEventBatch=Long.parseLong(batchlen);
-		
-		// long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
-		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-		try {
-			// for each message...
-			Publisher.message m = null;
-			while ((m = events.next()) != null) {
-				// add the message to the batch
-				batch.add(m);
-				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-						m.getMessage());
-				kms.add(data);
-				// check if the batch is full
-				final int sizeNow = batch.size();
-				if (sizeNow > maxEventBatch) {
-					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-					kms.clear();
-					batch.clear();
-					metricsSet.publishTick(sizeNow);
-					count += sizeNow;
-				}
-			}
-
-			// send the pending batch
-			final int sizeNow = batch.size();
-			if (sizeNow > 0) {
-				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-				kms.clear();
-				batch.clear();
-				metricsSet.publishTick(sizeNow);
-				count += sizeNow;
-			}
-
-			final long endMs = System.currentTimeMillis();
-			final long totalMs = endMs - startMs;
-
-			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-			// build a responseP
-			final JSONObject response = new JSONObject();
-			response.put("count", count);
-			response.put("serverTimeMs", totalMs);
-			DMaaPResponseBuilder.respondOk(ctx, response);
-
-		} catch (Exception excp) {
-			int status = HttpStatus.SC_NOT_FOUND;
-			String errorMsg=null;
-			if(excp instanceof CambriaApiException) {
-				 status = ((CambriaApiException) excp).getStatus();
-				 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-				 JSONObject errObject = new JSONObject(jsonTokener);
-				 errorMsg = (String) errObject.get("message");
-					
-			}
-			ErrorResponse errRes = new ErrorResponse(status, 
-					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-					errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
-					null,ctx.getRequest().getRemoteHost(),
-					null,null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-			
-			
-		}
-	}
-
-	/**
-	 * 
-	 * @param ctx
-	 * @param inputStream
-	 * @param topic
-	 * @param partitionKey
-	 * @param requestTime
-	 * @param chunked
-	 * @param mediaType
-	 * @throws ConfigDbException
-	 * @throws AccessDeniedException
-	 * @throws TopicExistsException
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 */
-	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
-			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
-					throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
-					CambriaApiException {
-
-		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-		// setup the event set
-		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
-		// start processing, building a batch to push to the backend
-		final long startMs = System.currentTimeMillis();
-		long count = 0;
-		long maxEventBatch =  1024 * 16;
-		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
-			if(null!=evenlen)maxEventBatch=Long.parseLong(evenlen);
-		//final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
-		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-		Publisher.message m = null;
-		int messageSequence = 1;
-		Long batchId = 1L;
-		final boolean transactionEnabled = true;
-		int publishBatchCount=0;
-		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
-		//LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
-		try {
-			// for each message...
-			batchId=DMaaPContext.getBatchID();
-			
-			String responseTransactionId = null;
-			
-			while ((m = events.next()) != null) {
-			
-				//LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
-				
-
-				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
-						transactionEnabled);
-				messageSequence++;
-
-				// add the message to the batch
-				batch.add(m);
-				
-				responseTransactionId = m.getLogDetails().getTransactionId();
-				
-				JSONObject jsonObject = new JSONObject();
-				jsonObject.put("message", m.getMessage());
-				jsonObject.put("transactionId", responseTransactionId);
-				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-						jsonObject.toString());
-				kms.add(data);
-
-				// check if the batch is full
-				final int sizeNow = batch.size();
-				if (sizeNow >= maxEventBatch) {
-					String startTime = sdf.format(new Date());
-					LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
-					try {
-						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-						//transactionLogs(batch);
-						for (message msg : batch) {
-							LogDetails logDetails = msg.getLogDetails();
-							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-						}
-					} catch (Exception excp) {
-						
-						int status = HttpStatus.SC_NOT_FOUND;
-						String errorMsg=null;
-						if(excp instanceof CambriaApiException) {
-							 status = ((CambriaApiException) excp).getStatus();
-							 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-							 JSONObject errObject = new JSONObject(jsonTokener);
-							 errorMsg = (String) errObject.get("message");
-						}
-						ErrorResponse errRes = new ErrorResponse(status, 
-								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-								"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+ "."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,
-								null,Utils.getFormattedDate(new Date()),topic,
-								Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-								null,null);
-						LOG.info(errRes.toString());
-						throw new CambriaApiException(errRes);
-					}
-					kms.clear();
-					batch.clear();
-					metricsSet.publishTick(sizeNow);
-					publishBatchCount=sizeNow;
-					count += sizeNow;
-					//batchId++;
-					String endTime = sdf.format(new Date());
-					LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
-							+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
-					batchId=DMaaPContext.getBatchID();
-				}
-			}
-
-			// send the pending batch
-			final int sizeNow = batch.size();
-			if (sizeNow > 0) {
-				String startTime = sdf.format(new Date());
-				LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
-				try {
-					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-					//transactionLogs(batch);
-					for (message msg : batch) {
-						LogDetails logDetails = msg.getLogDetails();
-						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-					}
-				} catch (Exception excp) {
-					int status = HttpStatus.SC_NOT_FOUND;
-					String errorMsg=null;
-					if(excp instanceof CambriaApiException) {
-						 status = ((CambriaApiException) excp).getStatus();
-						 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-						 JSONObject errObject = new JSONObject(jsonTokener);
-						 errorMsg = (String) errObject.get("message");
-					}
-					
-					ErrorResponse errRes = new ErrorResponse(status, 
-							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-							"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+ errorMessages.getPublishMsgCount()+count+"."+errorMsg,
-							null,Utils.getFormattedDate(new Date()),topic,
-							Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-							null,null);
-					LOG.info(errRes.toString());
-					throw new CambriaApiException(errRes);
-				}
-				kms.clear();
-				metricsSet.publishTick(sizeNow);
-				count += sizeNow;
-				//batchId++;
-				String endTime = sdf.format(new Date());
-				publishBatchCount=sizeNow;
-				LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
-						+ ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
-			}
-
-			final long endMs = System.currentTimeMillis();
-			final long totalMs = endMs - startMs;
-
-			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-			if (null != responseTransactionId) {
-				ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
-			}
-			
-			// build a response
-			final JSONObject response = new JSONObject();
-			response.put("count", count);
-			response.put("serverTimeMs", totalMs);
-			DMaaPResponseBuilder.respondOk(ctx, response);
-			
-		} catch (Exception excp) {
-			int status = HttpStatus.SC_NOT_FOUND;
-			String errorMsg=null;
-			if(excp instanceof CambriaApiException) {
-				 status = ((CambriaApiException) excp).getStatus();
-				 JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-				 JSONObject errObject = new JSONObject(jsonTokener);
-				 errorMsg = (String) errObject.get("message");
-			}
-			
-			ErrorResponse errRes = new ErrorResponse(
-					status, 
-					DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), 
-					"Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
-					Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
-					null,null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-	}
-
-	/**
-	 * 
-	 * @param msg
-	 * @param topic
-	 * @param request
-	 * @param messageCreationTime
-	 * @param messageSequence
-	 * @param batchId
-	 * @param transactionEnabled
-	 */
-	private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
-			final String messageCreationTime, final int messageSequence, final Long batchId,
-			final boolean transactionEnabled) {
-		LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
-				transactionEnabled);
-		logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
-		msg.setTransactionEnabled(transactionEnabled);
-		msg.setLogDetails(logDetails);
-	}
-
-
-
-	/**
-	 * 
-	 * @author author
-	 *
-	 */
-	private static class LogWrap {
-		private final String fId;
-
-		/**
-		 * constructor initialization
-		 * 
-		 * @param topic
-		 * @param cgroup
-		 * @param cid
-		 */
-		public LogWrap(String topic, String cgroup, String cid) {
-			fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
-		}
-
-		/**
-		 * 
-		 * @param msg
-		 */
-		public void info(String msg) {
-			LOG.info(fId + msg);
-		}
-
-		/**
-		 * 
-		 * @param msg
-		 * @param t
-		 */
-		public void warn(String msg, Exception t) {
-			LOG.warn(fId + msg, t);
-		}
-
-	}
-	
-	private boolean isTransEnabled() {
-		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
-		boolean istransidreqd=false;
-		if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) ){
-			istransidreqd = true; 
-		}
-		
-		return istransidreqd;
-
-	}
-
-	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
-			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
-		LogDetails logDetails = new LogDetails();
-		logDetails.setTopicId(topicName);
-		logDetails.setMessageTimestamp(messageTimestamp);
-		logDetails.setPublisherId(Utils.getUserApiKey(request));
-		logDetails.setPublisherIp(request.getRemoteHost());
-		logDetails.setMessageBatchId(batchId);
-		logDetails.setMessageSequence(String.valueOf(messageSequence));
-		logDetails.setTransactionEnabled(transactionEnabled);
-		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
-		logDetails.setServerIp(request.getLocalAddr());
-		return logDetails;
-	}
-
-	/*public String getMetricsTopic() {
-		return metricsTopic;
-	}
-
-	public void setMetricsTopic(String metricsTopic) {
-		this.metricsTopic = metricsTopic;
-	}*/
-
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java
deleted file mode 100644
index cdba378..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MMServiceImpl.java
+++ /dev/null
@@ -1,604 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.Context;
-
-import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Consumer;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher.message;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.LogDetails;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaEventSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.resources.CambriaOutboundEventStream;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.MMService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.ConfigurationReader;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Utils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-import kafka.producer.KeyedMessage;
-
-@Service
-public class MMServiceImpl implements MMService {
-	private static final String BATCH_LENGTH = "event.batch.length";
-	private static final String TRANSFER_ENCODING = "Transfer-Encoding";
-	//private static final Logger LOG = Logger.getLogger(MMServiceImpl.class);
-	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class);
-	@Autowired
-	private DMaaPErrorMessages errorMessages;
-
-	@Autowired
-	@Qualifier("configurationReader")
-	private ConfigurationReader configReader;
-
-	// HttpServletRequest object
-	@Context
-	private HttpServletRequest request;
-
-	// HttpServletResponse object
-	@Context
-	private HttpServletResponse response;
-
-	@Override
-	public void addWhiteList() {
-
-	}
-
-	@Override
-	public void removeWhiteList() {
-
-	}
-
-	@Override
-	public void listWhiteList() {
-
-	}
-
-	@Override
-	public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
-			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
-			CambriaApiException, IOException {
-
-		// final long startTime = System.currentTimeMillis();
-		final HttpServletRequest req = ctx.getRequest();
-		ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
-		// was this host blacklisted?
-		final String remoteAddr = Utils.getRemoteAddress(ctx);
-		
-		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
-
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
-					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
-					ctx.getRequest().getRemoteHost(), null, null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-
-		int limit = CambriaConstants.kNoLimit;
-
-		if (req.getParameter("limit") != null) {
-			limit = Integer.parseInt(req.getParameter("limit"));
-		}
-		limit = 1;
-		// int timeoutMs = 60000;
-		int timeoutMs = CambriaConstants.kNoTimeout;
-		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
-		if (strtimeoutMS != null)
-			timeoutMs = Integer.parseInt(strtimeoutMS);
-		// int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
-		// CambriaConstants.kNoTimeout);
-		if (req.getParameter("timeout") != null) {
-			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
-		}
-
-		// By default no filter is applied if filter is not passed as a
-		// parameter in the request URI
-		String topicFilter = CambriaConstants.kNoFilter;
-		if (null != req.getParameter("filter")) {
-			topicFilter = req.getParameter("filter");
-		}
-		// pretty to print the messaages in new line
-		String prettyval = "0";
-		String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
-		if (null != strPretty)
-			prettyval = strPretty;
-
-		String metaval = "0";
-		String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
-		if (null != strmeta)
-			metaval = strmeta;
-
-		final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
-		// withMeta to print offset along with message
-		final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
-
-		// is this user allowed to read this topic?
-		//final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-
-		if (metatopic == null) {
-			// no such topic.
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
-					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
-					errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
-					topic, null, null, clientId, ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-		//String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,	"metrics.send.cambria.topic");
-		/*
-		 * if (null==metricTopicname)
-		 * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null)
-		 * if(null==ctx.getRequest().getHeader("Authorization")&&
-		 * !topic.equalsIgnoreCase(metricTopicname)) { if (null !=
-		 * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check
-		 * permissions metatopic.checkUserRead(user); } }
-		 */
-
-		Consumer c = null;
-		try {
-			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
-
-			final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
-					.limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
-			coes.setDmaapContext(ctx);
-			coes.setTopic(metatopic);
-
-			DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
-			try {
-				coes.write(baos);
-			} catch (Exception ex) {
-
-			}
-
-			c.commitOffsets();
-			final int sent = coes.getSentCount();
-
-			metricsSet.consumeTick(sent);
-
-		} catch (UnavailableException excp) {
-
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
-					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
-					errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
-					null, null, clientId, ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-
-		} catch (CambriaApiException excp) {
-
-			throw excp;
-		} catch (Exception excp) {
-
-			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
-
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
-					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
-					"Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
-					Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		} finally {
-
-			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
-			String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
-					ConsumerFactory.kSetting_EnableCache);
-			if (null != strkSetting_EnableCache)
-				kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
-
-			if (!kSetting_EnableCache && (c != null)) {
-				c.close();
-
-			}
-		}
-		return baos.toString();
-	}
-
-	@Override
-	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
-			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-					CambriaApiException, IOException, missingReqdSetting {
-
-		//final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
-		//final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-
-		final String remoteAddr = Utils.getRemoteAddress(ctx);
-
-		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
-
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
-					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
-					ctx.getRequest().getRemoteHost(), null, null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-
-		String topicNameStd = null;
-
-		topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
-				"enforced.topic.name.AAF");
-		String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
-				"metrics.send.cambria.topic");
-		if (null == metricTopicname)
-			metricTopicname = "msgrtr.apinode.metrics.dmaap";
-		boolean topicNameEnforced = false;
-		if (null != topicNameStd && topic.startsWith(topicNameStd)) {
-			topicNameEnforced = true;
-		}
-
-		final HttpServletRequest req = ctx.getRequest();
-
-		boolean chunked = false;
-		if (null != req.getHeader(TRANSFER_ENCODING)) {
-			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
-		}
-
-		String mediaType = req.getContentType();
-		if (mediaType == null || mediaType.length() == 0) {
-			mediaType = MimeTypes.kAppGenericBinary;
-		}
-
-		if (mediaType.contains("charset=UTF-8")) {
-			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
-		}
-
-		if (!topic.equalsIgnoreCase(metricTopicname)) {
-			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
-		} else {
-			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
-		}
-	}
-
-	private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
-			final String messageCreationTime, final int messageSequence, final Long batchId,
-			final boolean transactionEnabled) {
-		LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
-				transactionEnabled);
-		logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
-		msg.setTransactionEnabled(transactionEnabled);
-		msg.setLogDetails(logDetails);
-	}
-
-	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
-			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
-		LogDetails logDetails = new LogDetails();
-		logDetails.setTopicId(topicName);
-		logDetails.setMessageTimestamp(messageTimestamp);
-		logDetails.setPublisherId(Utils.getUserApiKey(request));
-		logDetails.setPublisherIp(request.getRemoteHost());
-		logDetails.setMessageBatchId(batchId);
-		logDetails.setMessageSequence(String.valueOf(messageSequence));
-		logDetails.setTransactionEnabled(transactionEnabled);
-		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
-		logDetails.setServerIp(request.getLocalAddr());
-		return logDetails;
-	}
-
-	private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
-			String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
-					CambriaApiException, IOException {
-		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-		// setup the event set
-		final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
-		// start processing, building a batch to push to the backend
-		final long startMs = System.currentTimeMillis();
-		long count = 0;
-
-		long maxEventBatch = 1024 * 16;
-		String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
-		if (null != batchlen)
-			maxEventBatch = Long.parseLong(batchlen);
-
-		// long maxEventBatch =
-		// ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
-		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-		try {
-			// for each message...
-			Publisher.message m = null;
-			while ((m = events.next()) != null) {
-				// add the message to the batch
-				batch.add(m);
-				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-						m.getMessage());
-				kms.add(data);
-				// check if the batch is full
-				final int sizeNow = batch.size();
-				if (sizeNow > maxEventBatch) {
-					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-					kms.clear();
-					batch.clear();
-					metricsSet.publishTick(sizeNow);
-					count += sizeNow;
-				}
-			}
-
-			// send the pending batch
-			final int sizeNow = batch.size();
-			if (sizeNow > 0) {
-				ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-				kms.clear();
-				batch.clear();
-				metricsSet.publishTick(sizeNow);
-				count += sizeNow;
-			}
-
-			final long endMs = System.currentTimeMillis();
-			final long totalMs = endMs - startMs;
-
-			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-			// build a responseP
-			final JSONObject response = new JSONObject();
-			response.put("count", count);
-			response.put("serverTimeMs", totalMs);
-			// DMaaPResponseBuilder.respondOk(ctx, response);
-
-		} catch (Exception excp) {
-
-			int status = HttpStatus.SC_NOT_FOUND;
-			String errorMsg = null;
-			if (excp instanceof CambriaApiException) {
-				status = ((CambriaApiException) excp).getStatus();
-				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-				JSONObject errObject = new JSONObject(jsonTokener);
-				errorMsg = (String) errObject.get("message");
-
-			}
-			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
-					errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
-							+ "." + errorMsg,
-					null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
-					null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-
-		}
-	}
-
-	private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
-			final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
-					throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
-					CambriaApiException {
-
-		final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
-		// setup the event set
-		final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
-		// start processing, building a batch to push to the backend
-		final long startMs = System.currentTimeMillis();
-		long count = 0;
-		long maxEventBatch = 1024 * 16;
-		String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
-		if (null != evenlen)
-			maxEventBatch = Long.parseLong(evenlen);
-
-		final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
-		final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
-		Publisher.message m = null;
-		int messageSequence = 1;
-		Long batchId = 1L;
-		final boolean transactionEnabled = true;
-		int publishBatchCount = 0;
-		SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
-		// LOG.warn("Batch Start Id: " +
-		// Utils.getFromattedBatchSequenceId(batchId));
-		try {
-			// for each message...
-			batchId = DMaaPContext.getBatchID();
-
-			String responseTransactionId = null;
-
-			while ((m = events.next()) != null) {
-
-				// LOG.warn("Batch Start Id: " +
-				// Utils.getFromattedBatchSequenceId(batchId));
-
-				addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
-						transactionEnabled);
-				messageSequence++;
-
-				// add the message to the batch
-				batch.add(m);
-
-				responseTransactionId = m.getLogDetails().getTransactionId();
-
-				JSONObject jsonObject = new JSONObject();
-				jsonObject.put("message", m.getMessage());
-				jsonObject.put("transactionId", responseTransactionId);
-				final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
-						jsonObject.toString());
-				kms.add(data);
-
-				// check if the batch is full
-				final int sizeNow = batch.size();
-				if (sizeNow >= maxEventBatch) {
-					String startTime = sdf.format(new Date());
-					LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
-							+ batchId + "]");
-					try {
-						ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-						// transactionLogs(batch);
-						for (message msg : batch) {
-							LogDetails logDetails = msg.getLogDetails();
-							LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-						}
-					} catch (Exception excp) {
-
-						int status = HttpStatus.SC_NOT_FOUND;
-						String errorMsg = null;
-						if (excp instanceof CambriaApiException) {
-							status = ((CambriaApiException) excp).getStatus();
-							JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-							JSONObject errObject = new JSONObject(jsonTokener);
-							errorMsg = (String) errObject.get("message");
-						}
-						ErrorResponse errRes = new ErrorResponse(status,
-								DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
-								"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
-										+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
-								null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
-								ctx.getRequest().getRemoteHost(), null, null);
-						LOG.info(errRes.toString());
-						throw new CambriaApiException(errRes);
-					}
-					kms.clear();
-					batch.clear();
-					metricsSet.publishTick(sizeNow);
-					publishBatchCount = sizeNow;
-					count += sizeNow;
-					// batchId++;
-					String endTime = sdf.format(new Date());
-					LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
-							+ batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
-							+ ",Batch End Time=" + endTime + "]");
-					batchId = DMaaPContext.getBatchID();
-				}
-			}
-
-			// send the pending batch
-			final int sizeNow = batch.size();
-			if (sizeNow > 0) {
-				String startTime = sdf.format(new Date());
-				LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
-						+ batchId + "]");
-				try {
-					ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
-					// transactionLogs(batch);
-					for (message msg : batch) {
-						LogDetails logDetails = msg.getLogDetails();
-						LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
-					}
-				} catch (Exception excp) {
-					int status = HttpStatus.SC_NOT_FOUND;
-					String errorMsg = null;
-					if (excp instanceof CambriaApiException) {
-						status = ((CambriaApiException) excp).getStatus();
-						JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-						JSONObject errObject = new JSONObject(jsonTokener);
-						errorMsg = (String) errObject.get("message");
-					}
-
-					ErrorResponse errRes = new ErrorResponse(status,
-							DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
-							"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
-									+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
-							null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
-							ctx.getRequest().getRemoteHost(), null, null);
-					LOG.info(errRes.toString());
-					throw new CambriaApiException(errRes);
-				}
-				kms.clear();
-				metricsSet.publishTick(sizeNow);
-				count += sizeNow;
-				// batchId++;
-				String endTime = sdf.format(new Date());
-				publishBatchCount = sizeNow;
-				LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
-						+ ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
-						+ endTime + "]");
-			}
-
-			final long endMs = System.currentTimeMillis();
-			final long totalMs = endMs - startMs;
-
-			LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-			// build a response
-			final JSONObject response = new JSONObject();
-			response.put("count", count);
-			response.put("serverTimeMs", totalMs);
-
-		} catch (Exception excp) {
-			int status = HttpStatus.SC_NOT_FOUND;
-			String errorMsg = null;
-			if (excp instanceof CambriaApiException) {
-				status = ((CambriaApiException) excp).getStatus();
-				JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
-				JSONObject errObject = new JSONObject(jsonTokener);
-				errorMsg = (String) errObject.get("message");
-			}
-
-			ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
-					"Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
-							+ errorMessages.getPublishMsgCount() + count + "." + errorMsg,
-					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
-					ctx.getRequest().getRemoteHost(), null, null);
-			LOG.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-		}
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java
deleted file mode 100644
index c7db60d..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/MetricsServiceImpl.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.MetricsService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.springframework.stereotype.Component;
-
-import com.att.nsa.metrics.CdmMeasuredItem;
-
-/**
- * 
- * 
- * This will provide all the generated metrics details also it can provide the
- * get metrics details
- * 
- * 
- * @author author
- *
- *
- */
-@Component
-public class MetricsServiceImpl implements MetricsService {
-
-	//private static final Logger LOG = Logger.getLogger(MetricsService.class.toString());
-	private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class);
-	/**
-	 * 
-	 * 
-	 * @param ctx
-	 * @throws IOException
-	 * 
-	 * 
-	 * get Metric details
-	 * 
-	 */
-	@Override
-	
-	public void get(DMaaPContext ctx) throws IOException {
-		LOG.info("Inside  : MetricsServiceImpl : get()");
-		final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
-		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-		final JSONObject result = metrics.toJson();
-		DMaaPResponseBuilder.respondOk(ctx, result);
-		LOG.info("============ Metrics generated : " + result.toString() + "=================");
-
-	}
-
-
-	@Override
-	/**
-	 * 
-	 * get Metric by name
-	 * 
-	 * 
-	 * @param ctx
-	 * @param name
-	 * @throws IOException
-	 * @throws CambriaApiException
-	 * 
-	 * 
-	 */
-	public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException {
-		LOG.info("Inside  : MetricsServiceImpl : getMetricByName()");
-		final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
-
-		final CdmMeasuredItem item = metrics.getItem(name);
-		/**
-		 * check if item is null
-		 */
-		if (item == null) {
-			throw new CambriaApiException(404, "No metric named [" + name + "].");
-		}
-
-		final JSONObject entry = new JSONObject();
-		entry.put("summary", item.summarize());
-		entry.put("raw", item.getRawValueString());
-
-		DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
-		final JSONObject result = new JSONObject();
-		result.put(name, entry);
-
-		DMaaPResponseBuilder.respondOk(ctx, result);
-		LOG.info("============ Metrics generated : " + entry.toString() + "=================");
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java
deleted file mode 100644
index a04e110..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TopicServiceImpl.java
+++ /dev/null
@@ -1,649 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-/**
- * 
- */
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-
-import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.TopicBean;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPAccessDeniedException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker.TopicExistsException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticator;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.TopicService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * @author author
- *
- */
-@Service
-public class TopicServiceImpl implements TopicService {
-
-	//private static final Logger LOGGER = Logger.getLogger(TopicServiceImpl.class);
-	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
-	@Autowired
-	private DMaaPErrorMessages errorMessages;
-	
-	//@Value("${msgRtr.topicfactory.aaf}")
-	//private String mrFactory;
-	
-	
-	/**
-	 * @param dmaapContext
-	 * @throws JSONException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * 
-	 */
-	@Override
-	public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
-
-		LOGGER.info("Fetching list of all the topics.");
-		JSONObject json = new JSONObject();
-
-		JSONArray topicsList = new JSONArray();
-
-		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
-			topicsList.put(topic.getName());
-		}
-
-		json.put("topics", topicsList);
-
-		LOGGER.info("Returning list of all the topics.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @throws JSONException
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * 
-	 */
-	public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
-
-		LOGGER.info("Fetching list of all the topics.");
-		JSONObject json = new JSONObject();
-
-		JSONArray topicsList = new JSONArray();
-
-		for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
-			JSONObject obj = new JSONObject();
-			obj.put("topicName", topic.getName());
-			//obj.put("description", topic.getDescription());
-			obj.put("owner", topic.getOwner());
-			obj.put("txenabled", topic.isTransactionEnabled());
-			topicsList.put(obj);
-		}
-
-		json.put("topics", topicsList);
-
-		LOGGER.info("Returning list of all the topics.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
-	}
-
-	
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	@Override
-	public void getTopic(DMaaPContext dmaapContext, String topicName)
-			throws ConfigDbException, IOException, TopicExistsException {
-
-		LOGGER.info("Fetching details of topic " + topicName);
-		Topic t = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == t) {
-			LOGGER.error("Topic [" + topicName + "] does not exist.");
-			throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
-		}
-
-		JSONObject o = new JSONObject();
-		o.put ( "name", t.getName () );
-		o.put ( "description", t.getDescription () );
-		
-		if (null!=t.getOwners ())
-		o.put ( "owner", t.getOwners ().iterator ().next () );
-		if(null!=t.getReaderAcl ())
-		o.put ( "readerAcl", aclToJson ( t.getReaderAcl () ) );
-		if(null!=t.getWriterAcl ())
-		o.put ( "writerAcl", aclToJson ( t.getWriterAcl () ) );
-	
-		LOGGER.info("Returning details of topic " + topicName);
-		DMaaPResponseBuilder.respondOk(dmaapContext, o);
-
-	}
-
-	
-	/**
-	 * @param dmaapContext
-	 * @param topicBean
-	 * @throws CambriaApiException
-	 * @throws AccessDeniedException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 * @throws JSONException
-	 * 
-	 * 
-	 * 
-	 */
-	@Override
-	public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
-			throws CambriaApiException, DMaaPAccessDeniedException,IOException, TopicExistsException {
-
-		LOGGER.info("Creating topic " + topicBean.getTopicName());
-		
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-		String key = null;
-		String appName=dmaapContext.getRequest().getHeader("AppName");
-		String enfTopicName= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
-	
-		if(user != null)
-		{
-			key = user.getKey();
-			
-			if(  enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >=0 ) {
-				
-				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-				
-				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
-				LOGGER.info(errRes.toString());
-				throw new DMaaPAccessDeniedException(errRes);
-				
-			}
-		}
-				
-		//else if (user==null && (null==dmaapContext.getRequest().getHeader("Authorization") && null == dmaapContext.getRequest().getHeader("cookie")) ) {
-			else if (user == null &&  null==dmaapContext.getRequest().getHeader("Authorization")     && 
-					 (null == appName  &&  null == dmaapContext.getRequest().getHeader("cookie"))) {
-			LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-			
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-					errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
-			LOGGER.info(errRes.toString());
-			throw new DMaaPAccessDeniedException(errRes);
-		}
-		
-		if (user == null &&  (null!=dmaapContext.getRequest().getHeader("Authorization") ||
-					 null != dmaapContext.getRequest().getHeader("cookie"))) {
-			//if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization") || null != dmaapContext.getRequest().getHeader("cookie"))) {
-			 // ACL authentication is not provided so we will use the aaf authentication
-			LOGGER.info("Authorization the topic");
-		
-			String permission = "";
-			String nameSpace="";
-			if(topicBean.getTopicName().indexOf(".")>1)
-			 nameSpace = topicBean.getTopicName().substring(0,topicBean.getTopicName().lastIndexOf("."));
-		
-			 String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-		
-			//AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-			
-			permission = mrFactoryVal+nameSpace+"|create";
-			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-			
-			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-			{
-				
-				LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-				
-				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, 
-						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
-				LOGGER.info(errRes.toString());
-				throw new DMaaPAccessDeniedException(errRes);
-				
-			}else{
-				// if user is null and aaf authentication is ok then key should be ""
-				//key = "";
-				/**
-				 * Added as part of AAF user it should return username
-				 */
-				
-				key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
-				LOGGER.info("key ==================== "+key);
-				
-			}
-		}
-
-		try {
-			final String topicName = topicBean.getTopicName();
-			final String desc = topicBean.getTopicDescription();
-
-			final  int partitions = topicBean.getPartitionCount();
-		
-			final int replicas = topicBean.getReplicationCount();
-			boolean transactionEnabled = topicBean.isTransactionEnabled();
-			
-
-			final Broker metabroker = getMetaBroker(dmaapContext);
-			final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas,
-					transactionEnabled);
-
-			LOGGER.info("Topic created successfully. Sending response");
-			DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
-		} catch (JSONException excp) {
-			
-			LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, 
-					DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), 
-					errorMessages.getIncorrectJson());
-			LOGGER.info(errRes.toString());
-			throw new CambriaApiException(errRes);
-			
-		}
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 * @throws CambriaApiException
-	 * @throws AccessDeniedException
-	 */
-	@Override
-	public void deleteTopic(DMaaPContext dmaapContext, String topicName)
-			throws IOException, ConfigDbException, CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
-
-		LOGGER.info("Deleting topic " + topicName);
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
-		if (user == null && null!=dmaapContext.getRequest().getHeader("Authorization")) {
-			LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-//			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-			String permission = "";
-			String nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
-			 String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-//			String tokens[] = topicName.split(".mr.topic.");
-			permission = mrFactoryVal+nameSpace+"|destroy";
-			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-			{
-				LOGGER.error("Failed to delete topi"+topicName+". Authentication failed.");
-				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-						errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" delete "+errorMessages.getNotPermitted2());
-				LOGGER.info(errRes.toString());
-				throw new DMaaPAccessDeniedException(errRes);
-			}
-			
-			
-		}
-
-		final Broker metabroker = getMetaBroker(dmaapContext);
-		final Topic topic = metabroker.getTopic(topicName);
-
-		if (topic == null) {
-			LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
-			throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		metabroker.deleteTopic(topicName);
-
-		LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
-
-	}
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @return
-	 */
-	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
-		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 * 
-	 */
-	@Override
-	public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
-			throws ConfigDbException, IOException, TopicExistsException {
-		LOGGER.info("Retrieving list of all the publishers for topic " + topicName);
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (topic == null) {
-			LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
-			throw new TopicExistsException(
-					"Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
-		}
-		
-		
-
-		final NsaAcl acl = topic.getWriterAcl();
-
-		LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
-
-	}
-
-	/**
-	 * 
-	 * @param acl
-	 * @return
-	 */
-	private static JSONObject aclToJson(NsaAcl acl) {
-		final JSONObject o = new JSONObject();
-		if (acl == null) {
-			o.put("enabled", false);
-			o.put("users", new JSONArray());
-		} else {
-			o.put("enabled", acl.isActive());
-
-			final JSONArray a = new JSONArray();
-			for (String user : acl.getUsers()) {
-				a.put(user);
-			}
-			o.put("users", a);
-		}
-		return o;
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 */
-	@Override
-	public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
-			throws IOException, ConfigDbException, TopicExistsException {
-		LOGGER.info("Retrieving list of all the consumers for topic " + topicName);
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (topic == null) {
-			LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
-			throw new TopicExistsException(
-					"Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		final NsaAcl acl = topic.getReaderAcl();
-
-		LOGGER.info("Returning list of all the consumers for topic " + topicName + ". Sending response.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
-
-	}
-
-	/**
-	 * 
-	 * @param t
-	 * @return
-	 */
-	private static JSONObject topicToJson(Topic t) {
-		final JSONObject o = new JSONObject();
-
-		o.put("name", t.getName());
-		o.put("description", t.getDescription());
-		o.put("owner", t.getOwner());
-		o.put("readerAcl", aclToJson(t.getReaderAcl()));
-		o.put("writerAcl", aclToJson(t.getWriterAcl()));
-
-		return o;
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param producerId
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 * @throws AccessDeniedException
-	 * @throws  
-	 * 
-	 */
-	@Override
-	public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException {
-
-		LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-		
-//		if (user == null) {
-//			
-//			LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//			
-//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//			String permission = aaf.aafPermissionString(topicName, "manage");
-//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//			{
-//				LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic " + topicName
-//									+ ". Authentication failed.");
-//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//						errorMessages.getNotPermitted1()+" <Grant publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//				LOGGER.info(errRes);
-//				throw new DMaaPAccessDeniedException(errRes);
-//			}
-//		}
-
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == topic) {
-			LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName
-					+ "] does not exist.");
-			throw new TopicExistsException("Failed to permit write access to producer [" + producerId
-					+ "] for topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		topic.permitWritesFromUser(producerId, user);
-
-		LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName
-				+ "]. Sending response.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher.");
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param producerId
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 * @throws AccessDeniedException
-	 * @throws DMaaPAccessDeniedException 
-	 * 
-	 */
-	@Override
-	public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
-
-		LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//		if (user == null) {
-//			
-////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//			String permission = aaf.aafPermissionString(topicName, "manage");
-//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//			{
-//				LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic " + topicName
-//						+ ". Authentication failed.");
-//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//						errorMessages.getNotPermitted1()+" <Revoke publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//				LOGGER.info(errRes);
-//				throw new DMaaPAccessDeniedException(errRes);
-//				
-//			}
-//		}
-
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == topic) {
-			LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName
-					+ "] does not exist.");
-			throw new TopicExistsException("Failed to revoke write access to producer [" + producerId
-					+ "] for topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		topic.denyWritesFromUser(producerId, user);
-
-		LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName
-				+ "]. Sending response.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher.");
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param consumerId
-	 * @throws DMaaPAccessDeniedException 
-	 */
-	@Override
-	public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
-
-		LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//		if (user == null) {
-//			
-////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//			String permission = aaf.aafPermissionString(topicName, "manage");
-//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//			{
-//				LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic " + topicName
-//						+ ". Authentication failed.");
-//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//						errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//				LOGGER.info(errRes);
-//				throw new DMaaPAccessDeniedException(errRes);
-//			}
-//		}
-
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == topic) {
-			LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
-					+ "] does not exist.");
-			throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
-					+ "] for topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		topic.permitReadsByUser(consumerId, user);
-
-		LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName
-				+ "]. Sending response.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
-				"Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "].");
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @param consumerId
-	 * @throws DMaaPAccessDeniedException 
-	 */
-	@Override
-	public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
-			throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
-
-		LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
-		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-//		if (user == null) {
-////			String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-//			String permission = aaf.aafPermissionString(topicName, "manage");
-//			if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-//			{
-//				LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic " + topicName
-//						+ ". Authentication failed.");
-//				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, 
-//						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), 
-//						errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-//				LOGGER.info(errRes);
-//				throw new DMaaPAccessDeniedException(errRes);
-//			}
-//			
-//			
-//		}
-
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == topic) {
-			LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
-					+ "] does not exist.");
-			throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
-					+ "] for topic. Topic [" + topicName + "] does not exist.");
-		}
-
-		topic.denyReadsByUser(consumerId, user);
-
-		LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName
-				+ "]. Sending response.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
-				"Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "].");
-
-	}
-
-
-	
-	
-	
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java
deleted file mode 100644
index 2299b65..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/TransactionServiceImpl.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.TransactionService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
-import org.springframework.stereotype.Service;
-
-import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.configs.ConfigDbException;
-
-/**
- * Once the transaction rest gateway will be using that time it will provide all
- * the transaction details like fetching all the transactional objects or get
- * any particular transaction object details
- * 
- * @author author
- *
- */
-@Service
-public class TransactionServiceImpl implements TransactionService {
-
-	@Override
-	public void checkTransaction(TransactionObj trnObj) {
-		/* Need to implement the method */
-	}
-
-	@Override
-	public void getAllTransactionObjs(DMaaPContext dmaapContext)
-			throws ConfigDbException, IOException {
-
-		/*
-		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
-		 * 
-		 * LOG.info("configReader : "+configReader.toString());
-		 * 
-		 * final JSONObject result = new JSONObject (); final JSONArray
-		 * transactionIds = new JSONArray (); result.put ( "transactionIds",
-		 * transactionIds );
-		 * 
-		 * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb =
-		 * configReader.getfTranDb();
-		 * 
-		 * for (String transactionId : transDb.loadAllTransactionObjs()) {
-		 * transactionIds.put (transactionId); } LOG.info(
-		 * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : "
-		 * + transactionIds.toString()+"===========");
-		 * DMaaPResponseBuilder.respondOk(dmaapContext, result);
-		 */
-	}
-
-	@Override
-	public void getTransactionObj(DMaaPContext dmaapContext,
-			String transactionId) throws ConfigDbException, JSONException,
-			IOException {
-
-		/*
-		 * if (null != transactionId) {
-		 * 
-		 * ConfigurationReader configReader = dmaapContext.getConfigReader();
-		 * 
-		 * DMaaPTransactionObj trnObj;
-		 * 
-		 * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId);
-		 * 
-		 * 
-		 * if (null != trnObj) { trnObj.serialize(); JSONObject result =
-		 * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext,
-		 * result);
-		 * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+
-		 * result.toString()+"==========="); return; }
-		 * 
-		 * } LOG.info(
-		 * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. "
-		 * +"===========");
-		 */
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java
deleted file mode 100644
index 7582d78..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/service/impl/UIServiceImpl.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.impl;
-
-import java.io.IOException;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Topic;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.service.UIService;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPResponseBuilder;
-import org.springframework.stereotype.Service;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-import kafka.common.TopicExistsException;
-
-/**
- * @author author
- *
- */
-@Service
-public class UIServiceImpl implements UIService {
-
-	//private static final Logger LOGGER = Logger.getLogger(UIServiceImpl.class);
-	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class);
-	/**
-	 * Returning template of hello page
-	 * @param dmaapContext
-	 * @throws IOException
-	 */
-	@Override
-	public void hello(DMaaPContext dmaapContext) throws IOException {
-		LOGGER.info("Returning template of hello page.");
-		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html");
-	}
-
-	/**
-	 * Fetching list of all api keys and returning in a templated form for display.
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	@Override
-	public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
-		// TODO - We need to work on the templates and how data will be set in
-		// the template
-		LOGGER.info("Fetching list of all api keys and returning in a templated form for display.");
-		Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords();
-
-		LinkedList<JSONObject> keyList = new LinkedList<JSONObject>();
-
-		JSONObject jsonList = new JSONObject();
-
-		for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) {
-			final NsaSimpleApiKey key = e.getValue();
-			final JSONObject jsonObject = new JSONObject();
-			jsonObject.put("key", key.getKey());
-			jsonObject.put("email", key.getContactEmail());
-			jsonObject.put("description", key.getDescription());
-			keyList.add(jsonObject);
-		}
-
-		jsonList.put("apiKeys", keyList);
-
-		LOGGER.info("Returning list of all the api keys in JSON format for the template.");
-		// "templates/apiKeyList.html"
-		DMaaPResponseBuilder.respondOk(dmaapContext, jsonList);
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param apiKey
-	 * @throws Exception
-	 */
-	@Override
-	public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws Exception {
-		// TODO - We need to work on the templates and how data will be set in
-		// the template
-		LOGGER.info("Fetching detials of apikey: " + apiKey);
-		final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey);
-
-		if (null != key) {
-			LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response");
-			DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
-		} else {
-			LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
-			throw new Exception("Key [" + apiKey + "] not found.");
-		}
-
-	}
-
-	/**
-	 * Fetching list of all the topics
-	 * @param dmaapContext
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 */
-	@Override
-	public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
-		// TODO - We need to work on the templates and how data will be set in
-		// the template
-		LOGGER.info("Fetching list of all the topics and returning in a templated form for display");
-		List<Topic> topicsList = getMetaBroker(dmaapContext).getAllTopics();
-
-		JSONObject jsonObject = new JSONObject();
-
-		JSONArray topicsArray = new JSONArray();
-
-		List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics();
-
-		for (Topic topic : topicList) {
-			JSONObject obj = new JSONObject();
-			obj.put("topicName", topic.getName());
-			obj.put("description", topic.getDescription());
-			obj.put("owner", topic.getOwner());
-			topicsArray.put(obj);
-		}
-
-		jsonObject.put("topics", topicsList);
-
-		LOGGER.info("Returning the list of topics in templated format for display.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject);
-
-	}
-
-	/**
-	 * @param dmaapContext
-	 * @param topicName
-	 * @throws ConfigDbException
-	 * @throws IOException
-	 * @throws TopicExistsException
-	 */
-	@Override
-	public void getTopic(DMaaPContext dmaapContext, String topicName)
-			throws ConfigDbException, IOException, TopicExistsException {
-		// TODO - We need to work on the templates and how data will be set in
-		// the template
-		LOGGER.info("Fetching detials of apikey: " + topicName);
-		Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
-		if (null == topic) {
-			LOGGER.error("Topic [" + topicName + "] does not exist.");
-			throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
-		}
-
-		JSONObject json = new JSONObject();
-		json.put("topicName", topic.getName());
-		json.put("description", topic.getDescription());
-		json.put("owner", topic.getOwner());
-
-		LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
-		DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
-	}
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @return
-	 */
-	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
-		return dmaapContext.getConfigReader().getfApiKeyDb();
-
-	}
-
-	/**
-	 * 
-	 * @param dmaapContext
-	 * @return
-	 */
-	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
-		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java
deleted file mode 100644
index a721885..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
-/**
- * 
- * @author author
- *
- * @param <K>
- */
-public interface DMaaPTransactionFactory<K extends DMaaPTransactionObj> {
-
-	/**
-	 * 
-	 * @param data
-	 * @return
-	 */
-	K makeNewTransactionObj ( String data );
-	/**
-	 * 
-	 * @param id
-	 * @return
-	 */
-	K makeNewTransactionId ( String id );
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java
deleted file mode 100644
index 1fa299d..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObj.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
-
-import org.json.JSONObject;
-/**
- * This is an interface for DMaaP transactional logging object class.
- * @author author
- *
- */
-public interface DMaaPTransactionObj {
-	/**
-	 * This will get the transaction id
-	 * @return id transactionId
-	 */
-	String getId();
-	/**
-	 * This will set the transaction id
-	 * @param id transactionId
-	 */
-	void setId(String id);
-	/**
-	 * This will sync the transaction object mapping
-	 * @return String or null
-	 */
-	String serialize();
-	/**
-	 * get the total message count once the publisher published
-	 * @return long totalMessageCount
-	 */
-	long getTotalMessageCount();
-	/**
-	 * set the total message count once the publisher published
-	 * @param totalMessageCount
-	 */
-	void setTotalMessageCount(long totalMessageCount);
-	/**
-	 * get the total Success Message Count once the publisher published
-	 * @return getSuccessMessageCount
-	 */
-	long getSuccessMessageCount();
-	/**
-	 * set the total Success Message Count once the publisher published
-	 * @param successMessageCount
-	 */
-	void setSuccessMessageCount(long successMessageCount);
-	/**
-	 * get the failure Message Count once the publisher published
-	 * @return failureMessageCount
-	 */
-	long getFailureMessageCount();
-	/**
-	 * set the failure Message Count once the publisher published
-	 * @param failureMessageCount
-	 */
-	void setFailureMessageCount(long failureMessageCount);
-
-	/**
-	 * wrapping the data into json object
-	 * @return JSONObject
-	 */
-	JSONObject asJsonObject();
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java
deleted file mode 100644
index a391842..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/DMaaPTransactionObjDB.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
-
-import java.util.Set;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaSecurityManagerException;
-
-
-/**
- * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
- * of this DB must support concurrent access.
- * @author author
- *
- * @param <K> DMaaPTransactionObj
- */
-public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
-
-
-	/**
-	 * Create a new Transaction Object. If one exists, 
-	 * @param id
-	 * @return the new Transaction record
-	 * @throws ConfigDbException 
-	 */
-	K createTransactionObj (String id) throws KeyExistsException, ConfigDbException;
-
-
-	/**
-	 * An exception to signal a Transaction object already exists 
-	 * @author author
-	 *
-	 */
-	public static class KeyExistsException extends NsaSecurityManagerException
-	{
-		/**
-		 * If the key exists
-		 * @param key
-		 */
-		public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); }
-		private static final long serialVersionUID = 1L;
-	}
-
-	/**
-	 * Save a Transaction Object record. This must be used after changing auxiliary data on the record.
-	 * Note that the transaction must exist (via createTransactionObj). 
-	 * @param transactionObj
-	 * @throws ConfigDbException 
-	 */
-	void saveTransactionObj ( K transactionObj ) throws ConfigDbException;
-	
-	/**
-	 * Load an Transaction Object record based on the Transaction ID value
-	 * @param transactionId
-	 * @return a transaction record or null
-	 * @throws ConfigDbException 
-	 */
-	K loadTransactionObj ( String transactionId ) throws ConfigDbException;
-	
-	/**
-	 * Load all Transaction objects.
-	 * @return
-	 * @throws ConfigDbException
-	 */
-	Set<String> loadAllTransactionObjs () throws ConfigDbException;
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java
deleted file mode 100644
index e79bf01..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TransactionObj.java
+++ /dev/null
@@ -1,202 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
-
-import org.json.JSONObject;
-
-/**
- * This is the class which will have the transaction enabled logging object
- * details
- * 
- * @author author
- *
- */
-public class TransactionObj implements DMaaPTransactionObj {
-
-	private String id;
-	private String createTime;
-	private long totalMessageCount;
-	private long successMessageCount;
-	private long failureMessageCount;
-	private JSONObject fData = new JSONObject();
-	private TrnRequest trnRequest;
-	private static final String kAuxData = "transaction";
-
-	/**
-	 * Initializing constructor  
-	 * put the json data for transaction enabled logging
-	 * 
-	 * @param data
-	 */
-	public TransactionObj(JSONObject data) {
-		fData = data;
-
-		// check for required fields (these throw if not present)
-		getId();
-		getTotalMessageCount();
-		getSuccessMessageCount();
-		getFailureMessageCount();
-
-		// make sure we've got an aux data object
-		final JSONObject aux = fData.optJSONObject(kAuxData);
-		if (aux == null) {
-			fData.put(kAuxData, new JSONObject());
-		}
-	}
-
-	/**
-	 * this constructor will have the details of transaction id,
-	 * totalMessageCount successMessageCount, failureMessageCount to get the
-	 * transaction object
-	 * 
-	 * @param id
-	 * @param totalMessageCount
-	 * @param successMessageCount
-	 * @param failureMessageCount
-	 */
-	public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) {
-		this.id = id;
-		this.totalMessageCount = totalMessageCount;
-		this.successMessageCount = successMessageCount;
-		this.failureMessageCount = failureMessageCount;
-
-	}
-
-	/**
-	 * The constructor passing only transaction id
-	 * 
-	 * @param id
-	 */
-	public TransactionObj(String id) {
-		this.id = id;
-	}
-
-	/**
-	 * Wrapping the data into json object
-	 * 
-	 * @return JSONObject
-	 */
-	public JSONObject asJsonObject() {
-		final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData));
-		return full;
-	}
-
-	/**
-	 * To get the transaction id
-	 */
-	public String getId() {
-		return id;
-	}
-
-	/**
-	 * To set the transaction id
-	 */
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public String getCreateTime() {
-		return createTime;
-	}
-
-	/**
-	 * 
-	 * @param createTime
-	 */
-	public void setCreateTime(String createTime) {
-		this.createTime = createTime;
-	}
-
-	@Override
-	public String serialize() {
-		fData.put("transactionId", id);
-		fData.put("totalMessageCount", totalMessageCount);
-		fData.put("successMessageCount", successMessageCount);
-		fData.put("failureMessageCount", failureMessageCount);
-		return fData.toString();
-	}
-
-	public long getTotalMessageCount() {
-		return totalMessageCount;
-	}
-
-	public void setTotalMessageCount(long totalMessageCount) {
-		this.totalMessageCount = totalMessageCount;
-	}
-
-	public long getSuccessMessageCount() {
-		return successMessageCount;
-	}
-
-	public void setSuccessMessageCount(long successMessageCount) {
-		this.successMessageCount = successMessageCount;
-	}
-
-	public long getFailureMessageCount() {
-		return failureMessageCount;
-	}
-
-	/**
-	 * @param failureMessageCount
-	 */
-	public void setFailureMessageCount(long failureMessageCount) {
-		this.failureMessageCount = failureMessageCount;
-	}
-
-	/**
-	 * 
-	 * @return JSOnObject fData
-	 */
-	public JSONObject getfData() {
-		return fData;
-	}
-
-	/**
-	 * set the json object into data
-	 * 
-	 * @param fData
-	 */
-	public void setfData(JSONObject fData) {
-		this.fData = fData;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public TrnRequest getTrnRequest() {
-		return trnRequest;
-	}
-
-	/**
-	 * 
-	 * @param trnRequest
-	 */
-	public void setTrnRequest(TrnRequest trnRequest) {
-		this.trnRequest = trnRequest;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java
deleted file mode 100644
index bb8fe37..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/TrnRequest.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction;
-
-/**
- * Created for transaction enable logging details, this is nothing but a bean
- * class.
- * 
- * @author author
- *
- */
-public class TrnRequest {
-
-	private String id;
-	private String requestCreate;
-	private String requestHost;
-	private String serverHost;
-	private String messageProceed;
-	private String totalMessage;
-	private String clientType;
-	private String url;
-
-	/**
-	 * 
-	 * 
-	 * 
-	 * @return id
-	 * 
-	 */
-	public String getId() {
-		return id;
-	}
-
-	/**
-	 * 
-	 * 
-	 * @param id
-	 */
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	/**
-	 * 
-	 * 
-	 * @return requestCreate
-	 */
-	public String getRequestCreate() {
-		return requestCreate;
-	}
-
-	/**
-	 * 
-	 * @param requestCreate
-	 */
-	public void setRequestCreate(String requestCreate) {
-		this.requestCreate = requestCreate;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public String getRequestHost() {
-		return requestHost;
-	}
-
-	/**
-	 * 
-	 * @param requestHost
-	 */
-	public void setRequestHost(String requestHost) {
-		this.requestHost = requestHost;
-	}
-
-	/**
-	 * 
-	 * 
-	 * 
-	 * @return
-	 */
-	public String getServerHost() {
-		return serverHost;
-	}
-
-	/**
-	 * 
-	 * @param serverHost
-	 */
-	public void setServerHost(String serverHost) {
-		this.serverHost = serverHost;
-	}
-
-	/**
-	 * 
-	 * 
-	 * 
-	 * @return
-	 */
-	public String getMessageProceed() {
-		return messageProceed;
-	}
-
-	/**
-	 * 
-	 * @param messageProceed
-	 */
-	public void setMessageProceed(String messageProceed) {
-		this.messageProceed = messageProceed;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public String getTotalMessage() {
-		return totalMessage;
-	}
-
-	/**
-	 * 
-	 * @param totalMessage
-	 * 
-	 * 
-	 */
-	public void setTotalMessage(String totalMessage) {
-		this.totalMessage = totalMessage;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public String getClientType() {
-		return clientType;
-	}
-
-	/**
-	 * 
-	 * @param clientType
-	 * 
-	 */
-	public void setClientType(String clientType) {
-		this.clientType = clientType;
-	}
-
-	/**
-	 * 
-	 * @return
-	 */
-	public String getUrl() {
-		return url;
-	}
-
-	/**
-	 * 
-	 * @param url
-	 * 
-	 */
-	public void setUrl(String url) {
-		this.url = url;
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
deleted file mode 100644
index bd18794..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.impl;
-
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.DMaaPTransactionObj;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.transaction.TransactionObj;
-
-/**
- * A factory for the simple Transaction implementation
- * 
- * 
- * @author author
- *
- */
-public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
-	/**
-	 * 
-	 * @param data
-	 * @return DMaaPTransactionObj
-	 */
-	@Override
-	public DMaaPTransactionObj makeNewTransactionObj(String data) {
-		JSONObject jsonObject = new JSONObject(data);
-		return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"),
-				jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount"));
-	}
-
-	/**
-	 * 
-	 * @param id
-	 * @return TransactionObj
-	 * 
-	 * 
-	 */
-	@Override
-	public DMaaPTransactionObj makeNewTransactionId(String id) {
-		return new TransactionObj(id);
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java
deleted file mode 100644
index 34951b8..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/ConfigurationReader.java
+++ /dev/null
@@ -1,497 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import javax.servlet.ServletException;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.apache.curator.framework.CuratorFramework;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.ConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.MetricsSet;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.Publisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryConsumerFactory;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryMetaBroker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryQueue;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryQueuePublisher;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPCambriaLimiter;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPZkConfigDb;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.metabroker.Broker;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticator;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.Emailer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.confimpl.MemConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.limits.Blacklist;
-import com.att.nsa.security.NsaAuthenticatorService;
-//import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
-import com.att.nsa.security.db.BaseNsaApiDbImpl;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
-
-/**
- * Class is created for all the configuration for rest and service layer
- * integration.
- *
- */
-@Component
-public class ConfigurationReader {
-
-//	private rrNvReadable settings;
-	private Broker fMetaBroker;
-	private ConsumerFactory fConsumerFactory;
-	private Publisher fPublisher;
-	private MetricsSet fMetrics;
-	@Autowired
-	private DMaaPCambriaLimiter fRateLimiter;
-	private NsaApiDb<NsaSimpleApiKey> fApiKeyDb;
-	/* private DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb; */
-	private DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager;
-	private NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager;
-	private static CuratorFramework curator;
-	private ZkClient zk;
-	private DMaaPZkConfigDb fConfigDb;
-	private MemoryQueue q;
-	private MemoryMetaBroker mmb;
-	private Blacklist fIpBlackList;
-	private Emailer fEmailer;
-
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
-	//private static final Logger log = Logger.getLogger(ConfigurationReader.class.toString());
-
-	/**
-	 * constructor to initialize all the values
-	 * 
-	 * @param settings
-	 * @param fMetrics
-	 * @param zk
-	 * @param fConfigDb
-	 * @param fPublisher
-	 * @param curator
-	 * @param fConsumerFactory
-	 * @param fMetaBroker
-	 * @param q
-	 * @param mmb
-	 * @param fApiKeyDb
-	 * @param fSecurityManager
-	 * @throws missingReqdSetting
-	 * @throws invalidSettingValue
-	 * @throws ServletException
-	 * @throws KafkaConsumerCacheException
-	 * @throws ConfigDbException 
-	 */
-	@Autowired
-	public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
-			@Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk,
-			@Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
-			@Qualifier("curator") CuratorFramework curator,
-			@Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
-			@Qualifier("dMaaPKafkaMetaBroker") Broker fMetaBroker, @Qualifier("q") MemoryQueue q,
-			@Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
-			/*
-			 * @Qualifier("dMaaPTranDb")
-			 * DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb,
-			 */
-			@Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
-			)
-					throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException {
-		//this.settings = settings;
-		this.fMetrics = fMetrics;
-		this.zk = zk;
-		this.fConfigDb = fConfigDb;
-		this.fPublisher = fPublisher;
-		ConfigurationReader.curator = curator;
-		this.fConsumerFactory = fConsumerFactory;
-		this.fMetaBroker = fMetaBroker;
-		this.q = q;
-		this.mmb = mmb;
-		this.fApiKeyDb = fApiKeyDb;
-		/* this.fTranDb = fTranDb; */
-		this.fSecurityManager = fSecurityManager;
-		
-		long allowedtimeSkewMs=600000L;
-		String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs");
-		if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM);
-				
-	//	boolean requireSecureChannel = true;
-		//String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel");
-		//if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel);
-		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true));
-		//this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel);
-		
-		servletSetup();
-	}
-
-	protected void servletSetup()
-			throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException {
-		try {
-
-			fMetrics.toJson();
-			fMetrics.setupCambriaSender();
-
-			// add the admin authenticator
-				//		final String adminSecret = settings.getString ( CambriaConstants.kSetting_AdminSecret, null );
-						final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
-						//adminSecret = "fe3cCompound";
-						if ( adminSecret != null && adminSecret.length () > 0 )
-						{
-							try
-							{
-								
-								final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
-								adminDb.createApiKey ( "admin", adminSecret );
-								//nsaSecurityManager.addAuthenticator ( new OriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
-						        fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
-							}
-							catch ( KeyExistsException e )
-							{
-								throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
-							}
-						}
-						
-			// setup a backend
-			//final String type = settings.getString(CambriaConstants.kBrokerType, CambriaConstants.kBrokerType_Kafka);
-			 String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
-			if (type==null) type = CambriaConstants.kBrokerType_Kafka;
-			if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
-				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
-
-			} else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
-				log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
-
-				fPublisher = new MemoryQueuePublisher(q, mmb);
-				fMetaBroker = mmb;
-				fConsumerFactory = new MemoryConsumerFactory(q);
-			} else {
-				throw new IllegalArgumentException(
-						"Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
-			}
-			
-			fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
-			this.fEmailer = new Emailer();
-			
-			log.info("Broker Type is:" + type);
-
-		} catch (SecurityException e) {
-			throw new ServletException(e);
-		}
-	}
-
-	/**
-	 * method returns metaBroker
-	 * 
-	 * @return
-	 */
-	public Broker getfMetaBroker() {
-		return fMetaBroker;
-	}
-
-	/**
-	 * method to set the metaBroker
-	 * 
-	 * @param fMetaBroker
-	 */
-	public void setfMetaBroker(Broker fMetaBroker) {
-		this.fMetaBroker = fMetaBroker;
-	}
-
-	/**
-	 * method to get ConsumerFactory Object
-	 * 
-	 * @return
-	 */
-	public ConsumerFactory getfConsumerFactory() {
-		return fConsumerFactory;
-	}
-
-	/**
-	 * method to set the consumerfactory object
-	 * 
-	 * @param fConsumerFactory
-	 */
-	public void setfConsumerFactory(ConsumerFactory fConsumerFactory) {
-		this.fConsumerFactory = fConsumerFactory;
-	}
-
-	/**
-	 * method to get Publisher object
-	 * 
-	 * @return
-	 */
-	public Publisher getfPublisher() {
-		return fPublisher;
-	}
-
-	/**
-	 * method to set Publisher object
-	 * 
-	 * @param fPublisher
-	 */
-	public void setfPublisher(Publisher fPublisher) {
-		this.fPublisher = fPublisher;
-	}
-
-	/**
-	 * method to get MetricsSet Object
-	 * 
-	 * @return
-	 */
-	public MetricsSet getfMetrics() {
-		return fMetrics;
-	}
-
-	/**
-	 * method to set MetricsSet Object
-	 * 
-	 * @param fMetrics
-	 */
-	public void setfMetrics(MetricsSet fMetrics) {
-		this.fMetrics = fMetrics;
-	}
-
-	/**
-	 * method to get DMaaPCambriaLimiter object
-	 * 
-	 * @return
-	 */
-	public DMaaPCambriaLimiter getfRateLimiter() {
-		return fRateLimiter;
-	}
-
-	/**
-	 * method to set DMaaPCambriaLimiter object
-	 * 
-	 * @param fRateLimiter
-	 */
-	public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) {
-		this.fRateLimiter = fRateLimiter;
-	}
-
-	/**
-	 * Method to get DMaaPAuthenticator object
-	 * 
-	 * @return
-	 */
-	public DMaaPAuthenticator<NsaSimpleApiKey> getfSecurityManager() {
-		return fSecurityManager;
-	}
-
-	/**
-	 * method to set DMaaPAuthenticator object
-	 * 
-	 * @param fSecurityManager
-	 */
-	public void setfSecurityManager(DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager) {
-		this.fSecurityManager = fSecurityManager;
-	}
-
-	/**
-	 * method to get rrNvReadable object
-	 * 
-	 * @return
-	 */
-	/*public rrNvReadable getSettings() {
-		return settings;
-	}*/
-
-	/**
-	 * method to set rrNvReadable object
-	 * 
-	 * @param settings
-	 */
-	/*public void setSettings(rrNvReadable settings) {
-		this.settings = settings;
-	}*/
-
-	/**
-	 * method to get CuratorFramework object
-	 * 
-	 * @return
-	 */
-	public static CuratorFramework getCurator() {
-		return curator;
-	}
-
-	/**
-	 * method to set CuratorFramework object
-	 * 
-	 * @param curator
-	 */
-	public static void setCurator(CuratorFramework curator) {
-		ConfigurationReader.curator = curator;
-	}
-
-	/**
-	 * method to get ZkClient object
-	 * 
-	 * @return
-	 */
-	public ZkClient getZk() {
-		return zk;
-	}
-
-	/**
-	 * method to set ZkClient object
-	 * 
-	 * @param zk
-	 */
-	public void setZk(ZkClient zk) {
-		this.zk = zk;
-	}
-
-	/**
-	 * method to get DMaaPZkConfigDb object
-	 * 
-	 * @return
-	 */
-	public DMaaPZkConfigDb getfConfigDb() {
-		return fConfigDb;
-	}
-
-	/**
-	 * method to set DMaaPZkConfigDb object
-	 * 
-	 * @param fConfigDb
-	 */
-	public void setfConfigDb(DMaaPZkConfigDb fConfigDb) {
-		this.fConfigDb = fConfigDb;
-	}
-
-	/**
-	 * method to get MemoryQueue object
-	 * 
-	 * @return
-	 */
-	public MemoryQueue getQ() {
-		return q;
-	}
-
-	/**
-	 * method to set MemoryQueue object
-	 * 
-	 * @param q
-	 */
-	public void setQ(MemoryQueue q) {
-		this.q = q;
-	}
-
-	/**
-	 * method to get MemoryMetaBroker object
-	 * 
-	 * @return
-	 */
-	public MemoryMetaBroker getMmb() {
-		return mmb;
-	}
-
-	/**
-	 * method to set MemoryMetaBroker object
-	 * 
-	 * @param mmb
-	 */
-	public void setMmb(MemoryMetaBroker mmb) {
-		this.mmb = mmb;
-	}
-
-	/**
-	 * method to get NsaApiDb object
-	 * 
-	 * @return
-	 */
-	public NsaApiDb<NsaSimpleApiKey> getfApiKeyDb() {
-		return fApiKeyDb;
-	}
-
-	/**
-	 * method to set NsaApiDb object
-	 * 
-	 * @param fApiKeyDb
-	 */
-	public void setfApiKeyDb(NsaApiDb<NsaSimpleApiKey> fApiKeyDb) {
-		this.fApiKeyDb = fApiKeyDb;
-	}
-
-	/*
-	 * public DMaaPTransactionObjDB<DMaaPTransactionObj> getfTranDb() { return
-	 * fTranDb; }
-	 * 
-	 * public void setfTranDb(DMaaPTransactionObjDB<DMaaPTransactionObj>
-	 * fTranDb) { this.fTranDb = fTranDb; }
-	 */
-	/**
-	 * method to get the zookeeper connection String
-	 * 
-	 * @param settings
-	 * @return
-	 */
-	public static String getMainZookeeperConnectionString() {
-		//return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers,			CambriaConstants.kDefault_ZkConfigDbServers);
-		
-		 String typeVal = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
-		 if (typeVal==null) typeVal=CambriaConstants.kDefault_ZkConfigDbServers;
-		 
-		 return typeVal;
-	}
-
-	public static String getMainZookeeperConnectionSRoot(){
-		String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot);
-	
-		if (null==strVal)
-			strVal=CambriaConstants.kDefault_ZkConfigDbRoot;
-	
-		return strVal;
-	}
-	
-	public Blacklist getfIpBlackList() {
-		return fIpBlackList;
-	}
-
-	public void setfIpBlackList(Blacklist fIpBlackList) {
-		this.fIpBlackList = fIpBlackList;
-	}
-
-	public NsaAuthenticatorService<NsaSimpleApiKey> getNsaSecurityManager() {
-		return nsaSecurityManager;
-	}
-
-	public void setNsaSecurityManager(NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager) {
-		this.nsaSecurityManager = nsaSecurityManager;
-	}
-	
-	public Emailer getSystemEmailer()
-	  {
-	    return this.fEmailer;
-	  }
-
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java
deleted file mode 100644
index 8950ec8..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPCuratorFactory.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * 
- * 
- * @author author
- *
- *
- */
-public class DMaaPCuratorFactory {
-	/**
-	 * 
-	 * method provide CuratorFramework object
-	 * 
-	 * @param settings
-	 * @return
-	 * 
-	 * 
-	 * 
-	 */
-	public static CuratorFramework getCurator(rrNvReadable settings) {
-		String Setting_ZkConfigDbServers =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers);
-		 
-		if(null==Setting_ZkConfigDbServers)
-			 Setting_ZkConfigDbServers =CambriaConstants.kDefault_ZkConfigDbServers; 
-		
-		String strSetting_ZkSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
-		if (strSetting_ZkSessionTimeoutMs==null) strSetting_ZkSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs+"";
-		int Setting_ZkSessionTimeoutMs = Integer.parseInt(strSetting_ZkSessionTimeoutMs);
-		
-		String str_ZkConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
-		if (str_ZkConnectionTimeoutMs==null) str_ZkConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs+"";
-		int setting_ZkConnectionTimeoutMs = Integer.parseInt(str_ZkConnectionTimeoutMs);
-		
-		
-		CuratorFramework curator = CuratorFrameworkFactory.newClient(
-				Setting_ZkConfigDbServers,Setting_ZkSessionTimeoutMs,setting_ZkConnectionTimeoutMs
-				,new ExponentialBackoffRetry(1000, 5));
-		return curator;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java
deleted file mode 100644
index a24dd6b..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/DMaaPResponseBuilder.java
+++ /dev/null
@@ -1,358 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.io.Writer;
-
-import javax.servlet.http.HttpServletResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-
-/**
- * class is used to create response object which is given to user
- * 
- * @author author
- *
- */
-
-public class DMaaPResponseBuilder {
-
-	//private static Logger log = Logger.getLogger(DMaaPResponseBuilder.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class);
-	protected static final int kBufferLength = 4096;
-
-	public static void setNoCacheHeadings(DMaaPContext ctx) {
-		HttpServletResponse response = ctx.getResponse();
-		response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate");
-		response.addHeader("Pragma", "no-cache");
-		response.addHeader("Expires", "0");
-	}
-
-	/**
-	 * static method is used to create response object associated with
-	 * JSONObject
-	 * 
-	 * @param ctx
-	 * @param result
-	 * @throws JSONException
-	 * @throws IOException
-	 */
-	public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException {
-
-		respondOkWithStream(ctx, "application/json", new ByteArrayInputStream(result.toString(4).getBytes()));
-
-	}
-
-	/**
-	 * method used to set staus to 204
-	 * 
-	 * @param ctx
-	 */
-	public static void respondOkNoContent(DMaaPContext ctx) {
-		try {
-			ctx.getResponse().setStatus(204);
-		} catch (Exception excp) {
-			log.error(excp.getMessage(), excp);
-		}
-	}
-
-	/**
-	 * static method is used to create response object associated with html
-	 * 
-	 * @param ctx
-	 * @param html
-	 */
-	public static void respondOkWithHtml(DMaaPContext ctx, String html) {
-		try {
-			respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.toString().getBytes()));
-		} catch (Exception excp) {
-			log.error(excp.getMessage(), excp);
-		}
-	}
-
-	/**
-	 * method used to create response object associated with InputStream
-	 * 
-	 * @param ctx
-	 * @param mediaType
-	 * @param is
-	 * @throws IOException
-	 */
-	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is)
-			throws IOException {
-		/*
-		 * creates response object associated with streamwriter
-		 */
-		respondOkWithStream(ctx, mediaType, new StreamWriter() {
-
-			public void write(OutputStream os) throws IOException {
-				copyStream(is, os);
-			}
-		});
-
-	}
-
-	/**
-	 * 
-	 * @param ctx
-	 * @param mediaType
-	 * @param writer
-	 * @throws IOException
-	 */
-	public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
-
-		ctx.getResponse().setStatus(200);
-		OutputStream os = getStreamForBinaryResponse(ctx, mediaType);
-		writer.write(os);
-
-	}
-
-	/**
-	 * static method to create error objects
-	 * 
-	 * @param ctx
-	 * @param errCode
-	 * @param msg
-	 */
-	public static void respondWithError(DMaaPContext ctx, int errCode, String msg) {
-		try {
-			ctx.getResponse().sendError(errCode, msg);
-		} catch (IOException excp) {
-			log.error(excp.getMessage(), excp);
-		}
-	}
-
-	/**
-	 * method to create error objects
-	 * 
-	 * @param ctx
-	 * @param errCode
-	 * @param body
-	 */
-	public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) {
-		try {
-			sendErrorAndBody(ctx, errCode, body.toString(4), "application/json");
-		} catch (Exception excp) {
-			log.error(excp.getMessage(), excp);
-		}
-	}
-
-	/**
-	 * static method creates error object in JSON
-	 * 
-	 * @param ctx
-	 * @param errCode
-	 * @param msg
-	 */
-	public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) {
-		try {
-			JSONObject o = new JSONObject();
-			o.put("status", errCode);
-			o.put("message", msg);
-			respondWithError(ctx, errCode, o);
-
-		} catch (Exception excp) {
-			log.error(excp.getMessage(), excp);
-		}
-	}
-
-	/**
-	 * static method used to copy the stream with the help of another method
-	 * copystream
-	 * 
-	 * @param in
-	 * @param out
-	 * @throws IOException
-	 */
-	public static void copyStream(InputStream in, OutputStream out) throws IOException {
-		copyStream(in, out, 4096);
-	}
-
-	/**
-	 * static method to copy the streams
-	 * 
-	 * @param in
-	 * @param out
-	 * @param bufferSize
-	 * @throws IOException
-	 */
-	public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException {
-		byte[] buffer = new byte[bufferSize];
-		int len;
-		while ((len = in.read(buffer)) != -1) {
-			out.write(buffer, 0, len);
-		}
-		out.close();
-	}
-
-	/**
-	 * interface used to define write method for outputStream
-	 */
-	public static abstract interface StreamWriter {
-		/**
-		 * abstract method used to write the response
-		 * 
-		 * @param paramOutputStream
-		 * @throws IOException
-		 */
-		public abstract void write(OutputStream paramOutputStream) throws IOException;
-	}
-
-	/**
-	 * static method returns stream for binary response
-	 * 
-	 * @param ctx
-	 * @return
-	 * @throws IOException
-	 */
-	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException {
-		return getStreamForBinaryResponse(ctx, "application/octet-stream");
-	}
-
-	/**
-	 * static method returns stream for binaryResponses
-	 * 
-	 * @param ctx
-	 * @param contentType
-	 * @return
-	 * @throws IOException
-	 */
-	public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
-		ctx.getResponse().setContentType(contentType);
-
-		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
-
-		OutputStream os = null;
-		if (fResponseEntityAllowed) {
-			os = ctx.getResponse().getOutputStream();
-		} else {
-			os = new NullStream();
-		}
-		return os;
-	}
-
-	/**
-	 * 
-	 * @author author
-	 *
-	 */
-	private static class NullStream extends OutputStream {
-		/**
-		 * @param b
-		 *            integer
-		 */
-		public void write(int b) {
-		}
-	}
-
-	private static class NullWriter extends Writer {
-		/**
-		 * write method
-		 * @param cbuf
-		 * @param off
-		 * @param len
-		 */
-		public void write(char[] cbuf, int off, int len) {
-		}
-
-		/**
-		 * flush method
-		 */
-		public void flush() {
-		}
-
-		/**
-		 * close method
-		 */
-		public void close() {
-		}
-	}
-
-	/**
-	 * sttaic method fetch stream for text
-	 * 
-	 * @param ctx
-	 * @param err
-	 * @param content
-	 * @param mimeType
-	 */
-	public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) {
-		try {
-			setStatus(ctx, err);
-			getStreamForTextResponse(ctx, mimeType).println(content);
-		} catch (IOException e) {
-			log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(),
-					e);
-		}
-	}
-
-	/**
-	 * method to set the code
-	 * 
-	 * @param ctx
-	 * @param code
-	 */
-	public static void setStatus(DMaaPContext ctx, int code) {
-		ctx.getResponse().setStatus(code);
-	}
-
-	/**
-	 * static method returns stream for text response
-	 * 
-	 * @param ctx
-	 * @return
-	 * @throws IOException
-	 */
-	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException {
-		return getStreamForTextResponse(ctx, "text/html");
-	}
-
-	/**
-	 * static method returns stream for text response
-	 * 
-	 * @param ctx
-	 * @param contentType
-	 * @return
-	 * @throws IOException
-	 */
-	public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException {
-		ctx.getResponse().setContentType(contentType);
-
-		PrintWriter pw = null;
-		boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
-
-		if (fResponseEntityAllowed) {
-			pw = ctx.getResponse().getWriter();
-		} else {
-			pw = new PrintWriter(new NullWriter());
-		}
-		return pw;
-	}
-}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java
deleted file mode 100644
index 3f87d59..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Emailer.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import java.io.IOException;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import javax.mail.BodyPart;
-import javax.mail.Message;
-import javax.mail.Multipart;
-import javax.mail.PasswordAuthentication;
-import javax.mail.Session;
-import javax.mail.Transport;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeBodyPart;
-import javax.mail.internet.MimeMessage;
-import javax.mail.internet.MimeMultipart;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * Send an email from a message.
- * 
- * @author author
- */
-public class Emailer
-{
-	public static final String kField_To = "to";
-	public static final String kField_Subject = "subject";
-	public static final String kField_Message = "message";
-
-	public Emailer()
-	{
-		fExec = Executors.newCachedThreadPool ();
-	//	fSettings = settings;
-	}
-
-	public void send ( String to, String subj, String body ) throws IOException
-	{
-		final String[] addrs = to.split ( "," );
-
-		if ( to.length () > 0 )
-		{
-			final MailTask mt = new MailTask ( addrs, subj, body );
-			fExec.submit ( mt );
-		}
-		else
-		{
-			log.warn ( "At least one address is required." );
-		}
-	}
-
-	public void close ()
-	{
-		fExec.shutdown ();
-	}
-
-	private final ExecutorService fExec;
-	//private final rrNvReadable fSettings;
-
-	//private static final Logger log = LoggerFactory.getLogger ( Emailer.class );
-
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
-	
-	public static final String kSetting_MailAuthUser = "mailLogin";
-	public static final String kSetting_MailAuthPwd = "mailPassword";
-	public static final String kSetting_MailFromEmail = "mailFromEmail";
-	public static final String kSetting_MailFromName = "mailFromName";
-	public static final String kSetting_SmtpServer = "mailSmtpServer";
-	public static final String kSetting_SmtpServerPort = "mailSmtpServerPort";
-	public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl";
-	public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth";
-
-	private class MailTask implements Runnable
-	{
-		public MailTask ( String[] to, String subject, String msgBody )
-		{
-			fToAddrs = to;
-			fSubject = subject;
-			fBody = msgBody;
-		}
-
-		private String getSetting ( String settingKey, String defval )
-		{
-			//return fSettings.getString ( settingKey, defval );
-			String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey);
-			if(strSet==null)strSet=defval;
-			return strSet;
-		}
-
-		// we need to get setting values from the evaluator but also the channel config
-		private void makeSetting ( Properties props, String propKey, String settingKey, String defval )
-		{
-			props.put ( propKey, getSetting ( settingKey, defval ) );
-		}
-
-		private void makeSetting ( Properties props, String propKey, String settingKey, int defval )
-		{
-			makeSetting ( props, propKey, settingKey, "" + defval );
-		}
-
-		private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval )
-		{
-			makeSetting ( props, propKey, settingKey, "" + defval );
-		}
-
-		@Override
-		public void run ()
-		{
-			final StringBuffer tag = new StringBuffer ();
-			final StringBuffer addrList = new StringBuffer ();
-			tag.append ( "(" );
-			for ( String to : fToAddrs )
-			{
-				if ( addrList.length () > 0 )
-				{
-					addrList.append ( ", " );
-				}
-				addrList.append ( to );
-			}
-			tag.append ( addrList.toString () );
-			tag.append ( ") \"" );
-			tag.append ( fSubject );
-			tag.append ( "\"" );
-			
-			log.info ( "sending mail to " + tag );
-
-			try
-			{
-				final Properties prop = new Properties ();
-				makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 );
-				prop.put ( "mail.smtp.socketFactory.fallback", "false" );
-				prop.put ( "mail.smtp.quitwait", "false" );
-				makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" );
-				makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true );
-				makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
-
-				final String un = getSetting ( kSetting_MailAuthUser, "" );
-				final String pw = getSetting ( kSetting_MailAuthPwd, "" );
-				final Session session = Session.getInstance ( prop,
-					new javax.mail.Authenticator()
-					{
-						@Override
-						protected PasswordAuthentication getPasswordAuthentication()
-						{
-							return new PasswordAuthentication ( un, pw );
-						}
-					}
-				);
-				
-				final Message msg = new MimeMessage ( session );
-
-				final InternetAddress from = new InternetAddress (
-					getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ),
-					getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) );
-				msg.setFrom ( from );
-				msg.setReplyTo ( new InternetAddress[] { from } );
-				msg.setSubject ( fSubject );
-
-				for ( String toAddr : fToAddrs )
-				{
-					final InternetAddress to = new InternetAddress ( toAddr );
-					msg.addRecipient ( Message.RecipientType.TO, to );
-				}
-
-				final Multipart multipart = new MimeMultipart ( "related" );
-				final BodyPart htmlPart = new MimeBodyPart ();
-				htmlPart.setContent ( fBody, "text/plain" );
-				multipart.addBodyPart ( htmlPart );
-				msg.setContent ( multipart );
-
-				Transport.send ( msg );
-
-				log.info ( "mailing " + tag + " off without error" );
-			}
-			catch ( Exception e )
-			{
-				log.warn ( "Exception caught for " + tag, e );
-			}
-		}
-
-		private final String[] fToAddrs;
-		private final String fSubject;
-		private final String fBody;
-	}
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java
deleted file mode 100644
index a3b65e1..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/PropertyReader.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.constants.CambriaConstants;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
-
-/**
- * 
- * @author 
- *
- *
- */
-public class PropertyReader extends nvReadableStack {
-	/**
-	 * 
-	 * initializing logger
-	 * 
-	 */
-	//private static final Logger LOGGER = Logger.getLogger(PropertyReader.class);
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class);
-//	private static final String MSGRTR_PROPERTIES_FILE = "msgRtrApi.properties";
-
-	/**
-	 * constructor initialization
-	 * 
-	 * @throws loadException
-	 * 
-	 */
-	public PropertyReader() throws loadException {
-	/*	Map<String, String> argMap = new HashMap<String, String>();
-		final String config = getSetting(argMap, CambriaConstants.kConfig, MSGRTR_PROPERTIES_FILE);
-		final URL settingStream = findStream(config, ConfigurationReader.class);
-		push(new nvPropertiesFile(settingStream));
-		push(new nvReadableTable(argMap));*/
-	}
-
-	/**
-	 * 
-	 * 
-	 * @param argMap
-	 * @param key
-	 * @param defaultValue
-	 * @return
-	 * 
-	 */
-	@SuppressWarnings("unused")
-	private static String getSetting(Map<String, String> argMap, final String key, final String defaultValue) {
-		String val = (String) argMap.get(key);
-		if (null == val) {
-			return defaultValue;
-		}
-		return val;
-	}
-
-	/**
-	 * 
-	 * @param resourceName
-	 * @param clazz
-	 * @return
-	 * @exception MalformedURLException
-	 * 
-	 */
-	/*public static URL findStream(final String resourceName, Class<?> clazz) {
-		try {
-			File file = new File(resourceName);
-
-			if (file.isAbsolute()) {
-				return file.toURI().toURL();
-			}
-
-			String filesRoot = System.getProperty("RRWT_FILES", null);
-
-			if (null != filesRoot) {
-
-				String fullPath = filesRoot + "/" + resourceName;
-
-				LOGGER.debug("Looking for [" + fullPath + "].");
-
-				file = new File(fullPath);
-				if (file.exists()) {
-					return file.toURI().toURL();
-				}
-			}
-
-			URL res = clazz.getClassLoader().getResource(resourceName);
-
-			if (null != res) {
-				return res;
-			}
-
-			res = ClassLoader.getSystemResource(resourceName);
-
-			if (null != res) {
-				return res;
-			}
-		} catch (MalformedURLException e) {
-			LOGGER.error("Unexpected failure to convert a local filename into a URL: " + e.getMessage(), e);
-		}
-		return null;
-	}
-*/
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java
deleted file mode 100644
index 300cf86..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/cambria/utils/Utils.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils;
-
-import java.text.DecimalFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Enumeration;
-import java.util.LinkedList;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPContext;
-/**
- * This is an utility class for various operations for formatting
- * @author author
- *
- */
-public class Utils {
-
-	private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
-	public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
-	private static final String BATCH_ID_FORMAT = "000000";
-
-	private Utils() {
-		super();
-	}
-
-	/**
-	 * Formatting the date 
-	 * @param date
-	 * @return date or null
-	 */
-	public static String getFormattedDate(Date date) {
-		SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
-		if (null != date){
-			return sdf.format(date);
-		}
-		return null;
-	}
-	/**
-	 * to get the details of User Api Key
-	 * @param request
-	 * @return authkey or null
-	 */
-	public static String getUserApiKey(HttpServletRequest request) {
-		final String auth = request.getHeader(CAMBRIA_AUTH_HEADER);
-		if (null != auth) {
-			final String[] splittedAuthKey = auth.split(":");
-			return splittedAuthKey[0];
-		}else if (null!=request.getHeader("Authorization")){
-			/**
-			 * AAF implementation enhancement
-			 */
-			 String user= request.getUserPrincipal().getName().toString();
-			return user.substring(0, user.lastIndexOf("@"));
-		}
-		return null;
-	}
-	/**
-	 * to format the batch sequence id
-	 * @param batchId
-	 * @return batchId
-	 */
-	public static String getFromattedBatchSequenceId(Long batchId) {
-		DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT);
-		return format.format(batchId);
-	}
-
-	/**
-	 * to get the message length in bytes
-	 * @param message
-	 * @return bytes or 0
-	 */
-	public static long messageLengthInBytes(String message) {
-		if (null != message) {
-			return message.getBytes().length;
-		}
-		return 0;
-	}
-	/**
-	 * To get transaction id details
-	 * @param transactionId
-	 * @return transactionId or null
-	 */
-	public static String getResponseTransactionId(String transactionId) {
-		if (null != transactionId && !transactionId.isEmpty()) {
-			return transactionId.substring(0, transactionId.lastIndexOf("::"));
-		}
-		return null;
-	}
-
-	/**
-	 * get the thread sleep time
-	 * @param ratePerMinute
-	 * @return ratePerMinute or 0
-	 */
-	public static long getSleepMsForRate ( double ratePerMinute )
-	{
-		if ( ratePerMinute <= 0.0 ) return 0;
-		return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) );
-	}
-
-	  public static String getRemoteAddress(DMaaPContext ctx)
-	  {
-	    String reqAddr = ctx.getRequest().getRemoteAddr();
-	    String fwdHeader = getFirstHeader("X-Forwarded-For",ctx);
-	    return ((fwdHeader != null) ? fwdHeader : reqAddr);
-	  }
-	  public static String getFirstHeader(String h,DMaaPContext ctx)
-	  {
-	    List l = getHeader(h,ctx);
-	    return ((l.size() > 0) ? (String)l.iterator().next() : null);
-	  }
-	  public static List<String> getHeader(String h,DMaaPContext ctx)
-	  {
-	    LinkedList list = new LinkedList();
-	    Enumeration e = ctx.getRequest().getHeaders(h);
-	    while (e.hasMoreElements())
-	    {
-	      list.add(e.nextElement().toString());
-	    }
-	    return list;
-	  }
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java
deleted file mode 100644
index 7f90e80..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/ContentLengthFilter.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.filter;
-
-import java.io.IOException;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONObject;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.CambriaApiException;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPErrorMessages;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.DMaaPResponseCode;
-import org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.exception.ErrorResponse;
-import org.springframework.context.ApplicationContext;
-import org.springframework.web.context.support.WebApplicationContextUtils;
-
-/**
- * Servlet Filter implementation class ContentLengthFilter
- */
-public class ContentLengthFilter implements Filter {
-
-	private DefaultLength defaultLength;
-
-	private FilterConfig filterConfig = null;
-	DMaaPErrorMessages errorMessages = null;
-	//private Logger log = Logger.getLogger(ContentLengthFilter.class.toString());
-	private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class);
-	/**
-	 * Default constructor.
-	 */
-
-	public ContentLengthFilter() {
-		// TODO Auto-generated constructor stub
-	}
-
-	/**
-	 * @see Filter#destroy()
-	 */
-	public void destroy() {
-		// TODO Auto-generated method stub
-	}
-
-	/**
-	 * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
-	 */
-	public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
-			ServletException {
-		// TODO Auto-generated method stub
-		// place your code here
-		log.info("inside servlet do filter content length checking before pub/sub");
-		HttpServletRequest request = (HttpServletRequest) req;
-		JSONObject jsonObj = null;
-		int requestLength = 0;
-		try {
-			// retrieving content length from message header
-
-			if (null != request.getHeader("Content-Length")) {
-				requestLength = Integer.parseInt(request.getHeader("Content-Length"));
-			}
-			// retrieving encoding from message header
-			String transferEncoding = request.getHeader("Transfer-Encoding");
-			// checking for no encoding, chunked and requestLength greater then
-			// default length
-			if (null != transferEncoding && !(transferEncoding.contains("chunked"))
-					&& (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
-				jsonObj = new JSONObject().append("defaultlength", defaultLength)
-						.append("requestlength", requestLength);
-				log.error("message length is greater than default");
-				throw new CambriaApiException(jsonObj);
-			} else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
-				jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append(
-						"requestlength", requestLength);
-				log.error("Request message is not chunked or request length is greater than default length");
-				throw new CambriaApiException(jsonObj);
-			} else {
-				chain.doFilter(req, res);
-			}
-		} catch (CambriaApiException | NumberFormatException e) {
-			log.error("message size is greater then default");
-			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
-					DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
-							+ jsonObj.toString());
-			log.info(errRes.toString());
-			// throw new CambriaApiException(errRes);
-		}
-
-	}
-
-	/**
-	 * @see Filter#init(FilterConfig)
-	 */
-	public void init(FilterConfig fConfig) throws ServletException {
-		// TODO Auto-generated method stub
-		this.filterConfig = fConfig;
-		log.info("Filter Content Length Initialize");
-		ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig
-				.getServletContext());
-		DefaultLength defLength = (DefaultLength) ctx.getBean("defLength");
-		DMaaPErrorMessages errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages");
-		this.errorMessages = errorMessages;
-		this.defaultLength = defLength;
-
-	}
-
-}
diff --git a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java b/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java
deleted file mode 100644
index 9fe91cf..0000000
--- a/src/main/java/org/onap/dmaap/messagerouter/msgrtr/nsa/filter/DefaultLength.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*******************************************************************************
- *  ============LICENSE_START=======================================================
- *  org.onap.dmaap
- *  ================================================================================
- *  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *        http://www.apache.org/licenses/LICENSE-2.0
- *  
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *  ============LICENSE_END=========================================================
- *
- *  ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *  
- *******************************************************************************/
-package org.onap.dmaap.messagerouter.msgrtr.nsa.filter;
-
-
-public class DefaultLength {
-	
-	String defaultLength;
-
-	public String getDefaultLength() {
-		return defaultLength;
-	}
-
-	public void setDefaultLength(String defaultLength) {
-		this.defaultLength = defaultLength;
-	}
-
-}
diff --git a/src/main/resources/DMaaPUrl.properties b/src/main/resources/DMaaPUrl.properties
index 8c90912..a0cf9ab 100644
--- a/src/main/resources/DMaaPUrl.properties
+++ b/src/main/resources/DMaaPUrl.properties
@@ -36,4 +36,4 @@ url=http://hltd436.hydc.sbc.com:8080/DMaaP/dmaaprest/
 date=2015-11-23T8:56:19-0700
 
 # topic
-topicName=org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel
\ No newline at end of file
+topicName=com.att.app.dmaap.mr.sharjeel
\ No newline at end of file
diff --git a/src/main/resources/dme2testcase.properties b/src/main/resources/dme2testcase.properties
index 6f358a3..ad38c43 100644
--- a/src/main/resources/dme2testcase.properties
+++ b/src/main/resources/dme2testcase.properties
@@ -39,17 +39,17 @@ filterType=filter={"class":"Equals", "field":"email", "value":"ai039a@att.com"}
 #topics
 subContextPathGetAllTopic=/topics
 subContextPathGetOneTopic=/topics/
-SubContextPathGetPublisher=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/producers
-SubContextPathGetPermitPublisher=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/producers/rk229m@csp.att.com
-SubContextPathGetConsumer=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/consumers
+SubContextPathGetPublisher=/topics/com.att.app.dmaap.mr.sharjeel/producers
+SubContextPathGetPermitPublisher=/topics/com.att.app.dmaap.mr.sharjeel/producers/rk229m@csp.att.com
+SubContextPathGetConsumer=/topics/com.att.app.dmaap.mr.sharjeel/consumers
 SubContextPathCreateTopic=/topics/create
-SubContextPathGetPermitConsumer=/topics/org.onap.dmaap.messagerouter.msgrtr.app.dmaap.mr.sharjeel/consumers/rk229m@att.com
-newTopic=org.onap.dmaap.messagerouter.msgrtr.dmaap.mr.junittestingtopic
+SubContextPathGetPermitConsumer=/topics/com.att.app.dmaap.mr.sharjeel/consumers/rk229m@att.com
+newTopic=com.att.dmaap.mr.junittestingtopic
 topicDescription=new topic creation
 partition=1
 replication=1
 txenabled=true
-deleteTopic=org.onap.dmaap.messagerouter.msgrtr.dmaap.mr.deleteTopic
+deleteTopic=com.att.dmaap.mr.deleteTopic
 
 
 #Admin
diff --git a/src/main/resources/endpoint.properties b/src/main/resources/endpoint.properties
index f1ab131..34e222f 100644
--- a/src/main/resources/endpoint.properties
+++ b/src/main/resources/endpoint.properties
@@ -22,7 +22,7 @@
 Latitude =37.66
 Longitude =-122.096839
 Version =1.0.0
-ServiceName =org.onap.dmaap.messagerouter.msgrtr.acsi.saat.dt.dmaap.dev.demo1
+ServiceName =com.att.acsi.saat.dt.dmaap.dev.demo1
 Environment =DEV
 RouteOffer =LA
 HostName =hltd436.hydc.sbc.com
diff --git a/src/main/resources/routes.conf b/src/main/resources/routes.conf
index 14c4f56..ccaa94d 100644
--- a/src/main/resources/routes.conf
+++ b/src/main/resources/routes.conf
@@ -1,4 +1,4 @@
-package org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.endpoints
+package com.att.nsa.cambria.endpoints
 
 #
 #	We need to deprecate the original non-versioned paths and use /v1/ for them.
diff --git a/src/main/scripts/cambriaTool.sh b/src/main/scripts/cambriaTool.sh
index 175a19c..a9d6e15 100644
--- a/src/main/scripts/cambriaTool.sh
+++ b/src/main/scripts/cambriaTool.sh
@@ -52,4 +52,4 @@ else
     JAVA=java
 fi
 
-$JAVA -cp ${BASE_DIR}/etc${PATHSEP}${BASE_DIR}/lib/* org.onap.dmaap.messagerouter.messageservice.nsa.cambria.tools.ConfigTool $*
+$JAVA -cp ${BASE_DIR}/etc${PATHSEP}${BASE_DIR}/lib/* com.att.nsa.cambria.tools.ConfigTool $*
diff --git a/src/main/scripts/swmpkgclean.sh b/src/main/scripts/swmpkgclean.sh
index 5f8699f..7e6bc51 100644
--- a/src/main/scripts/swmpkgclean.sh
+++ b/src/main/scripts/swmpkgclean.sh
@@ -24,7 +24,7 @@
 # SWM can only store a finite amount of packages in its repository, so this script deletes the oldest package.
 # This script is run by Jenkins after the build is finished (post SWM upload).
 
-SWM_COMPONENT="org.onap.dmaap.messagerouter.msgrtr.nsa:msgrtr"
+SWM_COMPONENT="com.att.nsa:msgrtr"
 
 SWM_PKGS=`/opt/app/swm/aftswmcli/bin/swmcli "component pkglist -c $SWM_COMPONENT -df -dh -dj -sui"`
 SWM_PKGS_COUNT=`echo "$SWM_PKGS" | wc -l`
diff --git a/src/main/webapp/WEB-INF/spring-context.xml b/src/main/webapp/WEB-INF/spring-context.xml
index d7f128e..5e2e985 100644
--- a/src/main/webapp/WEB-INF/spring-context.xml
+++ b/src/main/webapp/WEB-INF/spring-context.xml
@@ -56,49 +56,49 @@
 	<bean id="drumlinRequestRouter"
 		class="com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter" />
 
-	<bean id="dMaaPMetricsSet" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPMetricsSet">
+	<bean id="dMaaPMetricsSet" class="com.att.nsa.cambria.beans.DMaaPMetricsSet">
 		<constructor-arg ref="propertyReader" />
 	</bean>
 
-	<bean id="dMaaPZkClient" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPZkClient">
+	<bean id="dMaaPZkClient" class=" com.att.nsa.cambria.beans.DMaaPZkClient">
 		<constructor-arg ref="propertyReader" />
 	</bean>
 
-	<bean id="dMaaPZkConfigDb" class=" org.onap.dmaap.messagerouter.msgrt.nsa.cambria.beans.DMaaPZkConfigDb">
+	<bean id="dMaaPZkConfigDb" class=" com.att.nsa.cambria.beans.DMaaPZkConfigDb">
 		<constructor-arg ref="dMaaPZkClient" />
 		<constructor-arg ref="propertyReader" />
 	</bean>
 
-	<bean id="kafkaPublisher" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.kafka.KafkaPublisher">
+	<bean id="kafkaPublisher" class=" com.att.nsa.cambria.backends.kafka.KafkaPublisher">
 		<constructor-arg ref="propertyReader" />
 	</bean>
 
-	<bean id="dMaaPKafkaConsumerFactory" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaConsumerFactory">
+	<bean id="dMaaPKafkaConsumerFactory" class=" com.att.nsa.cambria.beans.DMaaPKafkaConsumerFactory">
 		<constructor-arg ref="propertyReader" />
 		<constructor-arg ref="dMaaPMetricsSet" />
 		<constructor-arg ref="curator" />
 	</bean>
 
-	<bean id="curator" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.utils.DMaaPCuratorFactory"
+	<bean id="curator" class="com.att.nsa.cambria.utils.DMaaPCuratorFactory"
 		factory-method="getCurator">
 		<constructor-arg ref="propertyReader" />
 	</bean>
 
-	<bean id="dMaaPKafkaMetaBroker" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPKafkaMetaBroker">
+	<bean id="dMaaPKafkaMetaBroker" class=" com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker">
 		<constructor-arg ref="propertyReader" />
 		<constructor-arg ref="dMaaPZkClient" />
 		<constructor-arg ref="dMaaPZkConfigDb" />
 	</bean>
 
-	<bean id="q" class=" org.onap.dmaap.messagerouter.msgrtr.com.att.nsa.cambria.backends.memory.MemoryQueue" />
+	<bean id="q" class=" com.att.nsa.cambria.backends.memory.MemoryQueue" />
 
-	<bean id="mmb" class=" org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.backends.memory.MemoryMetaBroker">
+	<bean id="mmb" class=" com.att.nsa.cambria.backends.memory.MemoryMetaBroker">
 		<constructor-arg ref="q" />
 		<constructor-arg ref="dMaaPZkConfigDb" />
 	<!-- <constructor-arg ref="propertyReader" />-->
 	</bean>
 
-	<bean id="dMaaPNsaApiDb" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.beans.DMaaPNsaApiDb"
+	<bean id="dMaaPNsaApiDb" class="com.att.nsa.cambria.beans.DMaaPNsaApiDb"
 		factory-method="buildApiKeyDb">
 		<constructor-arg ref="propertyReader" />
 		<constructor-arg ref="dMaaPZkConfigDb" />
@@ -108,10 +108,10 @@
 		factory-method="buildTransactionDb"> <constructor-arg ref="propertyReader" 
 		/> <constructor-arg ref="dMaaPZkConfigDb" /> </bean> -->
 
-	<bean id="dMaaPAuthenticatorImpl" class="org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.security.DMaaPAuthenticatorImpl">
+	<bean id="dMaaPAuthenticatorImpl" class="com.att.nsa.cambria.security.DMaaPAuthenticatorImpl">
 		<constructor-arg ref="dMaaPNsaApiDb" />
 	</bean>
-	<bean id="defLength" class="org.onap.dmaap.messagerouter.msgrtr.nsa.filter.DefaultLength">
+	<bean id="defLength" class="com.att.nsa.filter.DefaultLength">
 		<property name="defaultLength" value="${maxcontentlength}"></property>
 	</bean>
 
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index c3e07b5..46d7b45 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -31,12 +31,12 @@
     <param-value>/WEB-INF/spring-context.xml</param-value>
   </context-param>
   <listener>
-    <listener-class>org.onap.dmaap.messagerouter.msgrtr.nsa.cambria.listener.CambriaServletContextListener</listener-class>
+    <listener-class>com.att.nsa.cambria.listener.CambriaServletContextListener</listener-class>
   </listener>
   <filter>
     <display-name>ContentLengthFilter</display-name>
     <filter-name>ContentLengthFilter</filter-name>
-    <filter-class>org.onap.dmaap.messagerouter.msgrtr.nsa.filter.ContentLengthFilter</filter-class>
+    <filter-class>com.att.nsa.filter.ContentLengthFilter</filter-class>
   </filter>
   <filter-mapping>
     <filter-name>ContentLengthFilter</filter-name>
-- 
cgit